author     brian.zhu <brian.zhu@amlogic.com>   2013-07-22 10:19:26 (GMT)
committer  brian.zhu <brian.zhu@amlogic.com>   2013-07-22 10:19:26 (GMT)
commit     0f9a1ec646ffab8841e8da760e6046a819fb2f27 (patch)
tree       5132b8ba1d2f11c847e9a8846e6b1931d5a94a4b
parent     0f8aaedf899ae5c3e400340794eefa2dc126aae0 (diff)
1. Add MJPEG format support for the USB camera. It is disabled by default; set the system property ro.camera.preview.UseMJPEG in system.prop to enable it (see the example below).
2. Add system.prop controls for the maximum preview size and maximum frame rate.
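The MJPEG path can be enabled from the device's system.prop; a minimal sketch, assuming a board-specific file location:

    # device/<vendor>/<board>/system.prop (path shown only as an example)
    # non-zero enables the MJPEG preview path in the USB camera HAL; default is off
    ro.camera.preview.UseMJPEG=1

The maximum preview size and frame-rate limits from point 2 are parsed at runtime by ParserLimittedRateInfo() in V4LCameraAdapter.cpp as width x height x framerate entries; the property name it reads is not shown in the hunks below.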
Diffstat
-rwxr-xr-x  Android.mk                                   10
-rwxr-xr-x  CameraHalUtilClasses.cpp                      8
-rwxr-xr-x  CameraHal_Module.cpp                         28
-rwxr-xr-x  V4LCameraAdapter/V4LCameraAdapter.cpp      1860
-rwxr-xr-x  inc/V4LCameraAdapter/V4LCameraAdapter.h      19
-rwxr-xr-x  inc/mjpeg/colorspaces.h                     278
-rwxr-xr-x  inc/mjpeg/defs.h                             57
-rwxr-xr-x  inc/mjpeg/huffman.h                          99
-rwxr-xr-x  inc/mjpeg/jutils.h                          142
-rwxr-xr-x  mjpeg/colorspaces.c                        1654
-rwxr-xr-x  mjpeg/jpegdec.c                            1003
-rwxr-xr-x  vircam/V4LCamAdpt.cpp                         9
12 files changed, 4135 insertions, 1032 deletions
diff --git a/Android.mk b/Android.mk
index a5a7937..7381bca 100755
--- a/Android.mk
+++ b/Android.mk
@@ -32,13 +32,18 @@ CAMERA_HAL_VERTURAL_CAMERA_SRC:= \
vircam/AppCbNotifier.cpp \
vircam/V4LCamAdpt.cpp
+CAMERA_HAL_JPEG_SRC:=\
+ mjpeg/jpegdec.c \
+ mjpeg/colorspaces.c
+
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
$(CAMERA_HAL_SRC) \
$(CAMERA_V4L_SRC) \
$(CAMERA_COMMON_SRC) \
- $(CAMERA_UTILS_SRC)
+ $(CAMERA_UTILS_SRC) \
+ $(CAMERA_HAL_JPEG_SRC)
LOCAL_C_INCLUDES += \
$(LOCAL_PATH)/inc/ \
@@ -50,7 +55,8 @@ LOCAL_C_INCLUDES += \
external/jhead/ \
external/jpeg/ \
hardware/libhardware/modules/gralloc/ \
- frameworks/native/include/media/hardware
+ frameworks/native/include/media/hardware \
+ $(LOCAL_PATH)/inc/mjpeg/
LOCAL_C_INCLUDES_VIRCAM := \
$(LOCAL_PATH)/vircam/inc
diff --git a/CameraHalUtilClasses.cpp b/CameraHalUtilClasses.cpp
index d4f2dd0..2f7145b 100755
--- a/CameraHalUtilClasses.cpp
+++ b/CameraHalUtilClasses.cpp
@@ -121,7 +121,7 @@ status_t CameraArea::transfrom(size_t width,
size_t hRange, vRange;
double hScale, vScale;
- LOG_FUNCTION_NAME
+ LOG_FUNCTION_NAME;
hRange = CameraArea::RIGHT - CameraArea::LEFT;
vRange = CameraArea::BOTTOM - CameraArea::TOP;
@@ -135,7 +135,7 @@ status_t CameraArea::transfrom(size_t width,
areaWidth = ( mRight + hRange / 2) * hScale;
areaWidth -= left;
- LOG_FUNCTION_NAME_EXIT
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -206,7 +206,7 @@ status_t CameraArea::parseAreas(const char *area,
char *tmpBuffer = NULL;
sp<CameraArea> currentArea;
- LOG_FUNCTION_NAME
+ LOG_FUNCTION_NAME;
if ( ( NULL == area ) ||
( 0 >= areaLength ) )
@@ -322,7 +322,7 @@ status_t CameraArea::parseAreas(const char *area,
free(tmpBuffer);
}
- LOG_FUNCTION_NAME_EXIT
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
diff --git a/CameraHal_Module.cpp b/CameraHal_Module.cpp
index 76df824..ac6877e 100755
--- a/CameraHal_Module.cpp
+++ b/CameraHal_Module.cpp
@@ -55,6 +55,29 @@ static void setLogLevel(void *p){
android_atomic_write(level, &gCamHal_LogLevel);
}
+static const char *macro_info[]={
+#ifdef CAMHAL_USER_MODE
+ "user mode",
+#endif
+#ifdef AMLOGIC_FRONT_CAMERA_SUPPORT
+ "front board camera",
+#endif
+#ifdef AMLOGIC_BACK_CAMERA_SUPPORT
+ "back board camera",
+#endif
+#ifdef AMLOGIC_USB_CAMERA_SUPPORT
+ "usb camera",
+#endif
+#ifdef AMLOGIC_TWO_CH_UVC
+ "usb is two channel",
+#endif
+#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
+ "virtual camera enable",
+#endif
+#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
+ "nonblock mode",
+#endif
+};
static struct hw_module_methods_t camera_module_methods = {
@@ -881,7 +904,7 @@ extern "C" int CameraAdapter_CameraNum();
int camera_get_number_of_cameras(void)
{
int num_cameras = CameraAdapter_CameraNum();
- gCamerasSupported = num_cameras;
+ gCamerasSupported = num_cameras;
CAMHAL_LOGDB("gCamerasSupported=%d,num_cameras=%d\n",
gCamerasSupported, num_cameras);
@@ -906,6 +929,9 @@ int camera_get_number_of_cameras(void)
CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
);
#endif
+ for(unsigned i = 0;i<sizeof(macro_info)/sizeof(macro_info[0]) ;i++){
+ CAMHAL_LOGIB("%s\n", macro_info[i]);
+ }
return num_cameras;
}
diff --git a/V4LCameraAdapter/V4LCameraAdapter.cpp b/V4LCameraAdapter/V4LCameraAdapter.cpp
index 526408f..03b3370 100755
--- a/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -50,17 +50,15 @@
#include <sys/types.h>
#include <sys/stat.h>
#include "CameraHal.h"
-
+extern "C"{
+ #include "jutils.h"
+}
//for private_handle_t TODO move out of private header
#include <gralloc_priv.h>
-#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
-static int mDebugFps = 0;
static int iCamerasNum = -1;
-#define Q16_OFFSET 16
-
#ifndef ARRAY_SIZE
#define ARRAY_SIZE(x) (sizeof(x) / sizeof((x)[0]))
#endif
@@ -96,7 +94,8 @@ static int set_hflip_mode(int camera_fd, bool mode);
static int get_hflip_mode(int camera_fd);
static int get_supported_zoom(int camera_fd, char * zoom_str);
static int set_zoom_level(int camera_fd, int zoom);
-
+static bool is_mjpeg_supported(int camera_fd);
+static void ParserLimittedRateInfo(LimittedRate_t* rate);
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
extern "C" int get_framerate (int camera_fd,int *fps, int *fps_num);
extern "C" int enumFramerate ( int camera_fd, int *fps, int *fps_num);
@@ -121,17 +120,15 @@ static int writefile(char* path,char* content)
CAMHAL_LOGDB("Write file %s(%p) content %s", path, fp, content);
- if (fp) {
+ if (fp){
while( ((*content) != '\0') ) {
if (EOF == fputc(*content,fp)){
CAMHAL_LOGDA("write char fail");
}
content++;
}
-
fclose(fp);
- }
- else{
+ }else{
CAMHAL_LOGDA("open file fail\n");
}
return 1;
@@ -143,95 +140,79 @@ status_t V4LCameraAdapter::sendCommand(CameraCommands operation, int value1, int
mPreviewOriation=value1;
mCaptureOriation=value2;
return 1;
- }else
+ }else{
return BaseCameraAdapter::sendCommand(operation, value1, value2, value3);
+ }
}
status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
{
LOG_FUNCTION_NAME;
- char value[PROPERTY_VALUE_MAX];
- property_get("debug.camera.showfps", value, "0");
- mDebugFps = atoi(value);
+ //char value[PROPERTY_VALUE_MAX];
+ //property_get("debug.camera.showfps", value, "0");
int ret = NO_ERROR;
+ int oflag = O_RDWR;
+
+#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
+ oflag = O_RDWR | O_NONBLOCK;
+#endif
// Allocate memory for video info structure
mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo));
- if(!mVideoInfo)
- {
- return NO_MEMORY;
+ if(!mVideoInfo){
+ return NO_MEMORY;
}
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
#ifdef AMLOGIC_TWO_CH_UVC
- mCamEncodeIndex = -1;
- mCamEncodeHandle = -1;
- ret = getVideodevId( mSensorIndex, mCamEncodeIndex);
- if(NO_ERROR == ret){
-#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
- if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), O_RDWR | O_NONBLOCK )) != -1)
-#else
- if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), O_RDWR)) != -1)
-#endif
- {
- CAMHAL_LOGDB("open %s success to preview\n", DEVICE_PATH(mSensorIndex));
- }
- if ( (0<= mCamEncodeIndex)&& (mCamEncodeIndex < (int)ARRAY_SIZE(SENSOR_PATH))&&
- ((mCamEncodeHandle = open(DEVICE_PATH(mCamEncodeIndex), O_RDWR)) != -1))
- {
- CAMHAL_LOGDB("open %s success to encode\n", DEVICE_PATH(mCamEncodeIndex));
- }
- }
-#else
- while(mSensorIndex < (int)ARRAY_SIZE(SENSOR_PATH)){
-#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
- if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), O_RDWR | O_NONBLOCK)) != -1)
+ mCamEncodeIndex = -1;
+ mCamEncodeHandle = -1;
+ ret = getVideodevId( mSensorIndex, mCamEncodeIndex);
+ if(NO_ERROR == ret){
+ if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), oflag )) != -1){
+ CAMHAL_LOGDB("open %s success to preview\n", DEVICE_PATH(mSensorIndex));
+ }
+ if ((0<= mCamEncodeIndex)&& (mCamEncodeIndex < (int)ARRAY_SIZE(SENSOR_PATH))&&
+ ((mCamEncodeHandle = open(DEVICE_PATH(mCamEncodeIndex), O_RDWR)) != -1)){
+ CAMHAL_LOGDB("open %s success to encode\n", DEVICE_PATH(mCamEncodeIndex));
+ }
+ }
#else
- if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), O_RDWR)) != -1)
-#endif
- {
- CAMHAL_LOGDB("open %s success!\n", DEVICE_PATH(mSensorIndex));
- break;
- }
- mSensorIndex++;
- }
- if(mSensorIndex >= (int)ARRAY_SIZE(SENSOR_PATH)){
- CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
- return -EINVAL;
- }
+ while(mSensorIndex < (int)ARRAY_SIZE(SENSOR_PATH)){
+ if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), oflag)) != -1){
+ CAMHAL_LOGDB("open %s success!\n", DEVICE_PATH(mSensorIndex));
+ break;
+ }
+ mSensorIndex++;
+ }
+ if(mSensorIndex >= (int)ARRAY_SIZE(SENSOR_PATH)){
+ CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
+ return -EINVAL;
+ }
#endif
#else
-
-#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
- if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), O_RDWR | O_NONBLOCK )) == -1)
-#else
- if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), O_RDWR)) == -1)
-#endif
- {
- CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
- return -EINVAL;
+ if ((mCameraHandle = open(DEVICE_PATH(mSensorIndex), oflag)) == -1){
+ CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
+ return -EINVAL;
}
#endif
ret = ioctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
- if (ret < 0)
- {
- CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
- return -EINVAL;
+ if (ret < 0){
+ CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
+ return -EINVAL;
}
- if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
- {
- CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
- return -EINVAL;
+ if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0){
+ CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
+ return -EINVAL;
}
- if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING))
- {
- CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
- return -EINVAL;
+ if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING)){
+ CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
+ return -EINVAL;
}
if (strcmp(caps->get(CameraProperties::FACING_INDEX), (const char *) android::ExCameraParameters::FACING_FRONT) == 0)
@@ -249,6 +230,7 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
cur_focus_mode_for_conti = CAM_FOCUS_MODE_RELEASE;
mFlashMode = FLASHLIGHT_OFF;
mPixelFormat = 0;
+ mSensorFormat = 0;
mPreviewWidth = 0 ;
mPreviewHeight = 0;
@@ -261,62 +243,41 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
IoctlStateProbe();
-#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
- int fps=0, fps_num=0;
- int PreviewFrameRate = 0;
- char *fpsrange=(char *)calloc(32,sizeof(char));
-
- ret = get_framerate(mCameraHandle, &fps, &fps_num);
- if((fpsrange != NULL)&&(NO_ERROR == ret) && ( 0 !=fps_num )){
- PreviewFrameRate = fps/fps_num;
- int tmp_fps = fps/fps_num/5;
- int iter = 0;
- int shift = 0;
- for(iter = 0;iter < tmp_fps;)
- {
- iter++;
- if(iter == tmp_fps)
- sprintf(fpsrange+shift,"%d",iter*5);
- else
- sprintf(fpsrange+shift,"%d,",iter*5);
- if(iter == 1)
- shift += 2;
- else
- shift += 3;
-
+ mSupportMJPEG = false;
+ {
+ char property[32];
+ int enable = 0;
+ memset(property,0,sizeof(property));
+ if(property_get("ro.camera.preview.UseMJPEG", property, NULL) > 0){
+ enable = atoi(property);
}
- if((fps/fps_num)%5 != 0)
- sprintf(fpsrange+shift-1,",%d",fps/fps_num);
- CAMHAL_LOGDB("supported preview rates is %s\n", fpsrange);
-
- mParams.set(CameraParameters::KEY_PREVIEW_FRAME_RATE,fps/fps_num);
- mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,fpsrange);
+ mUseMJPEG = (enable!=0)?true:false;
+ }
+ if(mUseMJPEG == true){
+ mSupportMJPEG = is_mjpeg_supported(mCameraHandle);
+ if(mSupportMJPEG == true){
+ CAMHAL_LOGDA("Current Camera's preview format set as MJPEG\n");
+ }
+ }
- memset(fpsrange,0,32*sizeof(char));
- sprintf(fpsrange,"%s%d","5000,",fps/fps_num*1000);
- mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,fpsrange);
- mParams.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,fpsrange);
- }else{
- PreviewFrameRate = 15;
-
- sprintf(fpsrange,"%s%d","5,", PreviewFrameRate);
- CAMHAL_LOGDB("default preview rates is %s\n", fpsrange);
+ ParserLimittedRateInfo(&LimittedRate);
+ if(LimittedRate.num>0){
+ CAMHAL_LOGDB("Current Camera's succeed parser %d limitted rate parameter(s)\n",LimittedRate.num);
+ for(int k = 0;k<LimittedRate.num;k++){
+ CAMHAL_LOGVB("limitted rate parameter %d : %dx%dx%d\n",LimittedRate.num,LimittedRate.arg[k].width,LimittedRate.arg[k].height,LimittedRate.arg[k].framerate);
+ }
+ }
- mParams.set(CameraParameters::KEY_PREVIEW_FRAME_RATE, PreviewFrameRate);
- mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,fpsrange);
+ mLimittedFrameRate = 0; // no limitted
- mParams.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,fpsrange);
- mParams.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,fpsrange);
- }
-#endif
#ifndef AMLOGIC_USB_CAMERA_SUPPORT
writefile((char*)SYSFILE_CAMERA_SET_PARA, (char*)"1");
#endif
//mirror set at here will not work.
LOG_FUNCTION_NAME_EXIT;
-
return ret;
}
+
status_t V4LCameraAdapter::IoctlStateProbe(void)
{
struct v4l2_queryctrl qc;
@@ -325,7 +286,6 @@ status_t V4LCameraAdapter::IoctlStateProbe(void)
LOG_FUNCTION_NAME;
mIoctlSupport = 0;
-
if(get_hflip_mode(mCameraHandle)==0){
mIoctlSupport |= IOCTL_MASK_HFLIP;
}else{
@@ -335,26 +295,24 @@ status_t V4LCameraAdapter::IoctlStateProbe(void)
memset(&qc, 0, sizeof(struct v4l2_queryctrl));
qc.id = V4L2_CID_ZOOM_ABSOLUTE;
ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
- if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
- || (qc.type != V4L2_CTRL_TYPE_INTEGER)){
- mIoctlSupport &= ~IOCTL_MASK_ZOOM;
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)|| (qc.type != V4L2_CTRL_TYPE_INTEGER)){
+ mIoctlSupport &= ~IOCTL_MASK_ZOOM;
}else{
- mIoctlSupport |= IOCTL_MASK_ZOOM;
+ mIoctlSupport |= IOCTL_MASK_ZOOM;
}
#ifndef AMLOGIC_USB_CAMERA_SUPPORT
memset(&qc, 0, sizeof(struct v4l2_queryctrl));
qc.id = V4L2_ROTATE_ID;
ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
- if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
- || (qc.type != V4L2_CTRL_TYPE_INTEGER)){
- mIoctlSupport &= ~IOCTL_MASK_ROTATE;
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)|| (qc.type != V4L2_CTRL_TYPE_INTEGER)){
+ mIoctlSupport &= ~IOCTL_MASK_ROTATE;
}else{
- mIoctlSupport |= IOCTL_MASK_ROTATE;
+ mIoctlSupport |= IOCTL_MASK_ROTATE;
}
if(mIoctlSupport & IOCTL_MASK_ROTATE){
- CAMHAL_LOGDB("camera %d support capture rotate",mSensorIndex);
+ CAMHAL_LOGDB("camera %d support capture rotate",mSensorIndex);
}
mRotateValue = 0;
#endif
@@ -367,15 +325,15 @@ status_t V4LCameraAdapter::IoctlStateProbe(void)
#endif
ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0) ){
- mIoctlSupport &= ~IOCTL_MASK_EXPOSURE;
- mEVdef = 4;
- mEVmin = 0;
- mEVmax = 8;
+ mIoctlSupport &= ~IOCTL_MASK_EXPOSURE;
+ mEVdef = 4;
+ mEVmin = 0;
+ mEVmax = 8;
}else{
- mIoctlSupport |= IOCTL_MASK_EXPOSURE;
- mEVdef = qc.default_value;
- mEVmin = qc.minimum;
- mEVmax = qc.maximum;
+ mIoctlSupport |= IOCTL_MASK_EXPOSURE;
+ mEVdef = qc.default_value;
+ mEVmin = qc.minimum;
+ mEVmax = qc.maximum;
}
mEV = mEVdef;
@@ -386,53 +344,48 @@ status_t V4LCameraAdapter::IoctlStateProbe(void)
qc.id = V4L2_CID_DO_WHITE_BALANCE;
#endif
ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
- if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0) ){
- mIoctlSupport &= ~IOCTL_MASK_WB;
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)){
+ mIoctlSupport &= ~IOCTL_MASK_WB;
}else{
- mIoctlSupport |= IOCTL_MASK_WB;
+ mIoctlSupport |= IOCTL_MASK_WB;
}
mWhiteBalance = qc.default_value;
-
memset(&qc, 0, sizeof(struct v4l2_queryctrl));
qc.id = V4L2_CID_BACKLIGHT_COMPENSATION;
ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
- if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
- || (qc.type != V4L2_CTRL_TYPE_MENU)){
- mIoctlSupport &= ~IOCTL_MASK_FLASH;
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)|| (qc.type != V4L2_CTRL_TYPE_MENU)){
+ mIoctlSupport &= ~IOCTL_MASK_FLASH;
}else{
- mIoctlSupport |= IOCTL_MASK_FLASH;
+ mIoctlSupport |= IOCTL_MASK_FLASH;
}
memset(&qc, 0, sizeof(struct v4l2_queryctrl));
qc.id = V4L2_CID_COLORFX;
ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
- if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
- || (qc.type != V4L2_CTRL_TYPE_MENU)){
- mIoctlSupport &= ~IOCTL_MASK_EFFECT;
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)|| (qc.type != V4L2_CTRL_TYPE_MENU)){
+ mIoctlSupport &= ~IOCTL_MASK_EFFECT;
}else{
- mIoctlSupport |= IOCTL_MASK_EFFECT;
+ mIoctlSupport |= IOCTL_MASK_EFFECT;
}
memset(&qc, 0, sizeof(struct v4l2_queryctrl));
qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
- if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
- || (qc.type != V4L2_CTRL_TYPE_MENU)){
- mIoctlSupport &= ~IOCTL_MASK_BANDING;
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)|| (qc.type != V4L2_CTRL_TYPE_MENU)){
+ mIoctlSupport &= ~IOCTL_MASK_BANDING;
}else{
- mIoctlSupport |= IOCTL_MASK_BANDING;
+ mIoctlSupport |= IOCTL_MASK_BANDING;
}
mAntiBanding = qc.default_value;
memset(&qc, 0, sizeof(struct v4l2_queryctrl));
qc.id = V4L2_CID_FOCUS_AUTO;
ret = ioctl (mCameraHandle, VIDIOC_QUERYCTRL, &qc);
- if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
- || (qc.type != V4L2_CTRL_TYPE_MENU)){
- mIoctlSupport &= ~IOCTL_MASK_FOCUS;
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)|| (qc.type != V4L2_CTRL_TYPE_MENU)){
+ mIoctlSupport &= ~IOCTL_MASK_FOCUS;
}else{
- mIoctlSupport |= IOCTL_MASK_FOCUS;
+ mIoctlSupport |= IOCTL_MASK_FOCUS;
}
memset(&qc, 0, sizeof(struct v4l2_queryctrl));
@@ -445,38 +398,31 @@ status_t V4LCameraAdapter::IoctlStateProbe(void)
}
LOG_FUNCTION_NAME_EXIT;
-
return ret;
}
status_t V4LCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
{
-
status_t ret = NO_ERROR;
v4l2_buffer hbuf_query;
memset(&hbuf_query,0,sizeof(v4l2_buffer));
- //LOGD("fillThisBuffer frameType=%d", frameType);
- if (CameraFrame::IMAGE_FRAME == frameType)
- {
- //if (NULL != mEndImageCaptureCallback)
- //mEndImageCaptureCallback(mEndCaptureData);
- if (NULL != mReleaseImageBuffersCallback)
- mReleaseImageBuffersCallback(mReleaseData);
- return NO_ERROR;
+ if (CameraFrame::IMAGE_FRAME == frameType){
+ //if (NULL != mEndImageCaptureCallback)
+ //mEndImageCaptureCallback(mEndCaptureData);
+ if (NULL != mReleaseImageBuffersCallback)
+ mReleaseImageBuffersCallback(mReleaseData);
+ return NO_ERROR;
}
- if ( !mVideoInfo->isStreaming || !mPreviewing)
- {
- return NO_ERROR;
+ if ( !mVideoInfo->isStreaming || !mPreviewing){
+ return NO_ERROR;
}
int i = mPreviewBufs.valueFor(( unsigned int )frameBuf);
- if(i<0)
- {
- return BAD_VALUE;
+ if(i<0){
+ return BAD_VALUE;
}
- if(nQueued>=mPreviewBufferCount)
- {
+ if(nQueued>=mPreviewBufferCount){
CAMHAL_LOGEB("fill buffer error, reach the max preview buff:%d,max:%d",nQueued,mPreviewBufferCount);
return BAD_VALUE;
}
@@ -493,24 +439,20 @@ status_t V4LCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType
#endif
ret = ioctl(mCameraHandle, VIDIOC_QBUF, &hbuf_query);
if (ret < 0) {
- CAMHAL_LOGEB("Init: VIDIOC_QBUF %d Failed, errno=%d\n",i, errno);
- return -1;
+ CAMHAL_LOGEB("Init: VIDIOC_QBUF %d Failed, errno=%d\n",i, errno);
+ return -1;
}
- //CAMHAL_LOGEB("fillThis Buffer %d",i);
nQueued++;
return ret;
-
}
status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
{
LOG_FUNCTION_NAME;
-
status_t rtn = NO_ERROR;
// Update the current parameter set
mParams = params;
-
//check zoom value
int zoom = mParams.getInt(CameraParameters::KEY_ZOOM);
int maxzoom = mParams.getInt(CameraParameters::KEY_MAX_ZOOM);
@@ -563,18 +505,18 @@ status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
flashmode = mParams.get(CameraParameters::KEY_FLASH_MODE);
if((mIoctlSupport & IOCTL_MASK_FLASH) && flashmode){
- if(strcasecmp(flashmode, "torch")==0){
- set_flash_mode(mCameraHandle, flashmode);
- mFlashMode = FLASHLIGHT_TORCH;
- }else if(strcasecmp(flashmode, "on")==0){
- if( FLASHLIGHT_TORCH == mFlashMode){
- set_flash_mode(mCameraHandle, "off");
- }
- mFlashMode = FLASHLIGHT_ON;
- }else if(strcasecmp(flashmode, "off")==0){
- set_flash_mode(mCameraHandle, flashmode);
- mFlashMode = FLASHLIGHT_OFF;
- }
+ if(strcasecmp(flashmode, "torch")==0){
+ set_flash_mode(mCameraHandle, flashmode);
+ mFlashMode = FLASHLIGHT_TORCH;
+ }else if(strcasecmp(flashmode, "on")==0){
+ if( FLASHLIGHT_TORCH == mFlashMode){
+ set_flash_mode(mCameraHandle, "off");
+ }
+ mFlashMode = FLASHLIGHT_ON;
+ }else if(strcasecmp(flashmode, "off")==0){
+ set_flash_mode(mCameraHandle, flashmode);
+ mFlashMode = FLASHLIGHT_OFF;
+ }
}
exposure=mParams.get(CameraParameters::KEY_EXPOSURE_COMPENSATION);
@@ -584,7 +526,7 @@ status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
white_balance=mParams.get(CameraParameters::KEY_WHITE_BALANCE);
if((mIoctlSupport & IOCTL_MASK_WB) && white_balance){
- set_white_balance(mCameraHandle,white_balance);
+ set_white_balance(mCameraHandle,white_balance);
}
effect=mParams.get(CameraParameters::KEY_EFFECT);
@@ -618,46 +560,46 @@ status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
}
supportfocusmode = mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
if( NULL != strstr(supportfocusmode, "continuous")){
- if(CAM_FOCUS_MODE_AUTO != cur_focus_mode_for_conti){
- struct v4l2_control ctl;
- if( (CAM_FOCUS_MODE_CONTI_VID != cur_focus_mode_for_conti ) &&
- ( (CAM_FOCUS_MODE_AUTO == cur_focus_mode )
- ||( CAM_FOCUS_MODE_CONTI_PIC == cur_focus_mode )
- ||( CAM_FOCUS_MODE_CONTI_VID == cur_focus_mode ) )){
- mEnableContiFocus = true;
- ctl.id = V4L2_CID_FOCUS_AUTO;
- ctl.value = CAM_FOCUS_MODE_CONTI_VID;
- if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
- CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_CONTI_VID!\n");
- }
- mFocusWaitCount = FOCUS_PROCESS_FRAMES;
- bFocusMoveState = true;
- cur_focus_mode_for_conti = CAM_FOCUS_MODE_CONTI_VID;
- }else if( (CAM_FOCUS_MODE_CONTI_VID != cur_focus_mode_for_conti)&&
- (CAM_FOCUS_MODE_AUTO != cur_focus_mode) &&
- ( CAM_FOCUS_MODE_CONTI_PIC != cur_focus_mode )&&
- ( CAM_FOCUS_MODE_CONTI_VID != cur_focus_mode )){
- mEnableContiFocus = false;
- ctl.id = V4L2_CID_FOCUS_AUTO;
- ctl.value = CAM_FOCUS_MODE_RELEASE;
- if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
- CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_RELEASE!\n");
- }
- cur_focus_mode_for_conti = CAM_FOCUS_MODE_RELEASE;
- }else if( (CAM_FOCUS_MODE_INFINITY != cur_focus_mode_for_conti)&&
- (CAM_FOCUS_MODE_INFINITY == cur_focus_mode) ){
- mEnableContiFocus = false;
- ctl.id = V4L2_CID_FOCUS_AUTO;
- ctl.value = CAM_FOCUS_MODE_INFINITY;
- if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
- CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_INFINITY!\n");
- }
- cur_focus_mode_for_conti = CAM_FOCUS_MODE_INFINITY;
- }
- }
+ if(CAM_FOCUS_MODE_AUTO != cur_focus_mode_for_conti){
+ struct v4l2_control ctl;
+ if( (CAM_FOCUS_MODE_CONTI_VID != cur_focus_mode_for_conti ) &&
+ ( (CAM_FOCUS_MODE_AUTO == cur_focus_mode )
+ ||( CAM_FOCUS_MODE_CONTI_PIC == cur_focus_mode )
+ ||( CAM_FOCUS_MODE_CONTI_VID == cur_focus_mode ) )){
+ mEnableContiFocus = true;
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_CONTI_VID;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_CONTI_VID!\n");
+ }
+ mFocusWaitCount = FOCUS_PROCESS_FRAMES;
+ bFocusMoveState = true;
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_CONTI_VID;
+ }else if( (CAM_FOCUS_MODE_CONTI_VID != cur_focus_mode_for_conti)&&
+ (CAM_FOCUS_MODE_AUTO != cur_focus_mode) &&
+ ( CAM_FOCUS_MODE_CONTI_PIC != cur_focus_mode )&&
+ ( CAM_FOCUS_MODE_CONTI_VID != cur_focus_mode )){
+ mEnableContiFocus = false;
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_RELEASE;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_RELEASE!\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_RELEASE;
+ }else if( (CAM_FOCUS_MODE_INFINITY != cur_focus_mode_for_conti)&&
+ (CAM_FOCUS_MODE_INFINITY == cur_focus_mode) ){
+ mEnableContiFocus = false;
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_INFINITY;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_INFINITY!\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_INFINITY;
+ }
+ }
}else{
- mEnableContiFocus = false;
- CAMHAL_LOGDA("not support continuous mode!\n");
+ mEnableContiFocus = false;
+ CAMHAL_LOGDA("not support continuous mode!\n");
}
focusarea = mParams.get(CameraParameters::KEY_FOCUS_AREAS);
@@ -666,8 +608,7 @@ status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
}
mParams.getPreviewFpsRange(&min_fps, &max_fps);
- if((min_fps<0)||(max_fps<0)||(max_fps<min_fps))
- {
+ if((min_fps<0)||(max_fps<0)||(max_fps<min_fps)){
rtn = INVALID_OPERATION;
}
@@ -687,18 +628,15 @@ void V4LCameraAdapter::getParameters(CameraParameters& params)
LOG_FUNCTION_NAME_EXIT;
}
-
///API to give the buffers to Adapter
status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
{
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
-
Mutex::Autolock lock(mLock);
- switch(mode)
- {
+ switch(mode){
case CAMERA_PREVIEW:
ret = UseBuffersPreview(bufArr, num);
//maxQueueable = queueable;
@@ -716,7 +654,6 @@ status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, si
}
LOG_FUNCTION_NAME_EXIT;
-
return ret;
}
@@ -737,11 +674,8 @@ status_t V4LCameraAdapter::setBuffersFormat(int width, int height, int pixelform
ret = ioctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
if (ret < 0) {
- CAMHAL_LOGEB("Open: VIDIOC_S_FMT Failed: %s, ret=%d\n",
- strerror(errno), ret);
- return ret;
+ CAMHAL_LOGEB("Open: VIDIOC_S_FMT Failed: %s, ret=%d\n", strerror(errno), ret);
}
-
return ret;
}
@@ -770,39 +704,51 @@ status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
int ret = NO_ERROR;
if(NULL == bufArr)
- {
return BAD_VALUE;
- }
- int width, height;
+ int width, height,k = 0;
mParams.getPreviewSize(&width, &height);
mPreviewWidth = width;
mPreviewHeight = height;
+ mLimittedFrameRate = 0;
+
+ for(k = 0; k<LimittedRate.num; k++){
+ if((mPreviewWidth == LimittedRate.arg[k].width)&&(mPreviewHeight == LimittedRate.arg[k].height)){
+ mLimittedFrameRate = LimittedRate.arg[k].framerate;
+ CAMHAL_LOGVB("UseBuffersPreview, Get the limitted rate: %dx%dx%d", mPreviewWidth, mPreviewHeight, mLimittedFrameRate);
+ break;
+ }
+ }
+
const char *pixfmtchar;
int pixfmt = V4L2_PIX_FMT_NV21;
pixfmtchar = mParams.getPreviewFormat();
if(strcasecmp( pixfmtchar, "yuv420p")==0){
- pixfmt = V4L2_PIX_FMT_YVU420;
- mPixelFormat =CameraFrame::PIXEL_FMT_YV12;
+ pixfmt = V4L2_PIX_FMT_YVU420;
+ mPixelFormat =CameraFrame::PIXEL_FMT_YV12;
}else if(strcasecmp( pixfmtchar, "yuv420sp")==0){
- pixfmt = V4L2_PIX_FMT_NV21;
- mPixelFormat = CameraFrame::PIXEL_FMT_NV21;
+ pixfmt = V4L2_PIX_FMT_NV21;
+ mPixelFormat = CameraFrame::PIXEL_FMT_NV21;
}else if(strcasecmp( pixfmtchar, "yuv422")==0){
- pixfmt = V4L2_PIX_FMT_YUYV;
- mPixelFormat = CameraFrame::PIXEL_FMT_YUYV;
+ pixfmt = V4L2_PIX_FMT_YUYV;
+ mPixelFormat = CameraFrame::PIXEL_FMT_YUYV;
}
+ mSensorFormat = pixfmt;
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- ret = setBuffersFormat(width, height, V4L2_PIX_FMT_YUYV);//
+ if((mUseMJPEG == true)&&(mSupportMJPEG == true)&&(width>=640)&&(height>=480))
+ mSensorFormat = V4L2_PIX_FMT_MJPEG;
+ else
+ mSensorFormat = V4L2_PIX_FMT_YUYV;
+#endif
+ ret = setBuffersFormat(width, height, mSensorFormat);
if( 0 > ret ){
+ CAMHAL_LOGEB("VIDIOC_S_FMT failed: %s", strerror(errno));
return BAD_VALUE;
}
-#else
- setBuffersFormat(width, height, pixfmt);
-#endif
//First allocate adapter internal buffers at V4L level for USB Cam
//These are the buffers from which we will copy the data into overlay buffers
/* Check if camera can handle NB_BUFFER buffers */
@@ -817,19 +763,15 @@ status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
}
for (int i = 0; i < num; i++) {
-
memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
-
mVideoInfo->buf.index = i;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
-
ret = ioctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
if (ret < 0) {
CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
return ret;
}
-
mVideoInfo->mem[i] = mmap (0,
mVideoInfo->buf.length,
PROT_READ | PROT_WRITE,
@@ -843,21 +785,17 @@ status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
}
uint32_t *ptr = (uint32_t*) bufArr;
-
//Associate each Camera internal buffer with the one from Overlay
CAMHAL_LOGDB("mPreviewBufs.add %#x, %d", ptr[i], i);
mPreviewBufs.add((int)ptr[i], i);
-
}
- for(int i = 0;i < num; i++)
- {
+ for(int i = 0;i < num; i++){
mPreviewIdxs.add(mPreviewBufs.valueAt(i),i);
}
// Update the preview buffer count
mPreviewBufferCount = num;
-
return ret;
}
@@ -865,15 +803,12 @@ status_t V4LCameraAdapter::UseBuffersCapture(void* bufArr, int num)
{
int ret = NO_ERROR;
- LOG_FUNCTION_NAME
+ LOG_FUNCTION_NAME;
if(NULL == bufArr)
- {
return BAD_VALUE;
- }
- if (num != 1)
- {
+ if (num != 1){
CAMHAL_LOGDB("num=%d\n", num);
}
@@ -887,7 +822,7 @@ status_t V4LCameraAdapter::UseBuffersCapture(void* bufArr, int num)
mCaptureWidth = width;
mCaptureHeight = height;
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- setBuffersFormat(width, height, V4L2_PIX_FMT_YUYV);
+ mSensorFormat = V4L2_PIX_FMT_YUYV;
#else
if(mIoctlSupport & IOCTL_MASK_ROTATE){
int temp = 0;
@@ -900,8 +835,9 @@ status_t V4LCameraAdapter::UseBuffersCapture(void* bufArr, int num)
height = temp;
}
}
- setBuffersFormat(width, height, DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT);
+ mSensorFormat = DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT;
#endif
+ setBuffersFormat(width, height, mSensorFormat);
//First allocate adapter internal buffers at V4L level for Cam
//These are the buffers from which we will copy the data into display buffers
@@ -917,19 +853,15 @@ status_t V4LCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
for (int i = 0; i < num; i++) {
-
memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
-
mVideoInfo->buf.index = i;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
-
ret = ioctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
if (ret < 0) {
CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
return ret;
}
-
mVideoInfo->mem[i] = mmap (0,
mVideoInfo->buf.length,
PROT_READ | PROT_WRITE,
@@ -947,7 +879,7 @@ status_t V4LCameraAdapter::UseBuffersCapture(void* bufArr, int num)
CAMHAL_LOGDB("UseBuffersCapture %#x", ptr[0]);
}
- LOG_FUNCTION_NAME_EXIT
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -968,29 +900,27 @@ int V4LCameraAdapter::beginAutoFocusThread(void *cookie)
int ret = -1;
if( c->mIoctlSupport & IOCTL_MASK_FOCUS){
- ctl.id = V4L2_CID_FOCUS_AUTO;
- ctl.value = CAM_FOCUS_MODE_AUTO;//c->cur_focus_mode;
- ret = ioctl(c->mCameraHandle, VIDIOC_S_CTRL, &ctl);
- for(int j=0; j<50; j++){
- usleep(30000);//30*50ms=1.5s
- ret = ioctl(c->mCameraHandle, VIDIOC_G_CTRL, &ctl);
- if( (0==ret) ||
- ((ret < 0)&&(EBUSY != errno)) ){
- break;
- }
- }
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_AUTO;//c->cur_focus_mode;
+ ret = ioctl(c->mCameraHandle, VIDIOC_S_CTRL, &ctl);
+ for(int j=0; j<50; j++){
+ usleep(30000);//30*50ms=1.5s
+ ret = ioctl(c->mCameraHandle, VIDIOC_G_CTRL, &ctl);
+ if( (0==ret) || ((ret < 0)&&(EBUSY != errno)) ){
+ break;
+ }
+ }
}
c->setState(CAMERA_CANCEL_AUTOFOCUS);
c->commitState();
- if( (c->mIoctlSupport & IOCTL_MASK_FLASH)
- &&(FLASHLIGHT_ON == c->mFlashMode)){
- set_flash_mode( c->mCameraHandle, "off");
+ if( (c->mIoctlSupport & IOCTL_MASK_FLASH)&&(FLASHLIGHT_ON == c->mFlashMode)){
+ set_flash_mode( c->mCameraHandle, "off");
}
if(ret < 0) {
- if( c->mIoctlSupport & IOCTL_MASK_FOCUS){
- CAMHAL_LOGDA("AUTO FOCUS Failed");
+ if( c->mIoctlSupport & IOCTL_MASK_FOCUS){
+ CAMHAL_LOGDA("AUTO FOCUS Failed");
}
c->notifyFocusSubscribers(false);
} else {
@@ -1003,25 +933,21 @@ int V4LCameraAdapter::beginAutoFocusThread(void *cookie)
status_t V4LCameraAdapter::autoFocus()
{
status_t ret = NO_ERROR;
-
LOG_FUNCTION_NAME;
- if( (mIoctlSupport & IOCTL_MASK_FLASH)
- &&(FLASHLIGHT_ON == mFlashMode)){
- set_flash_mode( mCameraHandle, "on");
+ if( (mIoctlSupport & IOCTL_MASK_FLASH)&&(FLASHLIGHT_ON == mFlashMode)){
+ set_flash_mode( mCameraHandle, "on");
}
cur_focus_mode_for_conti = CAM_FOCUS_MODE_AUTO;
- if (createThread(beginAutoFocusThread, this) == false)
- {
+
+ if (createThread(beginAutoFocusThread, this) == false){
ret = UNKNOWN_ERROR;
}
LOG_FUNCTION_NAME_EXIT;
-
return ret;
}
-
status_t V4LCameraAdapter::cancelAutoFocus()
{
status_t ret = NO_ERROR;
@@ -1030,7 +956,7 @@ status_t V4LCameraAdapter::cancelAutoFocus()
struct v4l2_control ctl;
if( (mIoctlSupport & IOCTL_MASK_FOCUS) == 0x00 ){
- return 0;
+ return 0;
}
if ( !mEnableContiFocus){
@@ -1038,27 +964,26 @@ status_t V4LCameraAdapter::cancelAutoFocus()
ctl.value = CAM_FOCUS_MODE_RELEASE;
ret = ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl);
if(ret < 0) {
- CAMHAL_LOGDA("AUTO FOCUS Failed");
+ CAMHAL_LOGDA("AUTO FOCUS Failed");
}
}else if( CAM_FOCUS_MODE_AUTO == cur_focus_mode_for_conti){
- if(CAM_FOCUS_MODE_INFINITY != cur_focus_mode){
- ctl.id = V4L2_CID_FOCUS_AUTO;
- ctl.value = CAM_FOCUS_MODE_CONTI_VID;
- if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
- CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_CONTI_VID\n");
- }
- cur_focus_mode_for_conti = CAM_FOCUS_MODE_CONTI_VID;
- }else{
- ctl.id = V4L2_CID_FOCUS_AUTO;
- ctl.value = CAM_FOCUS_MODE_INFINITY;
- if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
- CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_INFINITY\n");
- }
- cur_focus_mode_for_conti = CAM_FOCUS_MODE_INFINITY;
- }
+ if(CAM_FOCUS_MODE_INFINITY != cur_focus_mode){
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_CONTI_VID;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_CONTI_VID\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_CONTI_VID;
+ }else{
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_INFINITY;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_INFINITY\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_INFINITY;
+ }
}
LOG_FUNCTION_NAME_EXIT;
-
return ret;
}
@@ -1069,15 +994,13 @@ status_t V4LCameraAdapter::startPreview()
void *frame_buf = NULL;
Mutex::Autolock lock(mPreviewBufsLock);
- if(mPreviewing)
- {
+ if(mPreviewing){
return BAD_VALUE;
}
#ifndef AMLOGIC_USB_CAMERA_SUPPORT
-
setMirrorEffect();
-
+
if(mIoctlSupport & IOCTL_MASK_ROTATE){
if(mPreviewOriation!=0) {
set_rotate_value(mCameraHandle,mPreviewOriation);
@@ -1090,8 +1013,7 @@ status_t V4LCameraAdapter::startPreview()
#endif
nQueued = 0;
- for (int i = 0; i < mPreviewBufferCount; i++)
- {
+ for (int i = 0; i < mPreviewBufferCount; i++){
frame_count = -1;
frame_buf = (void *)mPreviewBufs.keyAt(i);
@@ -1124,8 +1046,7 @@ status_t V4LCameraAdapter::startPreview()
}
enum v4l2_buf_type bufType;
- if (!mVideoInfo->isStreaming)
- {
+ if (!mVideoInfo->isStreaming){
bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
gettimeofday( &previewTime1, NULL);
@@ -1135,20 +1056,19 @@ status_t V4LCameraAdapter::startPreview()
CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
return ret;
}
-
mVideoInfo->isStreaming = true;
}
if( mEnableContiFocus &&
- (CAM_FOCUS_MODE_AUTO != cur_focus_mode_for_conti) &&
- (CAM_FOCUS_MODE_INFINITY != cur_focus_mode_for_conti)){
- struct v4l2_control ctl;
- ctl.id = V4L2_CID_FOCUS_AUTO;
- ctl.value = CAM_FOCUS_MODE_CONTI_VID;
- if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
- CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_CONTI_VID!\n");
- }
- cur_focus_mode_for_conti = CAM_FOCUS_MODE_CONTI_VID;
+ (CAM_FOCUS_MODE_AUTO != cur_focus_mode_for_conti) &&
+ (CAM_FOCUS_MODE_INFINITY != cur_focus_mode_for_conti)){
+ struct v4l2_control ctl;
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_CONTI_VID;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_CONTI_VID!\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_CONTI_VID;
}
// Create and start preview thread for receiving buffers from V4L Camera
mPreviewThread = new PreviewThread(this);
@@ -1170,11 +1090,10 @@ status_t V4LCameraAdapter::stopPreview()
enum v4l2_buf_type bufType;
int ret = NO_ERROR;
- LOG_FUNCTION_NAME
+ LOG_FUNCTION_NAME;
Mutex::Autolock lock(mPreviewBufsLock);
- if(!mPreviewing)
- {
- return NO_INIT;
+ if(!mPreviewing){
+ return NO_INIT;
}
mPreviewing = false;
@@ -1182,16 +1101,13 @@ status_t V4LCameraAdapter::stopPreview()
mPreviewThread->requestExitAndWait();
mPreviewThread.clear();
-
if (mVideoInfo->isStreaming) {
bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-
ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
if (ret < 0) {
CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
return ret;
}
-
mVideoInfo->isStreaming = false;
}
@@ -1202,15 +1118,15 @@ status_t V4LCameraAdapter::stopPreview()
nDequeued = 0;
if( mEnableContiFocus &&
- (CAM_FOCUS_MODE_AUTO != cur_focus_mode_for_conti) &&
- (CAM_FOCUS_MODE_INFINITY != cur_focus_mode_for_conti)){
- struct v4l2_control ctl;
- ctl.id = V4L2_CID_FOCUS_AUTO;
- ctl.value = CAM_FOCUS_MODE_RELEASE;
- if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
- CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_RELEASE!\n");
- }
- cur_focus_mode_for_conti = CAM_FOCUS_MODE_RELEASE;
+ (CAM_FOCUS_MODE_AUTO != cur_focus_mode_for_conti) &&
+ (CAM_FOCUS_MODE_INFINITY != cur_focus_mode_for_conti)){
+ struct v4l2_control ctl;
+ ctl.id = V4L2_CID_FOCUS_AUTO;
+ ctl.value = CAM_FOCUS_MODE_RELEASE;
+ if(ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl)<0){
+ CAMHAL_LOGDA("failed to set CAM_FOCUS_MODE_RELEASE!\n");
+ }
+ cur_focus_mode_for_conti = CAM_FOCUS_MODE_RELEASE;
}
/* Unmap buffers */
@@ -1220,7 +1136,6 @@ status_t V4LCameraAdapter::stopPreview()
}
}
-
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
@@ -1228,24 +1143,22 @@ status_t V4LCameraAdapter::stopPreview()
ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
if (ret < 0) {
- CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
- return ret;
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ return ret;
}else{
- CAMHAL_LOGDA("VIDIOC_REQBUFS delete buffer success\n");
+ CAMHAL_LOGDA("VIDIOC_REQBUFS delete buffer success\n");
}
#endif
mPreviewBufs.clear();
mPreviewIdxs.clear();
- LOG_FUNCTION_NAME_EXIT
+ LOG_FUNCTION_NAME_EXIT;
return ret;
-
}
char * V4LCameraAdapter::GetFrame(int &index)
{
int ret;
-
if(nQueued<=0){
CAMHAL_LOGEA("GetFrame: No buff for Dequeue");
return NULL;
@@ -1267,17 +1180,16 @@ char * V4LCameraAdapter::GetFrame(int &index)
mErrorNotifier->errorNotify(CAMERA_ERROR_SOFT);
}
#endif
- if(EAGAIN == errno){
- index = -1;
- }else{
- CAMHAL_LOGEB("GetFrame: VIDIOC_DQBUF Failed,errno=%d\n",errno);
- }
+ if(EAGAIN == errno){
+ index = -1;
+ }else{
+ CAMHAL_LOGEB("GetFrame: VIDIOC_DQBUF Failed,errno=%d\n",errno);
+ }
return NULL;
}
nDequeued++;
nQueued--;
index = mVideoInfo->buf.index;
-
return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
}
@@ -1288,10 +1200,7 @@ status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
status_t ret = NO_ERROR;
// Just return the current preview size, nothing more to do here.
- mParams.getPreviewSize(( int * ) &width,
- ( int * ) &height);
-
- LOG_FUNCTION_NAME_EXIT;
+ mParams.getPreviewSize(( int * ) &width, ( int * ) &height);
return ret;
}
@@ -1339,31 +1248,23 @@ static void debugShowFPS()
status_t V4LCameraAdapter::recalculateFPS()
{
float currentFPS;
-
mFrameCount++;
-
- if ( ( mFrameCount % FPS_PERIOD ) == 0 )
- {
+ if ( ( mFrameCount % FPS_PERIOD ) == 0 ){
nsecs_t now = systemTime();
nsecs_t diff = now - mLastFPSTime;
currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
mLastFPSTime = now;
mLastFrameCount = mFrameCount;
- if ( 1 == mIter )
- {
+ if ( 1 == mIter ){
mFPS = currentFPS;
- }
- else
- {
+ }else{
//cumulative moving average
mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter;
}
-
mLastFPS = mFPS;
mIter++;
}
-
return NO_ERROR;
}
@@ -1378,7 +1279,6 @@ void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index)
{
LOG_FUNCTION_NAME;
-
mbDisableMirror = false;
mSensorIndex = sensor_index;
mPreviewOriation=0;
@@ -1394,11 +1294,10 @@ V4LCameraAdapter::~V4LCameraAdapter()
close(mCameraHandle);
#ifdef AMLOGIC_TWO_CH_UVC
if(mCamEncodeHandle > 0)
- close(mCamEncodeHandle);
+ close(mCamEncodeHandle);
#endif
- if (mVideoInfo)
- {
+ if (mVideoInfo){
free(mVideoInfo);
mVideoInfo = NULL;
}
@@ -1415,20 +1314,24 @@ int V4LCameraAdapter::previewThread()
int width, height;
CameraFrame frame;
unsigned delay;
- unsigned previewframeduration = 0;
+ int previewframeduration = 0;
int active_duration = 0;
uint8_t* ptr = NULL;
bool noFrame = true;
if (mPreviewing){
int index = -1;
- previewframeduration = (unsigned)(1000000.0f / float(mParams.getPreviewFrameRate()));
+ if((mLimittedFrameRate!=0)&&(mLimittedFrameRate<mParams.getPreviewFrameRate()))
+ previewframeduration = (unsigned)(1000000.0f / float(mLimittedFrameRate));
+ else
+ previewframeduration = (unsigned)(1000000.0f / float(mParams.getPreviewFrameRate()));
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
delay = previewframeduration>>2;
#else
delay = previewframeduration;
#endif
- usleep(delay);
+ if(mSensorFormat != V4L2_PIX_FMT_MJPEG)
+ usleep(delay);
char *fp = this->GetFrame(index);
@@ -1437,11 +1340,13 @@ int V4LCameraAdapter::previewThread()
}else{
noFrame = false;
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- if(mVideoInfo->buf.length != mVideoInfo->buf.bytesused){
- fillThisBuffer((uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(index)), CameraFrame::PREVIEW_FRAME_SYNC);
- CAMHAL_LOGDB("length=%d bytesused=%d index=%d\n", mVideoInfo->buf.length, mVideoInfo->buf.bytesused, index);
- noFrame = true;
- index = -1;
+ if(mSensorFormat != V4L2_PIX_FMT_MJPEG){
+ if(mVideoInfo->buf.length != mVideoInfo->buf.bytesused){
+ fillThisBuffer((uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(index)), CameraFrame::PREVIEW_FRAME_SYNC);
+ CAMHAL_LOGDB("length=%d bytesused=%d index=%d\n", mVideoInfo->buf.length, mVideoInfo->buf.bytesused, index);
+ noFrame = true;
+ index = -1;
+ }
}
#endif
}
@@ -1483,7 +1388,6 @@ int V4LCameraAdapter::previewThread()
memcpy( &previewTime1, &previewTime2, sizeof( struct timeval));
active_duration = mFrameInv - mFrameInvAdjust;
-
if((mFrameInv >= 20000) //the interval between two frame more than 20 ms for cts
&&((active_duration>previewframeduration)||((active_duration + 5000)>previewframeduration))){ // more preview duration -5000 us
if(noFrame == false){ //current catch a picture,use it and release tmp buf;
@@ -1494,6 +1398,7 @@ int V4LCameraAdapter::previewThread()
}else if(mCache.index != -1){ //current catch no picture,but have a tmp buf;
fp = mCache.bufPtr;
ptr = (uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(mCache.index));
+ index = mCache.index;
mCache.index = -1;
}else{
return 0;
@@ -1537,29 +1442,37 @@ int V4LCameraAdapter::previewThread()
height = mPreviewHeight;
}
- if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ // 422I
- frame.mLength = width*height*2;
- memcpy(dest,src,frame.mLength);
- }else if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_NV21){ //420sp
+ if(mSensorFormat == V4L2_PIX_FMT_MJPEG){ //enable mjpeg
+ if(jpeg_decode(&dest,src,width,height, ( CameraFrame::PIXEL_FMT_NV21 == mPixelFormat)?V4L2_PIX_FMT_NV21:V4L2_PIX_FMT_YVU420) != 0){ // output format is nv21
+ fillThisBuffer((uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(index)), CameraFrame::PREVIEW_FRAME_SYNC);
+ //CAMHAL_LOGEA("jpeg decode failed");
+ return -1;
+ }
frame.mLength = width*height*3/2;
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- if ( CameraFrame::PIXEL_FMT_NV21 == mPixelFormat){
- //convert yuyv to nv21
- yuyv422_to_nv21(src,dest,width,height);
}else{
- yuyv_to_yv12( src, dest, width, height);
- }
+ if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ // 422I
+ frame.mLength = width*height*2;
+ memcpy(dest,src,frame.mLength);
+ }else if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_NV21){ //420sp
+ frame.mLength = width*height*3/2;
+#ifdef AMLOGIC_USB_CAMERA_SUPPORT
+ if ( CameraFrame::PIXEL_FMT_NV21 == mPixelFormat){
+ //convert yuyv to nv21
+ yuyv422_to_nv21(src,dest,width,height);
+ }else{
+ yuyv_to_yv12( src, dest, width, height);
+ }
#else
- if ( CameraFrame::PIXEL_FMT_NV21 == mPixelFormat){
- memcpy(dest,src,frame.mLength);
- }else{
- yv12_adjust_memcpy(dest,src,width,height);
- }
+ if ( CameraFrame::PIXEL_FMT_NV21 == mPixelFormat){
+ memcpy(dest,src,frame.mLength);
+ }else{
+ yv12_adjust_memcpy(dest,src,width,height);
+ }
#endif
- }else{ //default case
-
- frame.mLength = width*height*3/2;
- memcpy(dest,src,frame.mLength);
+ }else{ //default case
+ frame.mLength = width*height*3/2;
+ memcpy(dest,src,frame.mLength);
+ }
}
frame.mFrameMask |= CameraFrame::PREVIEW_FRAME_SYNC;
@@ -1579,9 +1492,9 @@ int V4LCameraAdapter::previewThread()
ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
if (ret){
CAMHAL_LOGEB("setInitFrameRefCount err=%d", ret);
- }else
+ }else{
ret = sendFrameToSubscribers(&frame);
- //LOGD("previewThread /sendFrameToSubscribers ret=%d", ret);
+ }
}
if( (mIoctlSupport & IOCTL_MASK_FOCUS_MOVE) && mFocusMoveEnabled ){
getFocusMoveStatus();
@@ -1614,7 +1527,6 @@ int V4LCameraAdapter::GenExif(ExifElementsTable* exiftable)
else if(orientation == 270)
orientation = 8;
-
//Image width,height
int width,height;
if((mCaptureWidth <= 0)||(mCaptureHeight <= 0)){
@@ -1646,8 +1558,7 @@ int V4LCameraAdapter::GenExif(ExifElementsTable* exiftable)
//focal length RATIONAL
float focallen = mParams.getFloat(CameraParameters::KEY_FOCAL_LENGTH);
- if(focallen >= 0)
- {
+ if(focallen >= 0){
int focalNum = focallen*1000;
int focalDen = 1000;
sprintf(exifcontent,"%d/%d",focalNum,focalDen);
@@ -1668,8 +1579,7 @@ int V4LCameraAdapter::GenExif(ExifElementsTable* exiftable)
//gps date stamp & time stamp
times = mParams.getInt(CameraParameters::KEY_GPS_TIMESTAMP);
- if(times != -1)
- {
+ if(times != -1){
struct tm tmstruct;
tmstruct = *(gmtime(&times));//convert to standard time
//date
@@ -1682,12 +1592,10 @@ int V4LCameraAdapter::GenExif(ExifElementsTable* exiftable)
//gps latitude info
char* latitudestr = (char*)mParams.get(CameraParameters::KEY_GPS_LATITUDE);
- if(latitudestr!=NULL)
- {
+ if(latitudestr!=NULL){
int offset = 0;
float latitude = mParams.getFloat(CameraParameters::KEY_GPS_LATITUDE);
- if(latitude < 0.0)
- {
+ if(latitude < 0.0){
offset = 1;
latitude*= (float)(-1);
}
@@ -1699,18 +1607,15 @@ int V4LCameraAdapter::GenExif(ExifElementsTable* exiftable)
int latituseconds_int = latituseconds;
sprintf(exifcontent,"%d/%d,%d/%d,%d/%d",latitudedegree,1,latitudeminuts_int,1,latituseconds_int,1);
exiftable->insertElement("GPSLatitude",(const char*)exifcontent);
-
exiftable->insertElement("GPSLatitudeRef",(offset==1)?"S":"N");
}
//gps Longitude info
char* longitudestr = (char*)mParams.get(CameraParameters::KEY_GPS_LONGITUDE);
- if(longitudestr!=NULL)
- {
+ if(longitudestr!=NULL){
int offset = 0;
float longitude = mParams.getFloat(CameraParameters::KEY_GPS_LONGITUDE);
- if(longitude < 0.0)
- {
+ if(longitude < 0.0){
offset = 1;
longitude*= (float)(-1);
}
@@ -1722,18 +1627,15 @@ int V4LCameraAdapter::GenExif(ExifElementsTable* exiftable)
int longitudeseconds_int = longitudeseconds;
sprintf(exifcontent,"%d/%d,%d/%d,%d/%d",longitudedegree,1,longitudeminuts_int,1,longitudeseconds_int,1);
exiftable->insertElement("GPSLongitude",(const char*)exifcontent);
-
exiftable->insertElement("GPSLongitudeRef",(offset==1)?"S":"N");
}
//gps Altitude info
char* altitudestr = (char*)mParams.get(CameraParameters::KEY_GPS_ALTITUDE);
- if(altitudestr!=NULL)
- {
+ if(altitudestr!=NULL){
int offset = 0;
float altitude = mParams.getFloat(CameraParameters::KEY_GPS_ALTITUDE);
- if(altitude < 0.0)
- {
+ if(altitude < 0.0){
offset = 1;
altitude*= (float)(-1);
}
@@ -1742,15 +1644,13 @@ int V4LCameraAdapter::GenExif(ExifElementsTable* exiftable)
int altitudedec= 1000;
sprintf(exifcontent,"%d/%d",altitudenum,altitudedec);
exiftable->insertElement("GPSAltitude",(const char*)exifcontent);
-
sprintf(exifcontent,"%d",offset);
exiftable->insertElement("GPSAltitudeRef",(const char*)exifcontent);
}
//gps processing method
char* processmethod = (char*)mParams.get(CameraParameters::KEY_GPS_PROCESSING_METHOD);
- if(processmethod!=NULL)
- {
+ if(processmethod!=NULL){
memset(exifcontent,0,sizeof(exifcontent));
char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };//asicii
memcpy(exifcontent,ExifAsciiPrefix,8);
@@ -1777,12 +1677,10 @@ int V4LCameraAdapter::pictureThread()
setMirrorEffect();
#endif
- if( (mIoctlSupport & IOCTL_MASK_FLASH)
- &&(FLASHLIGHT_ON == mFlashMode)){
- set_flash_mode( mCameraHandle, "on");
+ if( (mIoctlSupport & IOCTL_MASK_FLASH)&&(FLASHLIGHT_ON == mFlashMode)){
+ set_flash_mode( mCameraHandle, "on");
}
- if (true)
- {
+ if (true){
mVideoInfo->buf.index = 0;
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
@@ -1794,8 +1692,7 @@ int V4LCameraAdapter::pictureThread()
}
#endif
ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
- if (ret < 0)
- {
+ if (ret < 0){
CAMHAL_LOGEA("VIDIOC_QBUF Failed");
return -EINVAL;
}
@@ -1813,16 +1710,13 @@ int V4LCameraAdapter::pictureThread()
#endif
enum v4l2_buf_type bufType;
- if (!mVideoInfo->isStreaming)
- {
+ if (!mVideoInfo->isStreaming){
bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-
ret = ioctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
if (ret < 0) {
CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
return ret;
}
-
mVideoInfo->isStreaming = true;
}
@@ -1830,49 +1724,45 @@ int V4LCameraAdapter::pictureThread()
char *fp = this->GetFrame(index);
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
while((mVideoInfo->buf.length != mVideoInfo->buf.bytesused)&&(dqTryNum>0)){
- if(NULL != fp){
- mVideoInfo->buf.index = 0;
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
-
-
- if(mIsDequeuedEIOError){
- CAMHAL_LOGEA("DQBUF EIO has occured!\n");
- break;
- }
-
- ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
- if (ret < 0)
- {
- CAMHAL_LOGEB("VIDIOC_QBUF Failed errno=%d\n", errno);
- break;
- }
- nQueued ++;
- dqTryNum --;
- }
+ if(NULL != fp){
+ mVideoInfo->buf.index = 0;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ if(mIsDequeuedEIOError){
+ CAMHAL_LOGEA("DQBUF EIO has occured!\n");
+ break;
+ }
+
+ ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ if (ret < 0){
+ CAMHAL_LOGEB("VIDIOC_QBUF Failed errno=%d\n", errno);
+ break;
+ }
+ nQueued ++;
+ dqTryNum --;
+ }
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
- usleep( 10000 );
+ usleep( 10000 );
#endif
- fp = this->GetFrame(index);
+ fp = this->GetFrame(index);
}
#endif
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
- while(!fp && (-1 == index) ){
- usleep( 10000 );
- fp = this->GetFrame(index);
- }
+ while(!fp && (-1 == index) ){
+ usleep( 10000 );
+ fp = this->GetFrame(index);
+ }
#else
- if(!fp)
- {
- CAMHAL_LOGDA("GetFrame fail, this may stop preview\n");
- return 0; //BAD_VALUE;
- }
+ if(!fp){
+ CAMHAL_LOGDA("GetFrame fail, this may stop preview\n");
+ return 0; //BAD_VALUE;
+ }
#endif
- if (!mCaptureBuf || !mCaptureBuf->data)
- {
- return 0; //BAD_VALUE;
+ if (!mCaptureBuf || !mCaptureBuf->data){
+ return 0; //BAD_VALUE;
}
int width, height;
@@ -1899,6 +1789,9 @@ int V4LCameraAdapter::pictureThread()
mCaptureBuf, dest, fp,index, width, height,
mVideoInfo->buf.length, mVideoInfo->buf.bytesused);
+ //if(mSensorFormat == V4L2_PIX_FMT_MIPEG){
+ // memcpy(dest,src,mVideoInfo->buf.length);
+ //}else
if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_RGB24){ // rgb24
frame.mLength = width*height*3;
frame.mQuirks = CameraFrame::ENCODE_RAW_RGB24_TO_JPEG | CameraFrame::HAS_EXIF_DATA;
@@ -1948,16 +1841,13 @@ int V4LCameraAdapter::pictureThread()
frame.mHeight = height;
frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
- if (mVideoInfo->isStreaming)
- {
+ if (mVideoInfo->isStreaming){
bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
- if (ret < 0)
- {
+ if (ret < 0){
CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
return ret;
}
-
mVideoInfo->isStreaming = false;
}
@@ -1972,25 +1862,23 @@ int V4LCameraAdapter::pictureThread()
CAMHAL_LOGEA("Unmap failed");
}
-
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
- mVideoInfo->rb.count = 0;
-
- ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
- if (ret < 0) {
- CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
- return ret;
- }else{
- CAMHAL_LOGDA("VIDIOC_REQBUFS delete buffer success\n");
- }
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = 0;
+
+ ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ return ret;
+ }else{
+ CAMHAL_LOGDA("VIDIOC_REQBUFS delete buffer success\n");
+ }
#endif
}
- if( (mIoctlSupport & IOCTL_MASK_FLASH)
- &&(FLASHLIGHT_ON == mFlashMode)){
- set_flash_mode( mCameraHandle, "off");
+ if( (mIoctlSupport & IOCTL_MASK_FLASH)&&(FLASHLIGHT_ON == mFlashMode)){
+ set_flash_mode( mCameraHandle, "off");
}
#ifndef AMLOGIC_USB_CAMERA_SUPPORT
if(mIoctlSupport & IOCTL_MASK_ROTATE){
@@ -2012,13 +1900,11 @@ int V4LCameraAdapter::pictureThread()
}else{
ret = sendFrameToSubscribers(&frame);
}
- //LOGD("pictureThread /sendFrameToSubscribers ret=%d", ret);
-
return ret;
}
-
-status_t V4LCameraAdapter::disableMirror(bool bDisable) {
+status_t V4LCameraAdapter::disableMirror(bool bDisable)
+{
CAMHAL_LOGDB("disableMirror %d\n",bDisable);
mbDisableMirror = bDisable;
setMirrorEffect();
@@ -2027,7 +1913,6 @@ status_t V4LCameraAdapter::disableMirror(bool bDisable) {
status_t V4LCameraAdapter::setMirrorEffect() {
#ifndef AMLOGIC_USB_CAMERA_SUPPORT
-
bool bEnable = mbFrontCamera&&(!mbDisableMirror);
CAMHAL_LOGDB("setmirror effect %d",bEnable);
@@ -2041,8 +1926,6 @@ status_t V4LCameraAdapter::setMirrorEffect() {
return NO_ERROR;
}
-
-
// ---------------------------------------------------------------------------
extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
{
@@ -2052,10 +1935,9 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
LOG_FUNCTION_NAME;
#ifdef AMLOGIC_VIRTUAL_CAMERA_SUPPORT
-
if( sensor_index == (size_t)(iCamerasNum)){
- //MAX_CAM_NUM_ADD_VCAM-1) ){
- adapter = new V4LCamAdpt(sensor_index);
+ //MAX_CAM_NUM_ADD_VCAM-1) ){
+ adapter = new V4LCamAdpt(sensor_index);
}else{
#endif
adapter = new V4LCameraAdapter(sensor_index);
@@ -2070,7 +1952,6 @@ extern "C" CameraAdapter* CameraAdapter_Factory(size_t sensor_index)
}
LOG_FUNCTION_NAME_EXIT;
-
return adapter;
}
@@ -2083,12 +1964,9 @@ extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properti
LOG_FUNCTION_NAME;
if(!properties_array)
- {
return -EINVAL;
- }
- while (starting_camera + num_cameras_supported < camera_num)
- {
+ while (starting_camera + num_cameras_supported < camera_num){
properties = properties_array + starting_camera + num_cameras_supported;
properties->set(CameraProperties::CAMERA_NAME, "Camera");
extern void loadCaps(int camera_id, CameraProperties::Properties* params);
@@ -2097,7 +1975,6 @@ extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properti
}
LOG_FUNCTION_NAME_EXIT;
-
return num_cameras_supported;
}
@@ -2112,43 +1989,31 @@ extern "C" int CameraAdapter_CameraNum()
#endif
#elif defined ( AMLOGIC_VIRTUAL_CAMERA_SUPPORT)
iCamerasNum = 0;
- for( int i = 0; i < (int)ARRAY_SIZE(SENSOR_PATH); i++ )
- {
- if( access(DEVICE_PATH(i), 0) == 0 )
- {
- iCamerasNum++;
- }
+ for( int i = 0; i < (int)ARRAY_SIZE(SENSOR_PATH); i++ ){
+ if( access(DEVICE_PATH(i), 0) == 0 )
+ iCamerasNum++;
}
CAMHAL_LOGDB("GetCameraNums %d\n", iCamerasNum+1);
return iCamerasNum+1;
#elif defined (AMLOGIC_USB_CAMERA_SUPPORT)
iCamerasNum = 0;
- for( int i = 0; i < (int)ARRAY_SIZE(SENSOR_PATH); i++ )
- {
- if( access(DEVICE_PATH(i), 0) == 0 )
- {
- iCamerasNum++;
- }
+ for( int i = 0; i < (int)ARRAY_SIZE(SENSOR_PATH); i++ ){
+ if( access(DEVICE_PATH(i), 0) == 0 )
+ iCamerasNum++;
}
- iCamerasNum = iCamerasNum > MAX_CAMERAS_SUPPORTED?
- MAX_CAMERAS_SUPPORTED :iCamerasNum;
+ iCamerasNum = iCamerasNum > MAX_CAMERAS_SUPPORTED?MAX_CAMERAS_SUPPORTED :iCamerasNum;
return iCamerasNum;
#else
CAMHAL_LOGDB("CameraAdapter_CameraNum %d",iCamerasNum);
- if(iCamerasNum == -1)
- {
+ if(iCamerasNum == -1){
iCamerasNum = 0;
- for(int i = 0;i < MAX_CAMERAS_SUPPORTED;i++)
- {
+ for(int i = 0;i < MAX_CAMERAS_SUPPORTED;i++){
if( access(DEVICE_PATH(i), 0) == 0 )
- {
iCamerasNum++;
- }
}
CAMHAL_LOGDB("GetCameraNums %d",iCamerasNum);
}
-
return iCamerasNum;
#endif
}
@@ -2156,90 +2021,102 @@ extern "C" int CameraAdapter_CameraNum()
#ifdef AMLOGIC_TWO_CH_UVC
extern "C" bool isPreviewDevice(int camera_fd)
{
- int ret;
- int index;
- struct v4l2_fmtdesc fmtdesc;
-
- for(index=0;;index++){
- memset(&fmtdesc, 0, sizeof(struct v4l2_fmtdesc));
- fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- fmtdesc.index = index;
- ret = ioctl( camera_fd, VIDIOC_ENUM_FMT, &fmtdesc);
- if(V4L2_PIX_FMT_YUYV==fmtdesc.pixelformat){
- return true;
- }
- if(ret < 0)
- break;
- }
+ int ret;
+ int index;
+ struct v4l2_fmtdesc fmtdesc;
- return false;
+ for(index=0;;index++){
+ memset(&fmtdesc, 0, sizeof(struct v4l2_fmtdesc));
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ fmtdesc.index = index;
+ ret = ioctl( camera_fd, VIDIOC_ENUM_FMT, &fmtdesc);
+ if(V4L2_PIX_FMT_YUYV==fmtdesc.pixelformat){
+ return true;
+ }
+ if(ret < 0)
+ break;
+ }
+ return false;
}
+
extern "C" status_t getVideodevId(int &camera_id, int &main_id)
{
- int tmp_id = camera_id;
- int tmp_fd = -1;
- int suc_id = -1;
- int camera_fd = -1;
- int ret = NO_ERROR;
- char cardname[32]="";
- char cardname2[32]="";
- struct v4l2_capability cap;
- bool needPreviewCh=false;
- while(1){
- if ((tmp_fd = open(DEVICE_PATH(tmp_id), O_RDWR)) != -1)
- {
- if(isPreviewDevice(tmp_fd)){
- if(needPreviewCh){
- memset(&cap, 0, sizeof(struct v4l2_capability));
- ret = ioctl(tmp_fd,VIDIOC_QUERYCAP,&cap);
- if(ret < 0){
- CAMHAL_LOGDB("failed to query %s !\n", DEVICE_PATH(tmp_id));
- }
- strncpy(cardname2,(char *)cap.card, sizeof(cardname2));
- if(strcmp(cardname, cardname2)==0){
- close(tmp_fd);
- camera_id = tmp_id;
- return NO_ERROR;
- }
- suc_id = tmp_id;
- close(tmp_fd);
- }else{
- close(tmp_fd);
- camera_id = tmp_id;
- return NO_ERROR;
- }
- }else{
- main_id = tmp_id;
- needPreviewCh = true;
- memset(&cap, 0, sizeof(struct v4l2_capability));
- ret = ioctl(tmp_fd,VIDIOC_QUERYCAP,&cap);
- if(ret < 0){
- CAMHAL_LOGDB("failed to query %s !\n", DEVICE_PATH(tmp_id));
- }
- strncpy(cardname,(char *)cap.card, sizeof(cardname));
- CAMHAL_LOGDB("%s for main channel!\n", DEVICE_PATH(tmp_id));
- close(tmp_fd);
- }
- }
- tmp_id++;
- tmp_id%= ARRAY_SIZE(SENSOR_PATH);
- if(tmp_id ==camera_id){
- needPreviewCh = false;
-
- camera_id = suc_id;
- return NO_ERROR;
- }
- }
- return NO_ERROR;
+ int tmp_id = camera_id;
+ int tmp_fd = -1;
+ int suc_id = -1;
+ int camera_fd = -1;
+ int ret = NO_ERROR;
+ char cardname[32]="";
+ char cardname2[32]="";
+ struct v4l2_capability cap;
+ bool needPreviewCh=false;
+ while(1){
+ if ((tmp_fd = open(DEVICE_PATH(tmp_id), O_RDWR)) != -1){
+ if(isPreviewDevice(tmp_fd)){
+ if(needPreviewCh){
+ memset(&cap, 0, sizeof(struct v4l2_capability));
+ ret = ioctl(tmp_fd,VIDIOC_QUERYCAP,&cap);
+ if(ret < 0){
+ CAMHAL_LOGDB("failed to query %s !\n", DEVICE_PATH(tmp_id));
+ }
+ strncpy(cardname2,(char *)cap.card, sizeof(cardname2));
+ if(strcmp(cardname, cardname2)==0){
+ close(tmp_fd);
+ camera_id = tmp_id;
+ return NO_ERROR;
+ }
+ suc_id = tmp_id;
+ close(tmp_fd);
+ }else{
+ close(tmp_fd);
+ camera_id = tmp_id;
+ return NO_ERROR;
+ }
+ }else{
+ main_id = tmp_id;
+ needPreviewCh = true;
+ memset(&cap, 0, sizeof(struct v4l2_capability));
+ ret = ioctl(tmp_fd,VIDIOC_QUERYCAP,&cap);
+ if(ret < 0){
+ CAMHAL_LOGDB("failed to query %s !\n", DEVICE_PATH(tmp_id));
+ }
+ strncpy(cardname,(char *)cap.card, sizeof(cardname));
+ CAMHAL_LOGDB("%s for main channel!\n", DEVICE_PATH(tmp_id));
+ close(tmp_fd);
+ }
+ }
+ tmp_id++;
+ tmp_id%= ARRAY_SIZE(SENSOR_PATH);
+ if(tmp_id ==camera_id){
+ needPreviewCh = false;
+ camera_id = suc_id;
+ return NO_ERROR;
+ }
+ }
+ return NO_ERROR;
}
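/* How the pairing above plays out on a two-channel UVC camera (device paths are
 * illustrative; DEVICE_PATH()/SENSOR_PATH come from the adapter source): if the
 * first node probed, say /dev/video0, does not enumerate YUYV, it is recorded as
 * main_id and its v4l2_capability.card string is cached; the next node that does
 * report YUYV and carries the same card string, say /dev/video1, becomes camera_id,
 * so preview and the "main" still channel land on the two nodes of the same camera.
 * If no card match is found before the index wraps around, the last YUYV-capable
 * node seen (suc_id, or -1 if none) is returned as camera_id instead.
 */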
#endif
-extern "C" int getValidFrameSize(int camera_fd, int pixel_format, char *framesize)
+extern "C" int getValidFrameSize(int camera_fd, int pixel_format, char *framesize, bool preview)
{
struct v4l2_frmsizeenum frmsize;
int i=0;
char tempsize[12];
framesize[0] = '\0';
+ unsigned int support_w,support_h;
+ if(preview == true){
+ char property[32];
+ support_w = 10000;
+ support_h = 10000;
+ memset(property,0,sizeof(property));
+ if(property_get("ro.camera.preview.MaxSize", property, NULL) > 0){
+ CAMHAL_LOGDB("support Max Preview Size :%s",property);
+ if(sscanf(property,"%dx%d",&support_w,&support_h)!=2){
+ support_w = 10000;
+ support_h = 10000;
+ }
+ }
+ }
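/* Worked example of the new cap (the key comes from the property_get() call above,
 * the value is illustrative): with ro.camera.preview.MaxSize=1280x720 the sscanf
 * yields support_w=1280, support_h=720 and preview sizes above that cap are skipped
 * in the enumeration loop below; an unset or malformed property leaves the
 * 10000x10000 defaults in place, i.e. no filtering.
 */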
if (camera_fd >= 0) {
memset(&frmsize,0,sizeof(v4l2_frmsizeenum));
for(i=0;;i++){
@@ -2247,19 +2124,16 @@ extern "C" int getValidFrameSize(int camera_fd, int pixel_format, char *framesiz
frmsize.pixel_format = pixel_format;
if(ioctl(camera_fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0){
if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
-#ifdef AMLOGIC_UVC_320X240
- if( (frmsize.discrete.width>320) || (frmsize.discrete.height > 240))
- continue;
-#endif
- snprintf(tempsize, sizeof(tempsize), "%dx%d,",
- frmsize.discrete.width, frmsize.discrete.height);
+                    if(((frmsize.discrete.width > support_w) || (frmsize.discrete.height > support_h))&&(preview == true))
+ continue;
+ snprintf(tempsize, sizeof(tempsize), "%dx%d,", frmsize.discrete.width, frmsize.discrete.height);
strcat(framesize, tempsize);
- }
- else
+ }else{
break;
- }
- else
+ }
+ }else{
break;
+ }
}
}
if(framesize[0] == '\0')
@@ -2286,8 +2160,49 @@ static int getCameraOrientation(bool frontcamera, char* property)
return degree;
}
-static int enumCtrlMenu(int camera_fd, struct v4l2_queryctrl *qi,
- char* menu_items, char*def_menu_item)
+static bool is_mjpeg_supported(int camera_fd)
+{
+ bool ret = false;
+ struct v4l2_fmtdesc fmt;
+ memset(&fmt,0,sizeof(fmt));
+ fmt.index = 0;
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    while(ioctl(camera_fd,VIDIOC_ENUM_FMT,&fmt) == 0){
+ if(fmt.pixelformat == V4L2_PIX_FMT_MJPEG){
+ ret = true;
+ break;
+ }
+ fmt.index++;
+ }
+ return ret;
+}
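/* A minimal sketch (illustrative only, not the committed code path) of how the new
 * mUseMJPEG / mSupportMJPEG / mSensorFormat members declared in V4LCameraAdapter.h
 * could be wired to the helper above. The property key and the helper name are
 * assumptions; only is_mjpeg_supported() and the member names come from this patch.
 * It relies on property_get() and the V4L2 headers this file already pulls in. */
static unsigned int pickSensorFormat(int camera_fd, unsigned int fallback_fmt,
                                     bool *useMJPEG, bool *supportMJPEG)
{
    char prop[92];
    memset(prop, 0, sizeof(prop));
    /* runtime switch, disabled by default; the key below is an assumed example */
    *useMJPEG = (property_get("ro.camera.preview.UseMJPEG", prop, "false") > 0) &&
                (strcmp(prop, "true") == 0);
    *supportMJPEG = is_mjpeg_supported(camera_fd);
    if (*useMJPEG && *supportMJPEG)
        return V4L2_PIX_FMT_MJPEG;  /* frames are then decoded by the new mjpeg/ code */
    return fallback_fmt;            /* e.g. V4L2_PIX_FMT_YUYV for USB sensors */
}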
+
+static void ParserLimittedRateInfo(LimittedRate_t* rate)
+{
+ char property[100];
+ int w,h,r;
+ char* pos = NULL;
+ memset(property,0,sizeof(property));
+ rate->num = 0;
+ if(property_get("ro.camera.preview.LimmitedRate", property, NULL) > 0){
+ pos = &property[0];
+ while((pos != NULL)&&(rate->num<MAX_LIMITTED_RATE_NUM)){
+ if(sscanf(pos,"%dx%dx%d",&w,&h,&r)!=3){
+ break;
+ }
+ rate->arg[rate->num].width = w;
+ rate->arg[rate->num].height = h;
+ rate->arg[rate->num].framerate = r;
+ rate->num++;
+ pos = strchr(pos, ',');
+ if(pos)
+ pos++;
+ }
+ }
+}
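/* A sketch of how the table parsed above is meant to be consumed (this lookup helper
 * is an assumption for illustration; only LimittedRate_t/RateInfo_t and the property
 * key come from the patch). The sscanf/strchr pattern above implies a comma-separated
 * list of widthxheightxfps triples, e.g.
 *     ro.camera.preview.LimmitedRate=1280x720x15,640x480x30
 */
static int lookupLimittedRate(const LimittedRate_t *rate, int width, int height,
                              int default_fps)
{
    int i;
    for (i = 0; i < rate->num; i++) {
        if ((rate->arg[i].width == width) && (rate->arg[i].height == height))
            return rate->arg[i].framerate;  /* cap the rate for this preview size */
    }
    return default_fps;                     /* size not listed: keep the driver rate */
}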
+
+static int enumCtrlMenu(int camera_fd, struct v4l2_queryctrl *qi, char* menu_items, char*def_menu_item)
{
struct v4l2_queryctrl qc;
struct v4l2_querymenu qm;
@@ -2297,18 +2212,18 @@ static int enumCtrlMenu(int camera_fd, struct v4l2_queryctrl *qi,
memset(&qc, 0, sizeof(struct v4l2_queryctrl));
qc.id = qi->id;
ret = ioctl (camera_fd, VIDIOC_QUERYCTRL, &qc);
- if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED) ){
+ if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
CAMHAL_LOGDB("camera handle %d can't support this ctrl",camera_fd);
- return mode_count;
+ return mode_count;
}else if( qc.type != V4L2_CTRL_TYPE_MENU){
CAMHAL_LOGDB("this ctrl of camera handle %d can't support menu type",camera_fd);
- return 0;
+ return 0;
}else{
memset(&qm, 0, sizeof(qm));
qm.id = qi->id;
qm.index = qc.default_value;
if(ioctl (camera_fd, VIDIOC_QUERYMENU, &qm) < 0){
- return 0;
+ return 0;
} else {
strcpy(def_menu_item, (char*)qm.name);
}
@@ -2338,17 +2253,15 @@ static bool getCameraWhiteBalance(int camera_fd, char* wb_modes, char*def_wb_mod
int item_count=0;
memset( &qc, 0, sizeof(qc));
-
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
qc.id = V4L2_CID_AUTO_WHITE_BALANCE;
#else
qc.id = V4L2_CID_DO_WHITE_BALANCE;
#endif
item_count = enumCtrlMenu( camera_fd, &qc, wb_modes, def_wb_mode);
-
if(0 >= item_count){
- strcpy( wb_modes, "auto,daylight,incandescent,fluorescent");
- strcpy(def_wb_mode, "auto");
+ strcpy( wb_modes, "auto,daylight,incandescent,fluorescent");
+ strcpy(def_wb_mode, "auto");
}
return true;
}
@@ -2361,62 +2274,57 @@ static bool getCameraBanding(int camera_fd, char* banding_modes, char*def_bandin
memset( &qc, 0, sizeof(qc));
qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
-
item_count = enumCtrlMenu( camera_fd, &qc, banding_modes, def_banding_mode);
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
tmpbuf = (char *) calloc (1, 256);
memset( tmpbuf, 0, 256);
- if( (0 < item_count)
- &&( NULL!= tmpbuf)){
-
- item_count =0;
- char *tmp =NULL;
- tmp = strstr( banding_modes, "auto");
- if(tmp){
- item_count ++;
- strcat( tmpbuf, "auto,");
- }
- tmp = strstr( banding_modes, "isable");//Disabled
- if(tmp){
- item_count ++;
- strcat( tmpbuf, "off,");
- }
- tmp = strstr( banding_modes, "50");
- if(tmp){
- item_count ++;
- strcat( tmpbuf, "50hz,");
- }
- tmp = strstr( banding_modes, "60");
- if(tmp){
- item_count ++;
- strcat( tmpbuf, "60hz,");
- }
- strcpy( banding_modes, tmpbuf);
-
- memset(tmpbuf, 0, 256);
- if( NULL != (tmp = strstr(def_banding_mode, "50")) ){
- strcat(tmpbuf, "50hz");
- }else if( NULL != (tmp = strstr(def_banding_mode, "60")) ){
- strcat(tmpbuf, "60hz");
- }else if( NULL != (tmp = strstr(def_banding_mode, "isable")) ){
- strcat(tmpbuf, "off");
- }else if( NULL != (tmp = strstr(def_banding_mode, "auto")) ){
- strcat(tmpbuf, "auto");
- }
-
- strcpy( def_banding_mode, tmpbuf);
+ if( (0 < item_count)&&( NULL!= tmpbuf)){
+ char *tmp =NULL;
+ item_count =0;
+ tmp = strstr( banding_modes, "auto");
+ if(tmp){
+ item_count ++;
+ strcat( tmpbuf, "auto,");
+ }
+ tmp = strstr( banding_modes, "isable");//Disabled
+ if(tmp){
+ item_count ++;
+ strcat( tmpbuf, "off,");
+ }
+ tmp = strstr( banding_modes, "50");
+ if(tmp){
+ item_count ++;
+ strcat( tmpbuf, "50hz,");
+ }
+ tmp = strstr( banding_modes, "60");
+ if(tmp){
+ item_count ++;
+ strcat( tmpbuf, "60hz,");
+ }
+ strcpy( banding_modes, tmpbuf);
+ memset(tmpbuf, 0, 256);
+ if( NULL != (tmp = strstr(def_banding_mode, "50")) ){
+ strcat(tmpbuf, "50hz");
+ }else if( NULL != (tmp = strstr(def_banding_mode, "60")) ){
+ strcat(tmpbuf, "60hz");
+ }else if( NULL != (tmp = strstr(def_banding_mode, "isable")) ){
+ strcat(tmpbuf, "off");
+ }else if( NULL != (tmp = strstr(def_banding_mode, "auto")) ){
+ strcat(tmpbuf, "auto");
+ }
+ strcpy( def_banding_mode, tmpbuf);
}
if(tmpbuf){
- free(tmpbuf);
- tmpbuf = NULL;
+ free(tmpbuf);
+ tmpbuf = NULL;
}
#endif
if(0 >= item_count){
- strcpy( banding_modes, "50hz,60hz");
- strcpy( def_banding_mode, "50hz");
+ strcpy( banding_modes, "50hz,60hz");
+ strcpy( def_banding_mode, "50hz");
}
return true;
}
@@ -2424,8 +2332,7 @@ static bool getCameraBanding(int camera_fd, char* banding_modes, char*def_bandin
#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3
-static bool getCameraExposureValue(int camera_fd, int &min, int &max,
- int &step, int &def)
+static bool getCameraExposureValue(int camera_fd, int &min, int &max, int &step, int &def)
{
struct v4l2_queryctrl qc;
int ret=0;
@@ -2441,32 +2348,32 @@ static bool getCameraExposureValue(int camera_fd, int &min, int &max,
#endif
ret = ioctl( camera_fd, VIDIOC_QUERYCTRL, &qc);
if(ret<0){
- CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
+ CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- min = 0;
- max = 0;
- def = 0;
- step = 0;
+ min = 0;
+ max = 0;
+ def = 0;
+ step = 0;
#else
- min = -4;
- max = 4;
- def = 0;
- step = 1;
+ min = -4;
+ max = 4;
+ def = 0;
+ step = 1;
#endif
- return true;
+ return true;
}
if(0 < qc.step)
- level = ( qc.maximum - qc.minimum + 1 )/qc.step;
+ level = ( qc.maximum - qc.minimum + 1 )/qc.step;
if((level > MAX_LEVEL_FOR_EXPOSURE)
- || (level < MIN_LEVEL_FOR_EXPOSURE)){
- min = -4;
- max = 4;
- def = 0;
- step = 1;
- CAMHAL_LOGDB("not in[min,max], min=%d, max=%d, def=%d, step=%d\n", min, max, def, step);
- return true;
+ || (level < MIN_LEVEL_FOR_EXPOSURE)){
+ min = -4;
+ max = 4;
+ def = 0;
+ step = 1;
+ CAMHAL_LOGDB("not in[min,max], min=%d, max=%d, def=%d, step=%d\n", min, max, def, step);
+ return true;
}
middle = (qc.minimum+qc.maximum)/2;
@@ -2474,7 +2381,6 @@ static bool getCameraExposureValue(int camera_fd, int &min, int &max,
max = qc.maximum - middle;
def = qc.default_value - middle;
step = qc.step;
-
return true;
}
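/* Worked example of the normalisation above (driver values are illustrative): a
 * sensor that reports minimum=0, maximum=12, step=1, default_value=6 gives
 * level = (12-0+1)/1 = 13, which lies inside [MIN_LEVEL_FOR_EXPOSURE,
 * MAX_LEVEL_FOR_EXPOSURE] = [3,16], so middle = (0+12)/2 = 6 and the exported EV
 * range is re-centred around zero: min = -6, max = 6, def = 0, step = 1. Drivers
 * with fewer than 3 or more than 16 steps fall back to the fixed -4..+4 range.
 */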
@@ -2520,8 +2426,8 @@ static bool getCameraAutoFocus(int camera_fd, char* focus_mode_str, char*def_foc
}
return auto_focus_enable;
}
-static bool getCameraFocusArea(int camera_fd, char* max_num_focus_area,
- char*focus_area)
+
+static bool getCameraFocusArea(int camera_fd, char* max_num_focus_area, char*focus_area)
{
struct v4l2_queryctrl qc;
int ret = 0;
@@ -2533,12 +2439,11 @@ static bool getCameraFocusArea(int camera_fd, char* max_num_focus_area,
memset(&qc, 0, sizeof(struct v4l2_queryctrl));
qc.id = V4L2_CID_FOCUS_ABSOLUTE;
ret = ioctl (camera_fd, VIDIOC_QUERYCTRL, &qc);
- if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)
- || (qc.type != V4L2_CTRL_TYPE_INTEGER)){
+ if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)){
CAMHAL_LOGDB("can't support touch focus,%sret=%d%s\n",
- qc.flags == V4L2_CTRL_FLAG_DISABLED? "disble,":"",
- ret,
- qc.type == V4L2_CTRL_TYPE_INTEGER?"":", type not right");
+                qc.flags == V4L2_CTRL_FLAG_DISABLED? "disabled,":"",
+ ret,
+ qc.type == V4L2_CTRL_TYPE_INTEGER?"":", type not right");
return false;
}
@@ -2548,7 +2453,6 @@ static bool getCameraFocusArea(int camera_fd, char* max_num_focus_area,
y1 = (qc.maximum >> 16) & 0xFFFF;
strcpy(max_num_focus_area, "1");
sprintf(focus_area, "(%d,%d,%d,%d, 1)", x0, y0, x1, y1);
-
return true;
}
@@ -2647,10 +2551,9 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
}
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- params->set(CameraProperties::RELOAD_WHEN_OPEN, "1");
-
+ params->set(CameraProperties::RELOAD_WHEN_OPEN, "1");
#else
- params->set(CameraProperties::RELOAD_WHEN_OPEN, "0");
+ params->set(CameraProperties::RELOAD_WHEN_OPEN, "0");
#endif
params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS,"yuv420sp,yuv420p"); //yuv420p for cts
if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ // 422I
@@ -2664,7 +2567,7 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
params->set(CameraProperties::PREVIEW_FORMAT,PREVIEW_FORMAT_420SP);
}
- //get preview size & set
+ //get preview size & set
char *sizes = (char *) calloc (1, 1024);
if(!sizes){
CAMHAL_LOGDA("Alloc string buff error!");
@@ -2673,25 +2576,23 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
#ifdef AMLOGIC_TWO_CH_UVC
- int main_id = -1;
- if(NO_ERROR == getVideodevId( camera_id,main_id )){
- if ((camera_fd = open(DEVICE_PATH(camera_id), O_RDWR)) != -1)
- {
- CAMHAL_LOGDB("open %s success to loadCaps\n", DEVICE_PATH(camera_id));
- }
- }
+ int main_id = -1;
+ if(NO_ERROR == getVideodevId( camera_id,main_id )){
+ if ((camera_fd = open(DEVICE_PATH(camera_id), O_RDWR)) != -1){
+ CAMHAL_LOGDB("open %s success to loadCaps\n", DEVICE_PATH(camera_id));
+ }
+ }
#else
- while( camera_id < (int)ARRAY_SIZE(SENSOR_PATH)){
- if ((camera_fd = open(DEVICE_PATH(camera_id), O_RDWR)) != -1)
- {
- CAMHAL_LOGDB("open %s success when loadCaps!\n", DEVICE_PATH(camera_id));
- break;
- }
- camera_id++;
- }
- if(camera_id >= (int)ARRAY_SIZE(SENSOR_PATH)){
- CAMHAL_LOGDB("failed to opening Camera when loadCaps: %s", strerror(errno));
- }
+ while( camera_id < (int)ARRAY_SIZE(SENSOR_PATH)){
+ if ((camera_fd = open(DEVICE_PATH(camera_id), O_RDWR)) != -1){
+ CAMHAL_LOGDB("open %s success when loadCaps!\n", DEVICE_PATH(camera_id));
+ break;
+ }
+ camera_id++;
+ }
+ if(camera_id >= (int)ARRAY_SIZE(SENSOR_PATH)){
+ CAMHAL_LOGDB("failed to opening Camera when loadCaps: %s", strerror(errno));
+ }
#endif
#else
camera_fd = open(DEVICE_PATH(camera_id), O_RDWR);
@@ -2703,16 +2604,16 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
int fps=0, fps_num=0;
int ret;
- char *fpsrange=(char *)calloc(32,sizeof(char));
+ char fpsrange[64];
+ memset(fpsrange,0,sizeof(fpsrange));
ret = enumFramerate(camera_fd, &fps, &fps_num);
- if((fpsrange != NULL)&&(NO_ERROR == ret) && ( 0 !=fps_num )){
- CAMHAL_LOGDA("O_NONBLOCK operation to do previewThread\n");
+ if((NO_ERROR == ret) && ( 0 !=fps_num )){
+ CAMHAL_LOGDA("O_NONBLOCK operation to do previewThread\n");
int tmp_fps = fps/fps_num/5;
int iter = 0;
int shift = 0;
- for(iter = 0;iter < tmp_fps;)
- {
+ for(iter = 0;iter < tmp_fps;){
iter++;
if(iter == tmp_fps)
sprintf(fpsrange+shift,"%d",iter*5);
@@ -2722,44 +2623,43 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
shift += 2;
else
shift += 3;
-
}
if((fps/fps_num)%5 != 0)
sprintf(fpsrange+shift-1,",%d",fps/fps_num);
- params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, fpsrange);
- params->set(CameraProperties::PREVIEW_FRAME_RATE, fps/fps_num);
-
- memset( fpsrange, 0, 32*sizeof(char));
- sprintf(fpsrange,"%s%d","5000,",fps*1000/fps_num);
- params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, fpsrange);
- params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, fpsrange);
-
- memset( fpsrange, 0, 32*sizeof(char));
- sprintf(fpsrange,"(%s%d)","5000,",fps*1000/fps_num);
- params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, fpsrange);
- memset( fpsrange, 0, 32*sizeof(char));
- sprintf(fpsrange,"%s%d","5000,",fps*1000/fps_num);
- params->set(CameraProperties::FRAMERATE_RANGE, fpsrange);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, fpsrange);
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, fps/fps_num);
+
+ memset(fpsrange, 0, sizeof(fpsrange));
+ sprintf(fpsrange,"%s%d","5000,",fps*1000/fps_num);
+ params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, fpsrange);
+ params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, fpsrange);
+
+        memset(fpsrange, 0, sizeof(fpsrange));
+ sprintf(fpsrange,"(%s%d)","5000,",fps*1000/fps_num);
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, fpsrange);
+ memset(fpsrange, 0, sizeof(fpsrange));
+ sprintf(fpsrange,"%s%d","5000,",fps*1000/fps_num);
+ params->set(CameraProperties::FRAMERATE_RANGE, fpsrange);
}else{
- if(NO_ERROR != ret){
- CAMHAL_LOGDA("sensor driver need to implement enum framerate func!!!\n");
- }
- params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, "5,15");
- params->set(CameraProperties::PREVIEW_FRAME_RATE, "15");
+ if(NO_ERROR != ret){
+ CAMHAL_LOGDA("sensor driver need to implement enum framerate func!!!\n");
+ }
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, "5,15");
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, "15");
- params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, "(5000,26623)");
- params->set(CameraProperties::FRAMERATE_RANGE, "5000,26623");
- params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, "5000,15000");
- params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, "5000,15000");
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, "(5000,26623)");
+ params->set(CameraProperties::FRAMERATE_RANGE, "5000,26623");
+ params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, "5000,15000");
+ params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, "5000,15000");
}
#else
- params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, "5,15");
- params->set(CameraProperties::PREVIEW_FRAME_RATE, "15");
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, "5,15");
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, "15");
- params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, "(5000,26623)");
- params->set(CameraProperties::FRAMERATE_RANGE, "5000,26623");
- params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, "5000,15000");
- params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, "5000,15000");
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, "(5000,26623)");
+ params->set(CameraProperties::FRAMERATE_RANGE, "5000,26623");
+ params->set(CameraProperties::FRAMERATE_RANGE_IMAGE, "5000,15000");
+ params->set(CameraProperties::FRAMERATE_RANGE_VIDEO, "5000,15000");
#endif
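/* Worked example of the string building above, assuming enumFramerate() reports
 * fps = 30, fps_num = 1 (values are illustrative): tmp_fps = 30/1/5 = 6, so the loop
 * writes "5,10,15,20,25,30" into fpsrange for SUPPORTED_PREVIEW_FRAME_RATES,
 * PREVIEW_FRAME_RATE becomes 30, and the range strings become "5000,30000"
 * ("(5000,30000)" for FRAMERATE_RANGE_SUPPORTED). A rate that is not a multiple of
 * 5, e.g. 27, is appended by the final sprintf as ",27". If enumeration fails or
 * fps_num is 0, the else branch falls back to the fixed "5,15" / "5000,26623" values.
 */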
memset(sizes,0,1024);
@@ -2767,14 +2667,13 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
preview_format = V4L2_PIX_FMT_YUYV;
#endif
- if (!getValidFrameSize(camera_fd, preview_format, sizes)) {
+ if (!getValidFrameSize(camera_fd, preview_format, sizes,true)) {
int len = strlen(sizes);
unsigned int supported_w = 0, supported_h = 0,w = 0,h = 0;
if(len>1){
if(sizes[len-1] == ',')
sizes[len-1] = '\0';
}
-
#ifndef AMLOGIC_USB_CAMERA_SUPPORT
char small_size[8] = "176x144"; //for cts
if(strstr(sizes,small_size)==NULL){
@@ -2803,15 +2702,8 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
memset(sizes, 0, 1024);
sprintf(sizes,"%dx%d",w,h);
}
- //char * b = strrchr(sizes, ',');
- //if (b)
- // b++;
- //else
- // b = sizes;
params->set(CameraProperties::PREVIEW_SIZE, sizes);
- }
- else
- {
+ }else {
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, "320x240,176x144,160x120");
params->set(CameraProperties::PREVIEW_SIZE,"320x240");
@@ -2836,7 +2728,7 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
picture_format = V4L2_PIX_FMT_YUYV;
#endif
- if (!getValidFrameSize(camera_fd, picture_format, sizes)) {
+ if (!getValidFrameSize(camera_fd, picture_format, sizes,false)) {
int len = strlen(sizes);
unsigned int supported_w = 0, supported_h = 0,w = 0,h = 0;
if(len>1){
@@ -2863,15 +2755,8 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
memset(sizes, 0, 1024);
sprintf(sizes,"%dx%d",w,h);
}
- //char * b = strrchr(sizes, ',');
- //if (b)
- // b++;
- //else
- // b = sizes;
params->set(CameraProperties::PICTURE_SIZE, sizes);
- }
- else
- {
+ }else{
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, "320x240");
params->set(CameraProperties::PICTURE_SIZE,"320x240");
@@ -2892,10 +2777,9 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
params->set(CameraProperties::FOCUS_MODE, def_focus_mode);
memset(focus_mode,0,256);
memset(def_focus_mode,0,64);
- if ( getCameraFocusArea( camera_fd, def_focus_mode, focus_mode)){
+ if (getCameraFocusArea( camera_fd, def_focus_mode, focus_mode)){
params->set(CameraProperties::MAX_FOCUS_AREAS, def_focus_mode);
- CAMHAL_LOGDB("focus_area=%s, max_num_focus_area=%s\n",
- focus_mode, def_focus_mode);
+ CAMHAL_LOGDB("focus_area=%s, max_num_focus_area=%s\n", focus_mode, def_focus_mode);
}
}else {
params->set(CameraProperties::SUPPORTED_FOCUS_MODES, "fixed");
@@ -2919,21 +2803,20 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
if((banding_mode)&&(def_banding_mode)){
memset(banding_mode,0,256);
memset(def_banding_mode,0,64);
-
- getCameraBanding(camera_fd, banding_mode, def_banding_mode);
- params->set(CameraProperties::SUPPORTED_ANTIBANDING, banding_mode);
- params->set(CameraProperties::ANTIBANDING, def_banding_mode);
+ getCameraBanding(camera_fd, banding_mode, def_banding_mode);
+ params->set(CameraProperties::SUPPORTED_ANTIBANDING, banding_mode);
+ params->set(CameraProperties::ANTIBANDING, def_banding_mode);
}else{
- params->set(CameraProperties::SUPPORTED_ANTIBANDING, "50hz,60hz");
- params->set(CameraProperties::ANTIBANDING, "50hz");
+ params->set(CameraProperties::SUPPORTED_ANTIBANDING, "50hz,60hz");
+ params->set(CameraProperties::ANTIBANDING, "50hz");
}
if(banding_mode){
free(banding_mode);
- banding_mode = NULL;
+ banding_mode = NULL;
}
if(def_banding_mode){
free(def_banding_mode);
- def_banding_mode = NULL;
+ def_banding_mode = NULL;
}
params->set(CameraProperties::FOCAL_LENGTH, "4.31");
@@ -2949,23 +2832,23 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
char *def_wb_mode = (char *) calloc (1, 64);
if( wb_mode && def_wb_mode){
- memset(wb_mode, 0, 256);
- memset(def_wb_mode, 0, 64);
- getCameraWhiteBalance(camera_fd, wb_mode, def_wb_mode);
- params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, wb_mode);
- params->set(CameraProperties::WHITEBALANCE, def_wb_mode);
+ memset(wb_mode, 0, 256);
+ memset(def_wb_mode, 0, 64);
+ getCameraWhiteBalance(camera_fd, wb_mode, def_wb_mode);
+ params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, wb_mode);
+ params->set(CameraProperties::WHITEBALANCE, def_wb_mode);
}else{
- params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, "auto,daylight,incandescent,fluorescent");
- params->set(CameraProperties::WHITEBALANCE, "auto");
+ params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, "auto,daylight,incandescent,fluorescent");
+ params->set(CameraProperties::WHITEBALANCE, "auto");
}
if(wb_mode){
free(wb_mode);
- wb_mode = NULL;
+ wb_mode = NULL;
}
if(def_wb_mode){
free(def_wb_mode);
- def_wb_mode = NULL;
+ def_wb_mode = NULL;
}
#endif
@@ -2982,8 +2865,7 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
if (get_flash_mode(camera_fd, flash_mode,def_flash_mode)) {
params->set(CameraProperties::SUPPORTED_FLASH_MODES, flash_mode);
params->set(CameraProperties::FLASH_MODE, def_flash_mode);
- CAMHAL_LOGDB("def_flash_mode=%s, flash_mode=%s\n",
- def_flash_mode, flash_mode);
+ CAMHAL_LOGDB("def_flash_mode=%s, flash_mode=%s\n", def_flash_mode, flash_mode);
}
}
if (flash_mode) {
@@ -3060,9 +2942,9 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
params->set(CameraProperties::REQUIRED_IMAGE_BUFS, DEFAULT_NUM_PIC_BUFS);
#ifdef AMLOGIC_ENABLE_VIDEO_SNAPSHOT
- params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, "true");
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, "true");
#else
- params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, "false");
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, "false");
#endif
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
params->set(CameraProperties::VIDEO_SIZE,params->get(CameraProperties::PREVIEW_SIZE));
@@ -3076,7 +2958,6 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
close(camera_fd);
}
-
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
/* gets video device defined frame rate (not real - consider it a maximum value)
* args:
@@ -3086,125 +2967,99 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
extern "C" int get_framerate ( int camera_fd, int *fps, int *fps_num)
{
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
-
- *fps = 15;
-
- *fps_num = 1;
-
- return 0;
+ *fps = 15;
+ *fps_num = 1;
+ return 0;
#else
- int ret=0;
-
- struct v4l2_streamparm streamparm;
-
- streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- ret = ioctl( camera_fd,VIDIOC_G_PARM,&streamparm);
- if (ret < 0)
- {
- CAMHAL_LOGDA("VIDIOC_G_PARM - Unable to get timeperframe");
- }
- else
- {
- if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
- // it seems numerator is allways 1 but we don't do assumptions here :-)
- *fps = streamparm.parm.capture.timeperframe.denominator;
- *fps_num = streamparm.parm.capture.timeperframe.numerator;
- }
- }
+ int ret=0;
+
+ struct v4l2_streamparm streamparm;
- return ret;
+ streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ ret = ioctl( camera_fd,VIDIOC_G_PARM,&streamparm);
+ if (ret < 0){
+ CAMHAL_LOGDA("VIDIOC_G_PARM - Unable to get timeperframe");
+ }else{
+ if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
+            // it seems the numerator is always 1 but we don't make assumptions here :-)
+ *fps = streamparm.parm.capture.timeperframe.denominator;
+ *fps_num = streamparm.parm.capture.timeperframe.numerator;
+ }
+ }
+ return ret;
#endif
}
-int enumFramerate ( int camera_fd, int *fps, int *fps_num)
+int enumFramerate (int camera_fd, int *fps, int *fps_num)
{
- int ret=0;
- int framerate=0;
- int temp_rate=0;
- struct v4l2_frmivalenum fival;
- int i,j;
+ int ret=0;
+ int framerate=0;
+ int temp_rate=0;
+ struct v4l2_frmivalenum fival;
+ int i,j;
- int pixelfmt_tbl[]={
+ int pixelfmt_tbl[]={
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- V4L2_PIX_FMT_YUYV,
+ V4L2_PIX_FMT_YUYV,
#else
- V4L2_PIX_FMT_NV21,
+ V4L2_PIX_FMT_NV21,
#endif
- V4L2_PIX_FMT_YVU420,
- };
- struct v4l2_frmsize_discrete resolution_tbl[]={
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- {960, 720},
-#endif
- {640, 480},
- {320, 240},
- };
-
- for( i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++){
- for( j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++){
-
- memset(&fival, 0, sizeof(fival));
- fival.index = 0;
- fival.pixel_format = pixelfmt_tbl[i];
- fival.width = resolution_tbl[j].width;
- fival.height = resolution_tbl[j].height;
-
- while ((ret = ioctl(camera_fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0)
- {
- if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
- {
- temp_rate = fival.discrete.denominator/fival.discrete.numerator;
- if(framerate < temp_rate){
- framerate = temp_rate;
- }
- }
- else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)
- {
- framerate = fival.stepwise.max.denominator/fival.stepwise.max.numerator;
- CAMHAL_LOGDB("pixelfmt=%d,resolution:%dx%d,"
- "FRAME TYPE is continuous,step=%d/%d s\n",
- pixelfmt_tbl[i],
- resolution_tbl[j].width,
- resolution_tbl[j].height,
- fival.stepwise.max.numerator,
- fival.stepwise.max.denominator);
- break;
- }
- else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE)
- {
- CAMHAL_LOGDB("pixelfmt=%d,resolution:%dx%d,"
- "FRAME TYPE is step wise,step=%d/%d s\n",
- pixelfmt_tbl[i],
- resolution_tbl[j].width,
- resolution_tbl[j].height,
- fival.stepwise.step.numerator,
- fival.stepwise.step.denominator);
- framerate = fival.stepwise.max.denominator/fival.stepwise.max.numerator;
- break;
- }
-
- fival.index++;
- }
- }
- }
-
- *fps = framerate;
- *fps_num = 1;
-
- CAMHAL_LOGDB("enum framerate=%d\n", framerate);
+ V4L2_PIX_FMT_YVU420,
+ };
+ struct v4l2_frmsize_discrete resolution_tbl[]={
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- if( (framerate > 15) || framerate <=10 ){
-#else
- if( framerate <= 10){
-#endif
- return -1;
- }
+ {960, 720},
+#endif
+ {640, 480},
+ {320, 240},
+ };
+
+ for( i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++){
+ for( j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++){
+ memset(&fival, 0, sizeof(fival));
+ fival.index = 0;
+ fival.pixel_format = pixelfmt_tbl[i];
+ fival.width = resolution_tbl[j].width;
+ fival.height = resolution_tbl[j].height;
+
+ while ((ret = ioctl(camera_fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0){
+ if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE){
+ temp_rate = fival.discrete.denominator/fival.discrete.numerator;
+ if(framerate < temp_rate)
+ framerate = temp_rate;
+ }else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS){
+ framerate = fival.stepwise.max.denominator/fival.stepwise.max.numerator;
+ CAMHAL_LOGDB("pixelfmt=%d,resolution:%dx%d,"
+ "FRAME TYPE is continuous,step=%d/%d s\n",
+ pixelfmt_tbl[i],
+ resolution_tbl[j].width,
+ resolution_tbl[j].height,
+ fival.stepwise.max.numerator,
+ fival.stepwise.max.denominator);
+ break;
+ }else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
+ CAMHAL_LOGDB("pixelfmt=%d,resolution:%dx%d,"
+ "FRAME TYPE is step wise,step=%d/%d s\n",
+ pixelfmt_tbl[i],
+ resolution_tbl[j].width,
+ resolution_tbl[j].height,
+ fival.stepwise.step.numerator,
+ fival.stepwise.step.denominator);
+ framerate = fival.stepwise.max.denominator/fival.stepwise.max.numerator;
+ break;
+ }
+ fival.index++;
+ }
+ }
+ }
- return 0;
+ *fps = framerate;
+ *fps_num = 1;
+ CAMHAL_LOGDB("enum framerate=%d\n", framerate);
+ return 0;
}
#endif
-
extern "C" int V4LCameraAdapter::set_white_balance(int camera_fd,const char *swb)
{
int ret = 0;
@@ -3238,9 +3093,9 @@ extern "C" int V4LCameraAdapter::set_white_balance(int camera_fd,const char *swb
#endif
if(mWhiteBalance == ctl.value){
- return 0;
+ return 0;
}else{
- mWhiteBalance = ctl.value;
+ mWhiteBalance = ctl.value;
}
ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
if(ret<0){
@@ -3251,49 +3106,48 @@ extern "C" int V4LCameraAdapter::set_white_balance(int camera_fd,const char *swb
status_t V4LCameraAdapter::getFocusMoveStatus()
{
- struct v4l2_control ctl;
- int ret;
-
- if( (cur_focus_mode != CAM_FOCUS_MODE_CONTI_VID) &&
- (cur_focus_mode != CAM_FOCUS_MODE_CONTI_PIC) &&
- (cur_focus_mode != CAM_FOCUS_MODE_AUTO)){
- mFocusMoveEnabled = false;
- return 0;
- }
+ struct v4l2_control ctl;
+ int ret;
+ if( (cur_focus_mode != CAM_FOCUS_MODE_CONTI_VID) &&
+ (cur_focus_mode != CAM_FOCUS_MODE_CONTI_PIC) &&
+ (cur_focus_mode != CAM_FOCUS_MODE_AUTO)){
+ mFocusMoveEnabled = false;
+ return 0;
+ }
- mFocusWaitCount --;
- if(mFocusWaitCount >= 0){
- return 0;
- }
- mFocusWaitCount = 0;
-
- memset( &ctl, 0, sizeof(ctl));
- ctl.id =V4L2_CID_AUTO_FOCUS_STATUS;
- ret = ioctl(mCameraHandle, VIDIOC_G_CTRL, &ctl);
- if ( 0 > ret ){
- CAMHAL_LOGDA("V4L2_CID_AUTO_FOCUS_STATUS failed\n");
- return -EINVAL;
- }
+ mFocusWaitCount --;
+ if(mFocusWaitCount >= 0){
+ return 0;
+ }
+ mFocusWaitCount = 0;
- if( ctl.value == V4L2_AUTO_FOCUS_STATUS_BUSY ){
- if(!bFocusMoveState){
- bFocusMoveState = true;
- notifyFocusMoveSubscribers(FOCUS_MOVE_START);
- }
- }else {
- mFocusWaitCount = FOCUS_PROCESS_FRAMES;
- if(bFocusMoveState){
- bFocusMoveState = false;
- notifyFocusMoveSubscribers(FOCUS_MOVE_STOP);
- }
- }
+ memset( &ctl, 0, sizeof(ctl));
+ ctl.id =V4L2_CID_AUTO_FOCUS_STATUS;
+ ret = ioctl(mCameraHandle, VIDIOC_G_CTRL, &ctl);
+ if (0 > ret ){
+ CAMHAL_LOGDA("V4L2_CID_AUTO_FOCUS_STATUS failed\n");
+ return -EINVAL;
+ }
- return ctl.value;
+ if( ctl.value == V4L2_AUTO_FOCUS_STATUS_BUSY ){
+ if(!bFocusMoveState){
+ bFocusMoveState = true;
+ notifyFocusMoveSubscribers(FOCUS_MOVE_START);
+ }
+ }else {
+ mFocusWaitCount = FOCUS_PROCESS_FRAMES;
+ if(bFocusMoveState){
+ bFocusMoveState = false;
+ notifyFocusMoveSubscribers(FOCUS_MOVE_STOP);
+ }
+ }
+ return ctl.value;
}
+
extern "C" int V4LCameraAdapter::set_focus_area( int camera_fd, const char *focusarea)
{
- struct v4l2_control ctl;
- int ret;
+ struct v4l2_control ctl;
+ int ret;
int x0 = 0;
int y0 = 0;
int x1 = 0;
@@ -3306,22 +3160,21 @@ extern "C" int V4LCameraAdapter::set_focus_area( int camera_fd, const char *focu
CAMHAL_LOGDA("Invalid position for tap focus!\n");
return 0;
}
- memset( &ctl, 0, sizeof(ctl));
- ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
-
- tempvalue = ((x0+x1)/2 + 1000);
+ memset( &ctl, 0, sizeof(ctl));
+ ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
+ tempvalue = ((x0+x1)/2 + 1000);
tempvalue <<= 16;
ctl.value = tempvalue;
tempvalue = ((y0+y1)/2 + 1000) & 0xffff;
ctl.value |= tempvalue;
- ret = ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl);
- if ( 0 > ret ){
- CAMHAL_LOGDA("focus tap failed\n");
- return -EINVAL;
- }
-
+ ret = ioctl(mCameraHandle, VIDIOC_S_CTRL, &ctl);
+ if ( 0 > ret ){
+ CAMHAL_LOGDA("focus tap failed\n");
+ return -EINVAL;
+ }
return 0;
}
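/* Worked example of the packing above: Android focus-area coordinates run from
 * -1000 to 1000, so an area parsed as (x0,y0,x1,y1) = (-200,-200,200,200) gives
 *     x centre: (x0+x1)/2 + 1000 = 1000  ->  ctl.value = 1000 << 16 = 0x03E80000
 *     y centre: ((y0+y1)/2 + 1000) & 0xffff = 1000  ->  ctl.value |= 0x03E8
 * and V4L2_CID_FOCUS_ABSOLUTE receives 0x03E803E8. getCameraFocusArea() uses the
 * same 16-bit split when it unpacks qc.minimum/qc.maximum into the reported window.
 */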
+
/*
* use id V4L2_CID_EXPOSURE_AUTO to set exposure mode
* 0: Auto Mode, commit failure @20120504
@@ -3340,24 +3193,20 @@ extern "C" int V4LCameraAdapter::SetExposureMode(int camera_fd, unsigned int mod
ctl.id = V4L2_CID_EXPOSURE_AUTO;
ctl.value = mode;
ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
- if(ret<0)
- {
- CAMHAL_LOGDB("fail: %s. ret=%d", strerror(errno),ret);
- return ret;
- }
- if( (V4L2_EXPOSURE_APERTURE_PRIORITY ==ctl.value)
- ||(V4L2_EXPOSURE_AUTO ==ctl.value)){
- memset( &ctl, 0, sizeof(ctl));
- ctl.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY;
- ctl.value = true;
- ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
- if(ret<0){
- CAMHAL_LOGDB("Exposure auto priority Set manual fail: %s. ret=%d",
- strerror(errno),ret);
- return ret;
- }
+ if(ret<0){
+ CAMHAL_LOGDB("fail: %s. ret=%d", strerror(errno),ret);
+ return ret;
+ }
+ if( (V4L2_EXPOSURE_APERTURE_PRIORITY ==ctl.value)||(V4L2_EXPOSURE_AUTO ==ctl.value)){
+ memset( &ctl, 0, sizeof(ctl));
+ ctl.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY;
+ ctl.value = true;
+ ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
+ if(ret<0){
+ CAMHAL_LOGDB("Exposure auto priority Set manual fail: %s. ret=%d", strerror(errno),ret);
+ return ret;
+ }
}
- LOG_FUNCTION_NAME_EXIT;
return 0;
}
#endif
@@ -3372,38 +3221,32 @@ extern "C" int V4LCameraAdapter::SetExposure(int camera_fd,const char *sbn)
return -1;
level = atoi(sbn);
if(mEV == level){
- return 0;
+ return 0;
}else{
- mEV = level;
+ mEV = level;
}
-
memset(&ctl, 0, sizeof(ctl));
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
level ++;
-
if(level !=1){
- ret = SetExposureMode( camera_fd, V4L2_EXPOSURE_MANUAL);
- if(ret<0)
- {
- CAMHAL_LOGDA("Exposure Mode change to manual mode failure\n");
- return ret;
- }
+ ret = SetExposureMode( camera_fd, V4L2_EXPOSURE_MANUAL);
+ if(ret<0){
+ CAMHAL_LOGDA("Exposure Mode change to manual mode failure\n");
+ return ret;
+ }
}else{
- ret = SetExposureMode( camera_fd, V4L2_EXPOSURE_APERTURE_PRIORITY);// 3);
- if(ret<0)
- {
- CAMHAL_LOGDA("Exposure Mode change to Aperture mode failure\n");
- }
- return ret;//APERTURE mode cann't set followed control
+ ret = SetExposureMode( camera_fd, V4L2_EXPOSURE_APERTURE_PRIORITY);// 3);
+ if(ret<0){
+ CAMHAL_LOGDA("Exposure Mode change to Aperture mode failure\n");
+ }
+        return ret;//APERTURE mode can't set the following control
}
ctl.id = V4L2_CID_EXPOSURE_ABSOLUTE;
- if(level>=0)
- {
- ctl.value= mEVdef << level;
- }else
- {
- ctl.value= mEVdef >> (-level);
+ if(level>=0){
+ ctl.value= mEVdef << level;
+ }else{
+ ctl.value= mEVdef >> (-level);
}
ctl.value= ctl.value>mEVmax? mEVmax:ctl.value;
ctl.value= ctl.value<mEVmin? mEVmin:ctl.value;
@@ -3418,7 +3261,6 @@ extern "C" int V4LCameraAdapter::SetExposure(int camera_fd,const char *sbn)
if(ret<0){
CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno),ret);
}
-
return ret ;
}
@@ -3431,7 +3273,6 @@ extern "C" int set_effect(int camera_fd,const char *sef)
memset(&ctl, 0, sizeof(ctl));
ctl.id = V4L2_CID_COLORFX;
-
if(strcasecmp(sef,"none")==0)
ctl.value=CAM_EFFECT_ENC_NORMAL;
else if(strcasecmp(sef,"negative")==0)
@@ -3442,7 +3283,7 @@ extern "C" int set_effect(int camera_fd,const char *sef)
if(ret<0){
CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno),ret);
}
- return ret ;
+ return ret ;
}
extern "C" int set_night_mode(int camera_fd,const char *snm)
@@ -3457,14 +3298,12 @@ extern "C" int set_night_mode(int camera_fd,const char *snm)
ctl.value=CAM_NM_AUTO;
else if(strcasecmp(snm,"night")==0)
ctl.value=CAM_NM_ENABLE;
-
ctl.id = V4L2_CID_DO_WHITE_BALANCE;
-
ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
if(ret<0){
CAMHAL_LOGDB("Set night mode fail: %s. ret=%d", strerror(errno),ret);
}
- return ret ;
+ return ret ;
}
extern "C" int V4LCameraAdapter::set_banding(int camera_fd,const char *snm)
@@ -3486,11 +3325,10 @@ extern "C" int V4LCameraAdapter::set_banding(int camera_fd,const char *snm)
ctl.value= CAM_ANTIBANDING_OFF;
ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;
-
if(mAntiBanding == ctl.value){
- return 0;
+ return 0;
}else{
- mAntiBanding = ctl.value;
+ mAntiBanding = ctl.value;
}
ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
if(ret<0){
@@ -3565,9 +3403,8 @@ extern "C" int set_flash_mode(int camera_fd, const char *sfm)
ctl.id = V4L2_CID_BACKLIGHT_COMPENSATION;
ret = ioctl( camera_fd, VIDIOC_S_CTRL, &ctl);
if( ret < 0 ){
- CAMHAL_LOGDB("BACKLIGHT_COMPENSATION failed, errno=%d\n", errno);
+ CAMHAL_LOGDB("BACKLIGHT_COMPENSATION failed, errno=%d\n", errno);
}
-
return ret;
}
@@ -3595,7 +3432,6 @@ static int get_hflip_mode(int camera_fd)
return ret;
}
-
static int set_hflip_mode(int camera_fd, bool mode)
{
int ret = 0;
@@ -3605,9 +3441,7 @@ static int set_hflip_mode(int camera_fd, bool mode)
memset(&ctl, 0,sizeof(ctl));
ctl.value=mode?1:0;
-
ctl.id = V4L2_CID_HFLIP;
-
ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
if(ret<0){
CAMHAL_LOGDB("Set hflip mode fail: %s. ret=%d", strerror(errno),ret);
@@ -3657,6 +3491,7 @@ static int set_zoom_level(int camera_fd, int zoom)
}
return ret ;
}
+
#ifndef AMLOGIC_USB_CAMERA_SUPPORT
static int set_rotate_value(int camera_fd, int value)
{
@@ -3669,20 +3504,15 @@ static int set_rotate_value(int camera_fd, int value)
CAMHAL_LOGDB("Set rotate value invalid: %d.", value);
return -1;
}
-
memset( &ctl, 0, sizeof(ctl));
-
ctl.value=value;
-
ctl.id = V4L2_ROTATE_ID;
-
ret = ioctl(camera_fd, VIDIOC_S_CTRL, &ctl);
if(ret<0){
CAMHAL_LOGDB("Set rotate value fail: %s. ret=%d", strerror(errno),ret);
}
return ret ;
}
-
#endif
};
diff --git a/inc/V4LCameraAdapter/V4LCameraAdapter.h b/inc/V4LCameraAdapter/V4LCameraAdapter.h
index 4bb5ea7..f68f90d 100755
--- a/inc/V4LCameraAdapter/V4LCameraAdapter.h
+++ b/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -27,7 +27,6 @@
namespace android {
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
-//#define AMLOGIC_UVC_320X240
#define DEFAULT_PREVIEW_PIXEL_FORMAT V4L2_PIX_FMT_NV21
//#define DEFAULT_PREVIEW_PIXEL_FORMAT V4L2_PIX_FMT_YUYV
#define DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT V4L2_PIX_FMT_RGB24
@@ -39,6 +38,8 @@ namespace android {
#endif
#define NB_BUFFER 6
+#define MAX_LIMITTED_RATE_NUM 6
+
struct VideoInfo {
struct v4l2_capability cap;
struct v4l2_format format;
@@ -213,6 +214,17 @@ typedef struct cam_cache_buf{
int index;
}cache_buf_t;
+typedef struct cam_LimittedRate_Item{
+ int width;
+ int height;
+ int framerate;
+}RateInfo_t;
+
+typedef struct cam_LimittedRate_Info{
+ int num;
+ RateInfo_t arg[MAX_LIMITTED_RATE_NUM];
+}LimittedRate_t;
+
#define V4L2_ROTATE_ID 0x980922 //V4L2_CID_ROTATE
#define V4L2_CID_AUTO_FOCUS_STATUS (V4L2_CID_CAMERA_CLASS_BASE+30)
@@ -381,6 +393,7 @@ private:
int mZoomlevel;
unsigned int mPixelFormat;
+ unsigned int mSensorFormat;
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
bool mIsDequeuedEIOError;
@@ -416,6 +429,10 @@ private:
#ifndef AMLOGIC_USB_CAMERA_SUPPORT
int mRotateValue;
#endif
+ LimittedRate_t LimittedRate;
+ int mLimittedFrameRate;
+ bool mUseMJPEG;
+ bool mSupportMJPEG;
};
}; //// namespace
#endif //V4L_CAMERA_ADAPTER_H
diff --git a/inc/mjpeg/colorspaces.h b/inc/mjpeg/colorspaces.h
new file mode 100755
index 0000000..8f0f325
--- a/dev/null
+++ b/inc/mjpeg/colorspaces.h
@@ -0,0 +1,278 @@
+/*******************************************************************************#
+# guvcview http://guvcview.sourceforge.net #
+# #
+# Paulo Assis <pj.assis@gmail.com> #
+# #
+# This program is free software; you can redistribute it and/or modify #
+# it under the terms of the GNU General Public License as published by #
+# the Free Software Foundation; either version 2 of the License, or #
+# (at your option) any later version. #
+# #
+# This program is distributed in the hope that it will be useful, #
+# but WITHOUT ANY WARRANTY; without even the implied warranty of #
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
+# GNU General Public License for more details. #
+# #
+# You should have received a copy of the GNU General Public License #
+# along with this program; if not, write to the Free Software #
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #
+# #
+********************************************************************************/
+
+#ifndef COLORSPACES_H
+#define COLORSPACES_H
+
+#include "defs.h"
+
+/*convert yuv 420 planar (yu12) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yuv420 planar data frame
+* width: picture width
+* height: picture height
+*/
+void
+yuv420_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert yvu 420 planar (yv12) to yuv 422 (yuyv)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yvu420 planar data frame
+* width: picture width
+* height: picture height
+*/
+void yvu420_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert yuv 420 planar (uv interleaved) (nv12) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yuv420 (nv12) planar data frame
+* width: picture width
+* height: picture height
+*/
+void nv12_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert yuv 420 planar (vu interleaved) (nv21) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yuv420 (nv21) planar data frame
+* width: picture width
+* height: picture height
+*/
+void nv21_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert yuv 422 planar (uv interleaved) (nv16) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yuv422 (nv16) planar data frame
+* width: picture width
+* height: picture height
+*/
+void nv16_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert yuv 422 planar (vu interleaved) (nv61) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yuv422 (nv61) planar data frame
+* width: picture width
+* height: picture height
+*/
+void nv61_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert y10b (bit-packed array greyscale format) to yuyv (packed)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing y10b (bit-packed array) data frame
+* width: picture width
+* height: picture height
+*/
+void y10b_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert y16 (grey) to yuyv (packed)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing y16 (grey) data frame
+* width: picture width
+* height: picture height
+*/
+void y16_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert yyuv to yuyv
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing a yyuv data frame
+* width: picture width
+* height: picture height
+*/
+void
+yyuv_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert uyvy (packed) to yuyv (packed)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing uyvy packed data frame
+* width: picture width
+* height: picture height
+*/
+void uyvy_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert yvyu (packed) to yuyv (packed)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yvyu packed data frame
+* width: picture width
+* height: picture height
+*/
+void yvyu_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert yuv 411 packed (y41p) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing y41p data frame
+* width: picture width
+* height: picture height
+*/
+void y41p_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert yuv mono (grey) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing grey (y only) data frame
+* width: picture width
+* height: picture height
+*/
+void grey_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert SPCA501 (s501) to yuv 422
+* s501 |Y0..width..Y0|U..width/2..U|Y1..width..Y1|V..width/2..V|
+* signed values (-128;+127) must be converted to unsigned (0; 255)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing s501 data frame
+* width: picture width
+* height: picture height
+*/
+void s501_to_yuyv(BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert SPCA505 (s505) to yuv 422
+* s505 |Y0..width..Y0|Y1..width..Y1|U..width/2..U|V..width/2..V|
+* signed values (-128;+127) must be converted to unsigned (0; 255)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing s501 data frame
+* width: picture width
+* height: picture height
+*/
+void s505_to_yuyv(BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert SPCA508 (s508) to yuv 422
+* s508 |Y0..width..Y0|U..width/2..U|V..width/2..V|Y1..width..Y1|
+* signed values (-128;+127) must be converted to unsigned (0; 255)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing s501 data frame
+* width: picture width
+* height: picture height
+*/
+void s508_to_yuyv(BYTE *framebuffer, BYTE *tmpbuffer, int width, int height);
+
+/*convert yuyv to rgb24
+* args:
+* pyuv: pointer to buffer containing yuv data (yuyv)
+* prgb: pointer to buffer containing rgb24 data
+* width: picture width
+* height: picture height
+*/
+void
+yuyv2rgb (BYTE *pyuv, BYTE *prgb, int width, int height);
+
+
+/*convert yuyv to bgr with lines upsidedown
+* used for bitmap files (DIB24)
+* args:
+* pyuv: pointer to buffer containing yuv data (yuyv)
+* prgb: pointer to buffer containing rgb24 data
+* width: picture width
+* height: picture height
+*/
+void
+yuyv2bgr (BYTE *pyuv, BYTE *pbgr, int width, int height);
+
+/* used for rgb video (fourcc="RGB ")
+* lines are in correct order
+*/
+void
+yuyv2bgr1 (BYTE *pyuv, BYTE *pbgr, int width, int height);
+
+/*convert bayer raw data to rgb24
+* args:
+* pBay: pointer to buffer containing Raw bayer data data
+* pRGB24: pointer to buffer containing rgb24 data
+* width: picture width
+* height: picture height
+* pix_order: bayer pixel order (0=gb/rg 1=gr/bg 2=bg/gr 3=rg/bg)
+*/
+void
+bayer_to_rgb24(BYTE *pBay, BYTE *pRGB24, int width, int height, int pix_order);
+
+/*convert rgb24 to yuyv
+* args:
+* prgb: pointer to buffer containing rgb24 data
+* pyuv: pointer to buffer containing yuv data (yuyv)
+* width: picture width
+* height: picture height
+*/
+void
+rgb2yuyv(BYTE *prgb, BYTE *pyuv, int width, int height);
+
+/*convert bgr24 to yuyv
+* args:
+* pbgr: pointer to buffer containing bgr24 data
+* pyuv: pointer to buffer containing yuv data (yuyv)
+* width: picture width
+* height: picture height
+*/
+void
+bgr2yuyv(BYTE *pbgr, BYTE *pyuv, int width, int height);
+
+/*use in utils.c for jpeg decoding 420 planar to 422
+* args:
+* out: pointer to data output of idct (macroblocks yyyy u v)
+* pic: pointer to picture buffer (yuyv)
+* width: picture width
+*/
+void
+yuv420pto422(int * out,unsigned char *pic,int width);
+
+/*use in utils.c for jpeg decoding 422 planar to 422
+* args:
+* out: pointer to data output of idct (macroblocks yyyy u v)
+* pic: pointer to picture buffer (yuyv)
+* width: picture width
+*/
+void
+yuv422pto422(int * out,unsigned char *pic,int width);
+
+void
+yuv422pto420(int * out,unsigned char *pic,int width,unsigned char *uv);
+
+/*use in utils.c for jpeg decoding 444 planar to 422
+* args:
+* out: pointer to data output of idct (macroblocks yyyy u v)
+* pic: pointer to picture buffer (yuyv)
+* width: picture width
+*/
+void
+yuv444pto422(int * out,unsigned char *pic,int width);
+
+/*use in utils.c for jpeg decoding 400 planar to 422
+* args:
+* out: pointer to data output of idct (macroblocks yyyy )
+* pic: pointer to picture buffer (yuyv)
+* width: picture width
+*/
+void
+yuv400pto422(int * out,unsigned char *pic,int width);
+
+#endif
+
diff --git a/inc/mjpeg/defs.h b/inc/mjpeg/defs.h
new file mode 100755
index 0000000..b286a08
--- a/dev/null
+++ b/inc/mjpeg/defs.h
@@ -0,0 +1,57 @@
+/*******************************************************************************#
+# guvcview http://guvcview.sourceforge.net #
+# #
+# Paulo Assis <pj.assis@gmail.com> #
+# #
+# This program is free software; you can redistribute it and/or modify #
+# it under the terms of the GNU General Public License as published by #
+# the Free Software Foundation; either version 2 of the License, or #
+# (at your option) any later version. #
+# #
+# This program is distributed in the hope that it will be useful, #
+# but WITHOUT ANY WARRANTY; without even the implied warranty of #
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
+# GNU General Public License for more details. #
+# #
+# You should have received a copy of the GNU General Public License #
+# along with this program; if not, write to the Free Software #
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #
+# #
+********************************************************************************/
+
+#ifndef DEFS_H
+#define DEFS_H
+#include <inttypes.h>
+#include <sys/types.h>
+
+typedef uint64_t QWORD;
+typedef uint32_t DWORD;
+typedef uint16_t WORD;
+typedef uint8_t BYTE;
+typedef unsigned int LONG;
+typedef unsigned int UINT;
+
+typedef unsigned long long ULLONG;
+typedef unsigned long ULONG;
+
+typedef char* pchar;
+
+typedef int8_t INT8;
+typedef uint8_t UINT8;
+typedef int16_t INT16;
+typedef uint16_t UINT16;
+typedef int32_t INT32;
+typedef uint32_t UINT32;
+typedef int64_t INT64;
+typedef uint64_t UINT64;
+
+/*clip value between 0 and 255*/
+#define CLIP(value) (BYTE)(((value)>0xFF)?0xff:(((value)<0)?0:(value)))
+
+/*MAX macro - gets the bigger value*/
+#ifndef MAX
+#define MAX(a,b) (((a) < (b)) ? (b) : (a))
+#endif
+
+#endif
+
diff --git a/inc/mjpeg/huffman.h b/inc/mjpeg/huffman.h
new file mode 100755
index 0000000..4291844
--- a/dev/null
+++ b/inc/mjpeg/huffman.h
@@ -0,0 +1,99 @@
+/*******************************************************************************#
+# guvcview http://guvcview.sourceforge.net #
+# #
+# Paulo Assis <pj.assis@gmail.com> #
+# #
+# This program is free software; you can redistribute it and/or modify #
+# it under the terms of the GNU General Public License as published by #
+# the Free Software Foundation; either version 2 of the License, or #
+# (at your option) any later version. #
+# #
+# This program is distributed in the hope that it will be useful, #
+# but WITHOUT ANY WARRANTY; without even the implied warranty of #
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
+# GNU General Public License for more details. #
+# #
+# You should have received a copy of the GNU General Public License #
+# along with this program; if not, write to the Free Software #
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #
+# #
+********************************************************************************/
+
+/*******************************************************************************#
+# #
+# huffman tables for Jpeg encoder/decoder #
+# #
+# Adapted for linux, Paulo Assis, 2007 <pj.assis@gmail.com> #
+********************************************************************************/
+
+#ifndef HUFFMAN_H
+#define HUFFMAN_H
+
+#include "defs.h"
+
+#define JPG_HUFFMAN_TABLE_LENGTH 0x01A0
+
+static const unsigned char JPEGHuffmanTable[JPG_HUFFMAN_TABLE_LENGTH] =
+{
+ // luminance dc - length bits
+ 0x00,
+ 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ // luminance dc - code
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
+ 0x0A, 0x0B,
+ // chrominance dc - length bits
+ 0x01,
+ 0x00, 0x03, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
+ // chrominance dc - code
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
+ 0x0A, 0x0B,
+ // luminance ac - number of codes with # bits (ordered by code length 1-16)
+ 0x10,
+ 0x00, 0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x03, 0x05, 0x05,
+ 0x04, 0x04, 0x00, 0x00, 0x01, 0x7D,
+ // luminance ac - run size (ordered by code length)
+ 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12, 0x21, 0x31,
+ 0x41, 0x06, 0x13, 0x51, 0x61, 0x07, 0x22, 0x71, 0x14, 0x32,
+ 0x81, 0x91, 0xA1, 0x08, 0x23, 0x42, 0xB1, 0xC1, 0x15, 0x52,
+ 0xD1, 0xF0, 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0A, 0x16,
+ 0x17, 0x18, 0x19, 0x1A, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A,
+ 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45,
+ 0x46, 0x47, 0x48, 0x49, 0x4A, 0x53, 0x54, 0x55, 0x56, 0x57,
+ 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
+ 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, 0x83,
+ 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94,
+ 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5,
+ 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6,
+ 0xB7, 0xB8, 0xB9, 0xBA, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7,
+ 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, 0xD8,
+ 0xD9, 0xDA, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, 0xE8,
+ 0xE9, 0xEA, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8,
+ 0xF9, 0xFA,
+ // chrominance ac -number of codes with # bits (ordered by code length 1-16)
+ 0x11,
+ 0x00, 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x07, 0x05,
+ 0x04, 0x04, 0x00, 0x01, 0x02, 0x77,
+ // chrominance ac - run size (ordered by code length)
+ 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21, 0x31, 0x06,
+ 0x12, 0x41, 0x51, 0x07, 0x61, 0x71, 0x13, 0x22, 0x32, 0x81,
+ 0x08, 0x14, 0x42, 0x91, 0xA1, 0xB1, 0xC1, 0x09, 0x23, 0x33,
+ 0x52, 0xF0, 0x15, 0x62, 0x72, 0xD1, 0x0A, 0x16, 0x24, 0x34,
+ 0xE1, 0x25, 0xF1, 0x17, 0x18, 0x19, 0x1A, 0x26, 0x27, 0x28,
+ 0x29, 0x2A, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44,
+ 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, 0x53, 0x54, 0x55, 0x56,
+ 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
+ 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A,
+ 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92,
+ 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3,
+ 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4,
+ 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 0xC2, 0xC3, 0xC4, 0xC5,
+ 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6,
+ 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7,
+ 0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8,
+ 0xF9, 0xFA
+};
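+
+/* For reference, the layout above is the raw payload of a DHT segment:
+*  one table-class/id byte, then 16 bytes giving the number of codes of each
+*  length 1..16, then the symbol values in code order.  As an example, the
+*  luminance dc section declares 12 codes (1 of length 2, 5 of length 3,
+*  then one each of lengths 4..9), so huffman_init()/dec_makehuff() in
+*  jpegdec.c assign the canonical codes 00 -> 0x00, 010..110 -> 0x01..0x05,
+*  1110 -> 0x06, 11110 -> 0x07, and so on. */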
+
+#endif
+
diff --git a/inc/mjpeg/jutils.h b/inc/mjpeg/jutils.h
new file mode 100755
index 0000000..2760426
--- a/dev/null
+++ b/inc/mjpeg/jutils.h
@@ -0,0 +1,142 @@
+/*******************************************************************************#
+# guvcview http://guvcview.sourceforge.net #
+# #
+# Paulo Assis <pj.assis@gmail.com> #
+# #
+# This program is free software; you can redistribute it and/or modify #
+# it under the terms of the GNU General Public License as published by #
+# the Free Software Foundation; either version 2 of the License, or #
+# (at your option) any later version. #
+# #
+# This program is distributed in the hope that it will be useful, #
+# but WITHOUT ANY WARRANTY; without even the implied warranty of #
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
+# GNU General Public License for more details. #
+# #
+# You should have received a copy of the GNU General Public License #
+# along with this program; if not, write to the Free Software #
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #
+# #
+********************************************************************************/
+
+/*******************************************************************************#
+# #
+# MJpeg decoding and frame capture taken from luvcview #
+# #
+# #
+********************************************************************************/
+
+#ifndef UTILS_H
+#define UTILS_H
+
+#include "defs.h"
+
+/*video defs*/
+//#define BI_RGB 0;
+//#define BI_RLE4 1;
+//#define BI_RLE8 2;
+//#define BI_BITFIELDS 3;
+
+/* Fixed point arithmetic */
+//#define FIXED Sint32
+//#define FIXED_BITS 16
+//#define TO_FIXED(X) (((Sint32)(X))<<(FIXED_BITS))
+//#define FROM_FIXED(X) (((Sint32)(X))>>(FIXED_BITS))
+
+#define ISHIFT 11
+
+#define IFIX(a) ((int)((a) * (1 << ISHIFT) + .5))
+
+#ifndef __P
+# define __P(x) x
+#endif
+
+/* special markers */
+#define M_BADHUFF -1
+#define M_EOF 0x80
+
+struct jpeg_decdata
+{
+ int dcts[6 * 64 + 16];
+ int out[64 * 6];
+ int dquant[3][64];
+};
+
+struct in
+{
+ BYTE *p;
+ DWORD bits;
+ int left;
+ int marker;
+ int (*func) __P((void *));
+ void *data;
+};
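+
+/* Rough meaning of the fields above (as used by setinput()/fillbits() in
+*  jpegdec.c): p is the read position in the entropy-coded data, bits is a
+*  small read-ahead bit buffer holding 'left' valid bits, marker latches a
+*  marker byte (or M_BADHUFF) once one is met, and func/data form an optional
+*  refill callback that this decoder leaves unset. */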
+
+/*********************************/
+#define DECBITS 10 /* seems to be the optimum */
+
+struct dec_hufftbl
+{
+ int maxcode[17];
+ int valptr[16];
+ BYTE vals[256];
+ DWORD llvals[1 << DECBITS];
+};
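+
+/* dec_hufftbl is a two-level decoder table: llvals[] resolves any code of up
+*  to DECBITS bits (plus its run/size info) with a single lookup, while
+*  maxcode[]/valptr[]/vals[] hold the canonical-code data used by dec_rec2()
+*  for the longer codes.  See the llvals layout comment in jpegdec.c. */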
+
+//struct enc_hufftbl;
+
+union hufftblp
+{
+ struct dec_hufftbl *dhuff;
+ //struct enc_hufftbl *ehuff;
+};
+
+struct scan
+{
+ int dc; /* old dc value */
+
+ union hufftblp hudc;
+ union hufftblp huac;
+ int next; /* when to switch to next scan */
+
+ int cid; /* component id */
+ int hv; /* horiz/vert, copied from comp */
+ int tq; /* quant tbl, copied from comp */
+};
+
+/******** Markers *********/
+#ifndef M_SOI
+#define M_SOI 0xd8
+#define M_APP0 0xe0
+#define M_DQT 0xdb
+#define M_SOF0 0xc0
+#define M_DHT 0xc4
+#define M_DRI 0xdd
+#define M_SOS 0xda
+#define M_RST0 0xd0
+#define M_EOI 0xd9
+#define M_COM 0xfe
+#endif
+
+/*******Error codes *******/
+#define ERR_NO_SOI 1
+#define ERR_NOT_8BIT 2
+#define ERR_HEIGHT_MISMATCH 3
+#define ERR_WIDTH_MISMATCH 4
+#define ERR_BAD_WIDTH_OR_HEIGHT 5
+#define ERR_TOO_MANY_COMPPS 6
+#define ERR_ILLEGAL_HV 7
+#define ERR_QUANT_TABLE_SELECTOR 8
+#define ERR_NOT_YCBCR_221111 9
+#define ERR_UNKNOWN_CID_IN_SCAN 10
+#define ERR_NOT_SEQUENTIAL_DCT 11
+#define ERR_WRONG_MARKER 12
+#define ERR_NO_EOI 13
+#define ERR_BAD_TABLES 14
+#define ERR_DEPTH_MISMATCH 15
+
+
+int jpeg_decode(unsigned char **pic, unsigned char *buf, int width, int height,unsigned int outformat);
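+
+/* Rough usage sketch (illustrative only; the buffer size is an assumption
+*  based on the adapted decoder below, which writes a yuv420 semi-planar
+*  frame: Y plane followed by an interleaved V/U plane; mjpeg_buf stands for
+*  the compressed input and 0 is just a placeholder for outformat):
+*
+*      unsigned char *out = (unsigned char *)malloc(width * height * 3 / 2);
+*      int err = jpeg_decode(&out, mjpeg_buf, width, height, 0);
+*      if (err != 0)
+*          ;  // err is one of the ERR_* codes above
+*/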
+
+#endif
+
diff --git a/mjpeg/colorspaces.c b/mjpeg/colorspaces.c
new file mode 100755
index 0000000..c11f287
--- a/dev/null
+++ b/mjpeg/colorspaces.c
@@ -0,0 +1,1654 @@
+/*******************************************************************************#
+# guvcview http://guvcview.sourceforge.net #
+# #
+# Paulo Assis <pj.assis@gmail.com> #
+# Nobuhiro Iwamatsu <iwamatsu@nigauri.org> #
+# #
+# This program is free software; you can redistribute it and/or modify #
+# it under the terms of the GNU General Public License as published by #
+# the Free Software Foundation; either version 2 of the License, or #
+# (at your option) any later version. #
+# #
+# This program is distributed in the hope that it will be useful, #
+# but WITHOUT ANY WARRANTY; without even the implied warranty of #
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
+# GNU General Public License for more details. #
+# #
+# You should have received a copy of the GNU General Public License #
+# along with this program; if not, write to the Free Software #
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #
+# #
+********************************************************************************/
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include "colorspaces.h"
+#include <stdbool.h>
+#define TRUE 1
+#define FALSE 0
+/*------------------------------- Color space conversions --------------------*/
+/* regular yuv (YUYV) to rgb24*/
+void
+yuyv2rgb (BYTE *pyuv, BYTE *prgb, int width, int height)
+{
+ int l=0;
+ int SizeYUV=height * width * 2; /* 2 bytes per pixel*/
+ for(l=0;l<SizeYUV;l=l+4)
+ { /*iterate every 4 bytes*/
+	/* standard: r = y0 + 1.402 (v-128) */
+ /* logitech: r = y0 + 1.370705 (v-128) */
+ *prgb++=CLIP(pyuv[l] + 1.402 * (pyuv[l+3]-128));
+	/* standard: g = y0 - 0.34414 (u-128) - 0.71414 (v-128)*/
+ /* logitech: g = y0 - 0.337633 (u-128)- 0.698001 (v-128)*/
+ *prgb++=CLIP(pyuv[l] - 0.34414 * (pyuv[l+1]-128) -0.71414*(pyuv[l+3]-128));
+	/* standard: b = y0 + 1.772 (u-128) */
+ /* logitech: b = y0 + 1.732446 (u-128) */
+ *prgb++=CLIP(pyuv[l] + 1.772 *( pyuv[l+1]-128));
+	/* standard: r1 = y1 + 1.402 (v-128) */
+ /* logitech: r1 = y1 + 1.370705 (v-128) */
+ *prgb++=CLIP(pyuv[l+2] + 1.402 * (pyuv[l+3]-128));
+	/* standard: g1 = y1 - 0.34414 (u-128) - 0.71414 (v-128)*/
+ /* logitech: g1 = y1 - 0.337633 (u-128)- 0.698001 (v-128)*/
+ *prgb++=CLIP(pyuv[l+2] - 0.34414 * (pyuv[l+1]-128) -0.71414 * (pyuv[l+3]-128));
+	/* standard: b1 = y1 + 1.772 (u-128) */
+ /* logitech: b1 = y1 + 1.732446 (u-128) */
+ *prgb++=CLIP(pyuv[l+2] + 1.772*(pyuv[l+1]-128));
+ }
+}
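+
+/* Worked example of the formulas above: the YUYV quad (Y0,U,Y1,V) =
+*  (76, 85, 76, 255) decodes both pixels to roughly (R,G,B) = (254, 0, 0),
+*  i.e. saturated red:  R = 76 + 1.402*(255-128) ~ 254,
+*  G = 76 - 0.34414*(85-128) - 0.71414*(255-128) ~ 0,
+*  B = 76 + 1.772*(85-128) ~ 0 (after CLIP). */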
+
+/* used for rgb video (fourcc="RGB ") */
+/* lines are on correct order */
+void
+yuyv2bgr1 (BYTE *pyuv, BYTE *pbgr, int width, int height)
+{
+
+ int l=0;
+ int SizeYUV=height * width * 2; /* 2 bytes per pixel*/
+ for(l=0;l<SizeYUV;l=l+4)
+ { /*iterate every 4 bytes*/
+	/* standard: b = y0 + 1.772 (u-128) */
+ /* logitech: b = y0 + 1.732446 (u-128) */
+ *pbgr++=CLIP(pyuv[l] + 1.772 *( pyuv[l+1]-128));
+	/* standard: g = y0 - 0.34414 (u-128) - 0.71414 (v-128)*/
+ /* logitech: g = y0 - 0.337633 (u-128)- 0.698001 (v-128)*/
+ *pbgr++=CLIP(pyuv[l] - 0.34414 * (pyuv[l+1]-128) -0.71414*(pyuv[l+3]-128));
+	/* standard: r = y0 + 1.402 (v-128) */
+ /* logitech: r = y0 + 1.370705 (v-128) */
+ *pbgr++=CLIP(pyuv[l] + 1.402 * (pyuv[l+3]-128));
+	/* standard: b1 = y1 + 1.772 (u-128) */
+ /* logitech: b1 = y1 + 1.732446 (u-128) */
+ *pbgr++=CLIP(pyuv[l+2] + 1.772*(pyuv[l+1]-128));
+	/* standard: g1 = y1 - 0.34414 (u-128) - 0.71414 (v-128)*/
+ /* logitech: g1 = y1 - 0.337633 (u-128)- 0.698001 (v-128)*/
+ *pbgr++=CLIP(pyuv[l+2] - 0.34414 * (pyuv[l+1]-128) -0.71414 * (pyuv[l+3]-128));
+	/* standard: r1 = y1 + 1.402 (v-128) */
+ /* logitech: r1 = y1 + 1.370705 (v-128) */
+ *pbgr++=CLIP(pyuv[l+2] + 1.402 * (pyuv[l+3]-128));
+ }
+}
+
+/* yuv (YUYV) to bgr with lines upside down */
+/* used for bitmap files (DIB24) */
+void
+yuyv2bgr (BYTE *pyuv, BYTE *pbgr, int width, int height)
+{
+
+ int l=0;
+ int k=0;
+ BYTE *preverse;
+ int bytesUsed;
+ int SizeBGR=height * width * 3; /* 3 bytes per pixel*/
+ /* BMP byte order is bgr and the lines start from last to first*/
+ preverse=pbgr+SizeBGR;/*start at the end and decrement*/
+ for(l=0;l<height;l++)
+ { /*iterate every 1 line*/
+ preverse-=width*3;/*put pointer at begin of unprocessed line*/
+ bytesUsed=l*width*2;
+ for (k=0;k<(width*2);k=k+4)/*iterate every 4 bytes in the line*/
+ {
+			/* standard: b = y0 + 1.772 (u-128) */
+ /* logitech: b = y0 + 1.732446 (u-128) */
+ *preverse++=CLIP(pyuv[k+bytesUsed] + 1.772 *( pyuv[k+1+bytesUsed]-128));
+			/* standard: g = y0 - 0.34414 (u-128) - 0.71414 (v-128)*/
+ /* logitech: g = y0 - 0.337633 (u-128)- 0.698001 (v-128)*/
+ *preverse++=CLIP(pyuv[k+bytesUsed] - 0.34414 * (pyuv[k+1+bytesUsed]-128)
+ -0.71414*(pyuv[k+3+bytesUsed]-128));
+			/* standard: r = y0 + 1.402 (v-128) */
+ /* logitech: r = y0 + 1.370705 (v-128) */
+ *preverse++=CLIP(pyuv[k+bytesUsed] + 1.402 * (pyuv[k+3+bytesUsed]-128));
+			/* standard: b1 = y1 + 1.772 (u-128) */
+ /* logitech: b1 = y1 + 1.732446 (u-128) */
+ *preverse++=CLIP(pyuv[k+2+bytesUsed] + 1.772*(pyuv[k+1+bytesUsed]-128));
+			/* standard: g1 = y1 - 0.34414 (u-128) - 0.71414 (v-128)*/
+ /* logitech: g1 = y1 - 0.337633 (u-128)- 0.698001 (v-128)*/
+ *preverse++=CLIP(pyuv[k+2+bytesUsed] - 0.34414 * (pyuv[k+1+bytesUsed]-128)
+ -0.71414 * (pyuv[k+3+bytesUsed]-128));
+			/* standard: r1 = y1 + 1.402 (v-128) */
+ /* logitech: r1 = y1 + 1.370705 (v-128) */
+ *preverse++=CLIP(pyuv[k+2+bytesUsed] + 1.402 * (pyuv[k+3+bytesUsed]-128));
+ }
+ preverse-=width*3;/*get it back at the begin of processed line*/
+ }
+ preverse=NULL;
+}
+
+/* Unpack buffer of (vw bit) data into padded 16bit buffer. */
+static inline void convert_packed_to_16bit(uint8_t *raw, uint16_t *unpacked, int vw, int unpacked_len)
+{
+ int mask = (1 << vw) - 1;
+ uint32_t buffer = 0;
+ int bitsIn = 0;
+ while (unpacked_len--) {
+ while (bitsIn < vw) {
+ buffer = (buffer << 8) | *(raw++);
+ bitsIn += 8;
+ }
+ bitsIn -= vw;
+ *(unpacked++) = (buffer >> bitsIn) & mask;
+ }
+}
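+
+/* Example with vw = 10 (as used by y10b_to_yuyv below): every 5 input bytes
+*  carry 4 ten-bit samples, and the first output value is
+*  ((raw[0] << 2) | (raw[1] >> 6)) & 0x3FF, i.e. the top 10 bits of the
+*  first 16 bits read into the accumulator. */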
+
+/*convert y10b (bit-packed array greyscale format) to yuyv (packed)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing y10b (bit-packed array) data frame
+* width: picture width
+* height: picture height
+*/
+void y10b_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ UINT16 *unpacked_buffer = NULL;
+ UINT16 *ptmp;
+ int h = 0;
+ int w = 0;
+
+ unpacked_buffer = malloc(width * height * sizeof(UINT16));
+ convert_packed_to_16bit(tmpbuffer, unpacked_buffer, 10, width * height);
+
+ ptmp = unpacked_buffer;
+
+ for (h = 0; h < height; h++)
+ {
+ for (w = 0; w < width; w += 2)
+ {
+ /* Y0 */
+ *framebuffer++ = (BYTE) ((ptmp[0] & 0x3FF) >> 2);
+ /* U */
+ *framebuffer++ = 0x80;
+ /* Y1 */
+ *framebuffer++ = (BYTE) ((ptmp[1] & 0x3FF) >> 2);
+ /* V */
+ *framebuffer++ = 0x80;
+
+ ptmp += 2;
+ }
+ }
+
+ free(unpacked_buffer);
+}
+
+/*convert y16 (grey) to yuyv (packed)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing y16 (grey) data frame
+* width: picture width
+* height: picture height
+*/
+void y16_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ UINT16 *ptmp= (UINT16 *) tmpbuffer;
+
+ int h=0;
+ int w=0;
+
+ for(h=0;h<height;h++)
+ {
+ for(w=0;w<width;w+=2)
+ {
+ /* Y0 */
+ *framebuffer++ = (BYTE) ((ptmp[0] & 0xFF00) >> 8);
+ /* U */
+ *framebuffer++ = 0x80;
+ /* Y1 */
+ *framebuffer++ = (BYTE) ((ptmp[1] & 0xFF00) >> 8);
+ /* V */
+ *framebuffer++ = 0x80;
+
+ ptmp += 2;
+ }
+ }
+}
+
+/*convert yyuv (packed) to yuyv (packed)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yyuv packed data frame
+* width: picture width
+* height: picture height
+*/
+void yyuv_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *ptmp=NULL;
+ BYTE *pfmb=NULL;
+ ptmp = tmpbuffer;
+ pfmb = framebuffer;
+
+ int h=0;
+ int w=0;
+
+ for(h=0;h<height;h++)
+ {
+ for(w=0;w<(width*2);w+=4)
+ {
+ /* Y0 */
+ pfmb[0] = ptmp[0];
+ /* U */
+ pfmb[1] = ptmp[2];
+ /* Y1 */
+ pfmb[2] = ptmp[1];
+ /* V */
+ pfmb[3] = ptmp[3];
+
+ ptmp += 4;
+ pfmb += 4;
+ }
+ }
+}
+
+
+/*convert uyvy (packed) to yuyv (packed)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing uyvy packed data frame
+* width: picture width
+* height: picture height
+*/
+void uyvy_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *ptmp = tmpbuffer;
+ BYTE *pfmb = framebuffer;
+ int h=0;
+ int w=0;
+
+ for(h=0;h<height;h++)
+ {
+ for(w=0;w<(width*2);w+=4)
+ {
+ /* Y0 */
+ pfmb[0] = ptmp[1];
+ /* U */
+ pfmb[1] = ptmp[0];
+ /* Y1 */
+ pfmb[2] = ptmp[3];
+ /* V */
+ pfmb[3] = ptmp[2];
+
+ ptmp += 4;
+ pfmb += 4;
+ }
+ }
+}
+
+
+/*convert yvyu (packed) to yuyv (packed)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yvyu packed data frame
+* width: picture width
+* height: picture height
+*/
+void yvyu_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *ptmp=NULL;
+ BYTE *pfmb=NULL;
+ ptmp = tmpbuffer;
+ pfmb = framebuffer;
+
+ int h=0;
+ int w=0;
+
+ for(h=0;h<height;h++)
+ {
+ for(w=0;w<(width*2);w+=4)
+ {
+ /* Y0 */
+ pfmb[0] = ptmp[0];
+ /* U */
+ pfmb[1] = ptmp[3];
+ /* Y1 */
+ pfmb[2] = ptmp[2];
+ /* V */
+ pfmb[3] = ptmp[1];
+
+ ptmp += 4;
+ pfmb += 4;
+ }
+ }
+}
+
+/*convert yuv 420 planar (yu12) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yuv420 planar data frame
+* width: picture width
+* height: picture height
+*/
+void yuv420_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *py;
+ BYTE *pu;
+ BYTE *pv;
+
+ int linesize = width * 2;
+ int uvlinesize = width / 2;
+ int offset=0;
+ int offset1=0;
+ int offsety=0;
+ int offsety1=0;
+ int offsetuv=0;
+
+ py=tmpbuffer;
+ pu=py+(width*height);
+ pv=pu+(width*height/4);
+
+ int h=0;
+ int w=0;
+
+ int wy=0;
+ int huv=0;
+ int wuv=0;
+
+ for(h=0;h<height;h+=2)
+ {
+ wy=0;
+ wuv=0;
+ offset = h * linesize;
+ offset1 = (h + 1) * linesize;
+ offsety = h * width;
+ offsety1 = (h + 1) * width;
+ offsetuv = huv * uvlinesize;
+
+ for(w=0;w<linesize;w+=4)
+ {
+ /*y00*/
+ framebuffer[w + offset] = py[wy + offsety];
+ /*u0*/
+ framebuffer[(w + 1) + offset] = pu[wuv + offsetuv];
+ /*y01*/
+ framebuffer[(w + 2) + offset] = py[(wy + 1) + offsety];
+ /*v0*/
+ framebuffer[(w + 3) + offset] = pv[wuv + offsetuv];
+
+ /*y10*/
+ framebuffer[w + offset1] = py[wy + offsety1];
+ /*u0*/
+ framebuffer[(w + 1) + offset1] = pu[wuv + offsetuv];
+ /*y11*/
+ framebuffer[(w + 2) + offset1] = py[(wy + 1) + offsety1];
+ /*v0*/
+ framebuffer[(w + 3) + offset1] = pv[wuv + offsetuv];
+
+ wuv++;
+ wy+=2;
+ }
+ huv++;
+ }
+}
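+
+/* In yu12 the U and V planes are width/2 x height/2, so each pass of the
+*  outer loop above emits two yuyv lines that share a single chroma line,
+*  and every 2x2 block of luma samples reuses one (U,V) pair.  For example,
+*  with width = 640 the output pixel at (x=2, y=0) takes its luma from py[2]
+*  and its chroma from pu[1] / pv[1]. */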
+
+/*convert yvu 420 planar (yv12) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yvu420 (yv12) planar data frame
+* width: picture width
+* height: picture height
+*/
+void yvu420_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *py;
+ BYTE *pv;
+ BYTE *pu;
+
+ int linesize = width * 2;
+ int uvlinesize = width / 2;
+ int offset=0;
+ int offset1=0;
+ int offsety=0;
+ int offsety1=0;
+ int offsetuv=0;
+
+ py=tmpbuffer;
+ pv=py+(width*height);
+ pu=pv+((width*height)/4);
+
+ int h=0;
+ int w=0;
+
+ int wy=0;
+ int huv=0;
+ int wuv=0;
+
+ for(h=0;h<height;h+=2)
+ {
+ wy=0;
+ wuv=0;
+ offset = h * linesize;
+ offset1 = (h + 1) * linesize;
+ offsety = h * width;
+ offsety1 = (h + 1) * width;
+ offsetuv = huv * uvlinesize;
+
+ for(w=0;w<linesize;w+=4)
+ {
+ /*y00*/
+ framebuffer[w + offset] = py[wy + offsety];
+ /*u0*/
+ framebuffer[(w + 1) + offset] = pu[wuv + offsetuv];
+ /*y01*/
+ framebuffer[(w + 2) + offset] = py[(wy + 1) + offsety];
+ /*v0*/
+ framebuffer[(w + 3) + offset] = pv[wuv + offsetuv];
+
+ /*y10*/
+ framebuffer[w + offset1] = py[wy + offsety1];
+ /*u0*/
+ framebuffer[(w + 1) + offset1] = pu[wuv + offsetuv];
+ /*y11*/
+ framebuffer[(w + 2) + offset1] = py[(wy + 1) + offsety1];
+ /*v0*/
+ framebuffer[(w + 3) + offset1] = pv[wuv + offsetuv];
+
+ wuv++;
+ wy+=2;
+ }
+ huv++;
+ }
+}
+
+/*convert yuv 420 planar (uv interleaved) (nv12) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yuv420 (nv12) planar data frame
+* width: picture width
+* height: picture height
+*/
+void nv12_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *py;
+ BYTE *puv;
+
+ int linesize = width * 2;
+ int offset=0;
+ int offset1=0;
+ int offsety=0;
+ int offsety1=0;
+ int offsetuv=0;
+
+ py=tmpbuffer;
+ puv=py+(width*height);
+
+ int h=0;
+ int w=0;
+
+ int wy=0;
+ int huv=0;
+ int wuv=0;
+
+ for(h=0;h<height;h+=2)
+ {
+ wy=0;
+ wuv=0;
+ offset = h * linesize;
+ offset1 = (h+1) * linesize;
+ offsety = h * width;
+ offsety1 = (h+1) * width;
+ offsetuv = huv * width;
+ for(w=0;w<linesize;w+=4)
+ {
+ /*y00*/
+ framebuffer[w + offset] = py[wy + offsety];
+ /*u0*/
+ framebuffer[(w + 1) + offset] = puv[wuv + offsetuv];
+ /*y01*/
+ framebuffer[(w + 2) + offset] = py[(wy + 1) + offsety];
+ /*v0*/
+ framebuffer[(w + 3) + offset] = puv[(wuv + 1) + offsetuv];
+
+ /*y10*/
+ framebuffer[w + offset1] = py[wy + offsety1];
+ /*u0*/
+ framebuffer[(w + 1) + offset1] = puv[wuv + offsetuv];
+ /*y11*/
+ framebuffer[(w + 2) + offset1] = py[(wy + 1) + offsety1];
+ /*v0*/
+ framebuffer[(w + 3) + offset1] = puv[(wuv + 1) + offsetuv];
+
+ wuv+=2;
+ wy+=2;
+ }
+ huv++;
+ }
+}
+
+/*convert yuv 420 planar (vu interleaved) (nv21) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yuv420 (nv21) planar data frame
+* width: picture width
+* height: picture height
+*/
+void nv21_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *py;
+ BYTE *puv;
+
+ int linesize = width * 2;
+ int offset=0;
+ int offset1=0;
+ int offsety=0;
+ int offsety1=0;
+ int offsetuv=0;
+
+ py=tmpbuffer;
+ puv=py+(width*height);
+
+ int h=0;
+ int w=0;
+
+ int wy=0;
+ int huv=0;
+ int wuv=0;
+
+ for(h=0;h<height;h+=2)
+ {
+ wy=0;
+ wuv=0;
+ offset = h * linesize;
+ offset1 = (h+1) * linesize;
+ offsety = h * width;
+ offsety1 = (h+1) * width;
+ offsetuv = huv * width;
+ for(w=0;w<linesize;w+=4)
+ {
+ /*y00*/
+ framebuffer[w + offset] = py[wy + offsety];
+ /*u0*/
+ framebuffer[(w + 1) + offset] = puv[(wuv + 1) + offsetuv];
+ /*y01*/
+ framebuffer[(w + 2) + offset] = py[(wy + 1) + offsety];
+ /*v0*/
+ framebuffer[(w + 3) + offset] = puv[wuv + offsetuv];
+
+ /*y10*/
+ framebuffer[w + offset1] = py[wy + offsety1];
+ /*u0*/
+ framebuffer[(w + 1) + offset1] = puv[(wuv + 1) + offsetuv];
+ /*y11*/
+ framebuffer[(w + 2) + offset1] = py[(wy + 1) + offsety1];
+ /*v0*/
+ framebuffer[(w + 3) + offset1] = puv[wuv + offsetuv];
+
+ wuv+=2;
+ wy+=2;
+ }
+ huv++;
+ }
+}
+
+/*convert yuv 422 planar (uv interleaved) (nv16) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yuv422 (nv16) planar data frame
+* width: picture width
+* height: picture height
+*/
+void nv16_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *py;
+ BYTE *puv;
+
+ int linesize = width * 2;
+ int offset=0;
+ int offsety=0;
+ int offsetuv=0;
+
+ py=tmpbuffer;
+ puv=py+(width*height);
+
+ int h=0;
+ int w=0;
+
+ int wy=0;
+ int huv=0;
+ int wuv=0;
+
+ for(h=0;h<height;h++)
+ {
+ wy=0;
+ wuv=0;
+ offset = h * linesize;
+ offsety = h * width;
+ offsetuv = huv * width;
+ for(w=0;w<linesize;w+=4)
+ {
+ /*y00*/
+ framebuffer[w + offset] = py[wy + offsety];
+ /*u0*/
+ framebuffer[(w + 1) + offset] = puv[wuv + offsetuv];
+ /*y01*/
+ framebuffer[(w + 2) + offset] = py[(wy + 1) + offsety];
+ /*v0*/
+ framebuffer[(w + 3) + offset] = puv[(wuv + 1) + offsetuv];
+
+ wuv+=2;
+ wy+=2;
+ }
+ huv++;
+ }
+}
+
+/*convert yuv 422 planar (vu interleaved) (nv61) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing yuv422 (nv61) planar data frame
+* width: picture width
+* height: picture height
+*/
+void nv61_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *py;
+ BYTE *puv;
+
+ int linesize = width * 2;
+ int offset=0;
+ int offsety=0;
+ int offsetuv=0;
+
+ py=tmpbuffer;
+ puv=py+(width*height);
+
+ int h=0;
+ int w=0;
+
+ int wy=0;
+ int huv=0;
+ int wuv=0;
+
+ for(h=0;h<height;h++)
+ {
+ wy=0;
+ wuv=0;
+ offset = h * linesize;
+ offsety = h * width;
+ offsetuv = huv * width;
+ for(w=0;w<linesize;w+=4)
+ {
+ /*y00*/
+ framebuffer[w + offset] = py[wy + offsety];
+ /*u0*/
+ framebuffer[(w + 1) + offset] = puv[(wuv + 1) + offsetuv];
+ /*y01*/
+ framebuffer[(w + 2) + offset] = py[(wy + 1) + offsety];
+ /*v0*/
+ framebuffer[(w + 3) + offset] = puv[wuv + offsetuv];
+
+ wuv+=2;
+ wy+=2;
+ }
+ huv++;
+ }
+}
+
+/*convert yuv 411 packed (y41p) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing y41p data frame
+* width: picture width
+* height: picture height
+*/
+void y41p_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ int h=0;
+ int w=0;
+ int linesize = width * 3 /2;
+ int offset = 0;
+
+ for(h=0;h<height;h++)
+ {
+ offset = linesize * h;
+ for(w=0;w<linesize;w+=12)
+ {
+ *framebuffer++=tmpbuffer[w+1 + offset]; //Y0
+ *framebuffer++=tmpbuffer[w + offset]; //U0
+ *framebuffer++=tmpbuffer[w+3 + offset]; //Y1
+ *framebuffer++=tmpbuffer[w+2 + offset]; //V0
+ *framebuffer++=tmpbuffer[w+5 + offset]; //Y2
+ *framebuffer++=tmpbuffer[w + offset]; //U0
+ *framebuffer++=tmpbuffer[w+7 + offset]; //Y3
+ *framebuffer++=tmpbuffer[w+2 + offset]; //V0
+ *framebuffer++=tmpbuffer[w+8 + offset]; //Y4
+ *framebuffer++=tmpbuffer[w+4 + offset]; //U4
+ *framebuffer++=tmpbuffer[w+9 + offset]; //Y5
+ *framebuffer++=tmpbuffer[w+6 + offset]; //V4
+ *framebuffer++=tmpbuffer[w+10+ offset]; //Y6
+ *framebuffer++=tmpbuffer[w+4 + offset]; //U4
+ *framebuffer++=tmpbuffer[w+11+ offset]; //Y7
+ *framebuffer++=tmpbuffer[w+6 + offset]; //V4
+ }
+ }
+}
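+
+/* y41p packs 8 pixels into every 12 bytes, laid out as
+*  U0 Y0 V0 Y1 U4 Y2 V4 Y3 Y4 Y5 Y6 Y7: pixels 0-3 share (U0,V0) and
+*  pixels 4-7 share (U4,V4), which is exactly the indexing used above. */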
+
+/*convert yuv mono (grey) to yuv 422
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing grey (y only) data frame
+* width: picture width
+* height: picture height
+*/
+void grey_to_yuyv (BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ int h=0;
+ int w=0;
+ int offset = 0;
+
+ for(h=0;h<height;h++)
+ {
+ offset = width * h;
+ for(w=0;w<width;w++)
+ {
+ *framebuffer++=tmpbuffer[w + offset]; //Y
+ *framebuffer++=0x80; //U or V
+ }
+ }
+}
+
+/*convert SPCA501 (s501) to yuv 422
+* s501 |Y0..width..Y0|U..width/2..U|Y1..width..Y1|V..width/2..V|
+* signed values (-128;+127) must be converted to unsigned (0; 255)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing s501 data frame
+* width: picture width
+* height: picture height
+*/
+void s501_to_yuyv(BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *U, *V, *Y0, *Y1;
+ BYTE *line2;
+ int h, w;
+
+	Y0 = tmpbuffer; /*first line*/
+ for (h = 0; h < height/2; h++ )
+ {
+ line2 = framebuffer + width * 2; /* next line */
+ U = Y0 + width;
+ Y1 = U + width / 2;
+ V = Y1 + width;
+ for (w = width / 2; --w >= 0; )
+ {
+ *framebuffer++ = 0x80 + *Y0++;
+ *framebuffer++ = 0x80 + *U;
+ *framebuffer++ = 0x80 + *Y0++;
+ *framebuffer++ = 0x80 + *V;
+
+ *line2++ = 0x80 + *Y1++;
+ *line2++ = 0x80 + *U++;
+ *line2++ = 0x80 + *Y1++;
+ *line2++ = 0x80 + *V++;
+ }
+ Y0 += width * 2; /* next block of lines */
+ framebuffer = line2;
+ }
+}
+
+/*convert SPCA505 (s505) to yuv 422
+* s505 |Y0..width..Y0|Y1..width..Y1|U..width/2..U|V..width/2..V|
+* signed values (-128;+127) must be converted to unsigned (0; 255)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing s505 data frame
+* width: picture width
+* height: picture height
+*/
+void s505_to_yuyv(BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *U, *V, *Y0, *Y1;
+ BYTE *line2;
+ int h, w;
+
+	Y0 = tmpbuffer; /*first line*/
+ for (h = 0; h < height/2; h++ )
+ {
+ line2 = framebuffer + width * 2; /* next line */
+ Y1 = Y0 + width;
+ U = Y1 + width;
+ V = U + width/2;
+ for (w = width / 2; --w >= 0; )
+ {
+ *framebuffer++ = 0x80 + *Y0++;
+ *framebuffer++ = 0x80 + *U;
+ *framebuffer++ = 0x80 + *Y0++;
+ *framebuffer++ = 0x80 + *V;
+
+ *line2++ = 0x80 + *Y1++;
+ *line2++ = 0x80 + *U++;
+ *line2++ = 0x80 + *Y1++;
+ *line2++ = 0x80 + *V++;
+ }
+ Y0 += width * 2; /* next block of lines */
+ framebuffer = line2;
+ }
+}
+
+/*convert SPCA508 (s508) to yuv 422
+* s508 |Y0..width..Y0|U..width/2..U|V..width/2..V|Y1..width..Y1|
+* signed values (-128;+127) must be converted to unsigned (0; 255)
+* args:
+* framebuffer: pointer to frame buffer (yuyv)
+* tmpbuffer: pointer to temp buffer containing s508 data frame
+* width: picture width
+* height: picture height
+*/
+void s508_to_yuyv(BYTE *framebuffer, BYTE *tmpbuffer, int width, int height)
+{
+ BYTE *U, *V, *Y0, *Y1;
+ BYTE *line2;
+ int h, w;
+
+	Y0 = tmpbuffer; /*first line*/
+ for (h = 0; h < height/2; h++ )
+ {
+ line2 = framebuffer + width * 2; /* next line */
+ U = Y0 + width;
+ V = U + width/2;
+ Y1= V + width/2;
+ for (w = width / 2; --w >= 0; )
+ {
+ *framebuffer++ = 0x80 + *Y0++;
+ *framebuffer++ = 0x80 + *U;
+ *framebuffer++ = 0x80 + *Y0++;
+ *framebuffer++ = 0x80 + *V;
+
+ *line2++ = 0x80 + *Y1++;
+ *line2++ = 0x80 + *U++;
+ *line2++ = 0x80 + *Y1++;
+ *line2++ = 0x80 + *V++;
+ }
+ Y0 += width * 2; /* next block of lines */
+ framebuffer = line2;
+ }
+}
+
+// raw bayer functions
+// from libv4l bayer.c, (C) 2008 Hans de Goede <j.w.r.degoede@hhs.nl>
+//Note: original bayer_to_bgr24 code from :
+// 1394-Based Digital Camera Control Library
+//
+// Bayer pattern decoding functions
+//
+// Written by Damien Douxchamps and Frederic Devernay
+static void convert_border_bayer_line_to_bgr24( BYTE* bayer, BYTE* adjacent_bayer,
+ BYTE *bgr, int width, bool start_with_green, bool blue_line)
+{
+ int t0, t1;
+
+ if (start_with_green)
+ {
+ /* First pixel */
+ if (blue_line)
+ {
+ *bgr++ = bayer[1];
+ *bgr++ = bayer[0];
+ *bgr++ = adjacent_bayer[0];
+ }
+ else
+ {
+ *bgr++ = adjacent_bayer[0];
+ *bgr++ = bayer[0];
+ *bgr++ = bayer[1];
+ }
+ /* Second pixel */
+ t0 = (bayer[0] + bayer[2] + adjacent_bayer[1] + 1) / 3;
+ t1 = (adjacent_bayer[0] + adjacent_bayer[2] + 1) >> 1;
+ if (blue_line)
+ {
+ *bgr++ = bayer[1];
+ *bgr++ = t0;
+ *bgr++ = t1;
+ }
+ else
+ {
+ *bgr++ = t1;
+ *bgr++ = t0;
+ *bgr++ = bayer[1];
+ }
+ bayer++;
+ adjacent_bayer++;
+ width -= 2;
+ }
+ else
+ {
+ /* First pixel */
+ t0 = (bayer[1] + adjacent_bayer[0] + 1) >> 1;
+ if (blue_line)
+ {
+ *bgr++ = bayer[0];
+ *bgr++ = t0;
+ *bgr++ = adjacent_bayer[1];
+ }
+ else
+ {
+ *bgr++ = adjacent_bayer[1];
+ *bgr++ = t0;
+ *bgr++ = bayer[0];
+ }
+ width--;
+ }
+
+ if (blue_line)
+ {
+ for ( ; width > 2; width -= 2)
+ {
+ t0 = (bayer[0] + bayer[2] + 1) >> 1;
+ *bgr++ = t0;
+ *bgr++ = bayer[1];
+ *bgr++ = adjacent_bayer[1];
+ bayer++;
+ adjacent_bayer++;
+
+ t0 = (bayer[0] + bayer[2] + adjacent_bayer[1] + 1) / 3;
+ t1 = (adjacent_bayer[0] + adjacent_bayer[2] + 1) >> 1;
+ *bgr++ = bayer[1];
+ *bgr++ = t0;
+ *bgr++ = t1;
+ bayer++;
+ adjacent_bayer++;
+ }
+ }
+ else
+ {
+ for ( ; width > 2; width -= 2)
+ {
+ t0 = (bayer[0] + bayer[2] + 1) >> 1;
+ *bgr++ = adjacent_bayer[1];
+ *bgr++ = bayer[1];
+ *bgr++ = t0;
+ bayer++;
+ adjacent_bayer++;
+
+ t0 = (bayer[0] + bayer[2] + adjacent_bayer[1] + 1) / 3;
+ t1 = (adjacent_bayer[0] + adjacent_bayer[2] + 1) >> 1;
+ *bgr++ = t1;
+ *bgr++ = t0;
+ *bgr++ = bayer[1];
+ bayer++;
+ adjacent_bayer++;
+ }
+ }
+
+ if (width == 2)
+ {
+ /* Second to last pixel */
+ t0 = (bayer[0] + bayer[2] + 1) >> 1;
+ if (blue_line)
+ {
+ *bgr++ = t0;
+ *bgr++ = bayer[1];
+ *bgr++ = adjacent_bayer[1];
+ }
+ else
+ {
+ *bgr++ = adjacent_bayer[1];
+ *bgr++ = bayer[1];
+ *bgr++ = t0;
+ }
+ /* Last pixel */
+ t0 = (bayer[1] + adjacent_bayer[2] + 1) >> 1;
+ if (blue_line)
+ {
+ *bgr++ = bayer[2];
+ *bgr++ = t0;
+ *bgr++ = adjacent_bayer[1];
+ }
+ else
+ {
+ *bgr++ = adjacent_bayer[1];
+ *bgr++ = t0;
+ *bgr++ = bayer[2];
+ }
+ }
+ else
+ {
+ /* Last pixel */
+ if (blue_line)
+ {
+ *bgr++ = bayer[0];
+ *bgr++ = bayer[1];
+ *bgr++ = adjacent_bayer[1];
+ }
+ else
+ {
+ *bgr++ = adjacent_bayer[1];
+ *bgr++ = bayer[1];
+ *bgr++ = bayer[0];
+ }
+ }
+}
+
+/* From libdc1394, which in turn was based on OpenCV's Bayer decoding */
+static void bayer_to_rgbbgr24(BYTE *bayer,
+ BYTE *bgr, int width, int height,
+ bool start_with_green, bool blue_line)
+{
+ /* render the first line */
+ convert_border_bayer_line_to_bgr24(bayer, bayer + width, bgr, width,
+ start_with_green, blue_line);
+ bgr += width * 3;
+
+ /* reduce height by 2 because of the special case top/bottom line */
+ for (height -= 2; height; height--)
+ {
+ int t0, t1;
+ /* (width - 2) because of the border */
+ BYTE *bayerEnd = bayer + (width - 2);
+
+ if (start_with_green)
+ {
+ /* OpenCV has a bug in the next line, which was
+ t0 = (bayer[0] + bayer[width * 2] + 1) >> 1; */
+ t0 = (bayer[1] + bayer[width * 2 + 1] + 1) >> 1;
+ /* Write first pixel */
+ t1 = (bayer[0] + bayer[width * 2] + bayer[width + 1] + 1) / 3;
+ if (blue_line)
+ {
+ *bgr++ = t0;
+ *bgr++ = t1;
+ *bgr++ = bayer[width];
+ }
+ else
+ {
+ *bgr++ = bayer[width];
+ *bgr++ = t1;
+ *bgr++ = t0;
+ }
+
+ /* Write second pixel */
+ t1 = (bayer[width] + bayer[width + 2] + 1) >> 1;
+ if (blue_line)
+ {
+ *bgr++ = t0;
+ *bgr++ = bayer[width + 1];
+ *bgr++ = t1;
+ }
+ else
+ {
+ *bgr++ = t1;
+ *bgr++ = bayer[width + 1];
+ *bgr++ = t0;
+ }
+ bayer++;
+ }
+ else
+ {
+ /* Write first pixel */
+ t0 = (bayer[0] + bayer[width * 2] + 1) >> 1;
+ if (blue_line)
+ {
+ *bgr++ = t0;
+ *bgr++ = bayer[width];
+ *bgr++ = bayer[width + 1];
+ }
+ else
+ {
+ *bgr++ = bayer[width + 1];
+ *bgr++ = bayer[width];
+ *bgr++ = t0;
+ }
+ }
+
+ if (blue_line)
+ {
+ for (; bayer <= bayerEnd - 2; bayer += 2)
+ {
+ t0 = (bayer[0] + bayer[2] + bayer[width * 2] +
+ bayer[width * 2 + 2] + 2) >> 2;
+ t1 = (bayer[1] + bayer[width] +
+ bayer[width + 2] + bayer[width * 2 + 1] +
+ 2) >> 2;
+ *bgr++ = t0;
+ *bgr++ = t1;
+ *bgr++ = bayer[width + 1];
+
+ t0 = (bayer[2] + bayer[width * 2 + 2] + 1) >> 1;
+ t1 = (bayer[width + 1] + bayer[width + 3] +
+ 1) >> 1;
+ *bgr++ = t0;
+ *bgr++ = bayer[width + 2];
+ *bgr++ = t1;
+ }
+ }
+ else
+ {
+ for (; bayer <= bayerEnd - 2; bayer += 2)
+ {
+ t0 = (bayer[0] + bayer[2] + bayer[width * 2] +
+ bayer[width * 2 + 2] + 2) >> 2;
+ t1 = (bayer[1] + bayer[width] +
+ bayer[width + 2] + bayer[width * 2 + 1] +
+ 2) >> 2;
+ *bgr++ = bayer[width + 1];
+ *bgr++ = t1;
+ *bgr++ = t0;
+
+ t0 = (bayer[2] + bayer[width * 2 + 2] + 1) >> 1;
+ t1 = (bayer[width + 1] + bayer[width + 3] +
+ 1) >> 1;
+ *bgr++ = t1;
+ *bgr++ = bayer[width + 2];
+ *bgr++ = t0;
+ }
+ }
+
+ if (bayer < bayerEnd)
+ {
+ /* write second to last pixel */
+ t0 = (bayer[0] + bayer[2] + bayer[width * 2] +
+ bayer[width * 2 + 2] + 2) >> 2;
+ t1 = (bayer[1] + bayer[width] +
+ bayer[width + 2] + bayer[width * 2 + 1] +
+ 2) >> 2;
+ if (blue_line)
+ {
+ *bgr++ = t0;
+ *bgr++ = t1;
+ *bgr++ = bayer[width + 1];
+ }
+ else
+ {
+ *bgr++ = bayer[width + 1];
+ *bgr++ = t1;
+ *bgr++ = t0;
+ }
+ /* write last pixel */
+ t0 = (bayer[2] + bayer[width * 2 + 2] + 1) >> 1;
+ if (blue_line)
+ {
+ *bgr++ = t0;
+ *bgr++ = bayer[width + 2];
+ *bgr++ = bayer[width + 1];
+ }
+ else
+ {
+ *bgr++ = bayer[width + 1];
+ *bgr++ = bayer[width + 2];
+ *bgr++ = t0;
+ }
+ bayer++;
+ }
+ else
+ {
+ /* write last pixel */
+ t0 = (bayer[0] + bayer[width * 2] + 1) >> 1;
+ t1 = (bayer[1] + bayer[width * 2 + 1] + bayer[width] + 1) / 3;
+ if (blue_line)
+ {
+ *bgr++ = t0;
+ *bgr++ = t1;
+ *bgr++ = bayer[width + 1];
+ }
+ else
+ {
+ *bgr++ = bayer[width + 1];
+ *bgr++ = t1;
+ *bgr++ = t0;
+ }
+ }
+
+ /* skip 2 border pixels */
+ bayer += 2;
+
+ blue_line = !blue_line;
+ start_with_green = !start_with_green;
+ }
+
+ /* render the last line */
+ convert_border_bayer_line_to_bgr24(bayer + width, bayer, bgr, width,
+ !start_with_green, !blue_line);
+}
+
+/*convert bayer raw data to rgb24
+* args:
+* pBay: pointer to buffer containing raw Bayer data
+* pRGB24: pointer to buffer containing rgb24 data
+* width: picture width
+* height: picture height
+* pix_order: bayer pixel order (0=gb/rg 1=gr/bg 2=bg/gr 3=rg/bg)
+*/
+void
+bayer_to_rgb24(BYTE *pBay, BYTE *pRGB24, int width, int height, int pix_order)
+{
+ switch (pix_order)
+ {
+	//conversion functions are built for bgr; by switching the b and r lines we get rgb
+ case 0: /* gbgbgb... | rgrgrg... (V4L2_PIX_FMT_SGBRG8)*/
+ bayer_to_rgbbgr24(pBay, pRGB24, width, height, TRUE, FALSE);
+ break;
+
+ case 1: /* grgrgr... | bgbgbg... (V4L2_PIX_FMT_SGRBG8)*/
+ bayer_to_rgbbgr24(pBay, pRGB24, width, height, TRUE, TRUE);
+ break;
+
+ case 2: /* bgbgbg... | grgrgr... (V4L2_PIX_FMT_SBGGR8)*/
+ bayer_to_rgbbgr24(pBay, pRGB24, width, height, FALSE, FALSE);
+ break;
+
+		case 3: /* rgrgrg... | gbgbgb... (V4L2_PIX_FMT_SRGGB8)*/
+ bayer_to_rgbbgr24(pBay, pRGB24, width, height, FALSE, TRUE);
+ break;
+
+ default: /* default is 0*/
+ bayer_to_rgbbgr24(pBay, pRGB24, width, height, TRUE, FALSE);
+ break;
+ }
+}
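+
+/* Example: a V4L2_PIX_FMT_SBGGR8 frame (first row bgbg..., second row
+*  grgr...) is decoded with bayer_to_rgb24(raw, rgb, w, h, 2); since the
+*  helpers above are written for bgr output, passing the opposite blue_line
+*  flag swaps the b and r planes and yields rgb24. */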
+
+
+void
+rgb2yuyv(BYTE *prgb, BYTE *pyuv, int width, int height)
+{
+
+ int i=0;
+ for(i=0;i<(width*height*3);i=i+6)
+ {
+ /* y */
+ *pyuv++ =CLIP(0.299 * (prgb[i] - 128) + 0.587 * (prgb[i+1] - 128) + 0.114 * (prgb[i+2] - 128) + 128);
+ /* u */
+ *pyuv++ =CLIP(((- 0.147 * (prgb[i] - 128) - 0.289 * (prgb[i+1] - 128) + 0.436 * (prgb[i+2] - 128) + 128) +
+ (- 0.147 * (prgb[i+3] - 128) - 0.289 * (prgb[i+4] - 128) + 0.436 * (prgb[i+5] - 128) + 128))/2);
+ /* y1 */
+ *pyuv++ =CLIP(0.299 * (prgb[i+3] - 128) + 0.587 * (prgb[i+4] - 128) + 0.114 * (prgb[i+5] - 128) + 128);
+ /* v*/
+ *pyuv++ =CLIP(((0.615 * (prgb[i] - 128) - 0.515 * (prgb[i+1] - 128) - 0.100 * (prgb[i+2] - 128) + 128) +
+ (0.615 * (prgb[i+3] - 128) - 0.515 * (prgb[i+4] - 128) - 0.100 * (prgb[i+5] - 128) + 128))/2);
+ }
+}
+
+void
+bgr2yuyv(BYTE *pbgr, BYTE *pyuv, int width, int height)
+{
+
+ int i=0;
+ for(i=0;i<(width*height*3);i=i+6)
+ {
+ /* y */
+ *pyuv++ =CLIP(0.299 * (pbgr[i+2] - 128) + 0.587 * (pbgr[i+1] - 128) + 0.114 * (pbgr[i] - 128) + 128);
+ /* u */
+ *pyuv++ =CLIP(((- 0.147 * (pbgr[i+2] - 128) - 0.289 * (pbgr[i+1] - 128) + 0.436 * (pbgr[i] - 128) + 128) +
+ (- 0.147 * (pbgr[i+5] - 128) - 0.289 * (pbgr[i+4] - 128) + 0.436 * (pbgr[i+3] - 128) + 128))/2);
+ /* y1 */
+ *pyuv++ =CLIP(0.299 * (pbgr[i+5] - 128) + 0.587 * (pbgr[i+4] - 128) + 0.114 * (pbgr[i+3] - 128) + 128);
+ /* v*/
+ *pyuv++ =CLIP(((0.615 * (pbgr[i+2] - 128) - 0.515 * (pbgr[i+1] - 128) - 0.100 * (pbgr[i] - 128) + 128) +
+ (0.615 * (pbgr[i+5] - 128) - 0.515 * (pbgr[i+4] - 128) - 0.100 * (pbgr[i+3] - 128) + 128))/2);
+ }
+}
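+
+/* Note on the two converters above: subtracting 128 from each colour channel
+*  before the luma weights and adding 128 back is algebraically the plain
+*  Y = 0.299R + 0.587G + 0.114B, since the weights sum to 1; likewise the U
+*  and V rows average the chroma of the two pixels in each pair, which is
+*  what the 4:2:2 output needs. */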
+
+/*used by the jpeg decoder (jpegdec.c) for 420 planar to 422
+* args:
+* out: pointer to data output of idct (macroblocks yyyy u v)
+* pic: pointer to picture buffer (yuyv)
+* width: picture width
+*/
+void yuv420pto422(int * out,unsigned char *pic,int width)
+{
+ int j, k;
+ unsigned char *pic0, *pic1;
+ int *outy, *outu, *outv;
+ int outy1 = 0;
+ int outy2 = 8;
+
+ //yyyyuv
+ pic0 = pic;
+ pic1 = pic + width;
+ outy = out;
+ outu = out + 64 * 4;
+ outv = out + 64 * 5;
+ for (j = 0; j < 8; j++)
+ {
+ for (k = 0; k < 8; k++)
+ {
+ if( k == 4)
+ {
+ outy1 += 56;
+ outy2 += 56;
+ }
+ *pic0++ = CLIP(outy[outy1]); //y1 line 1
+ *pic0++ = CLIP(128 + *outu); //u line 1-2
+ *pic0++ = CLIP(outy[outy1+1]); //y2 line 1
+ *pic0++ = CLIP(128 + *outv); //v line 1-2
+ *pic1++ = CLIP(outy[outy2]); //y1 line 2
+ *pic1++ = CLIP(128 + *outu); //u line 1-2
+ *pic1++ = CLIP(outy[outy2+1]); //y2 line 2
+ *pic1++ = CLIP(128 + *outv); //v line 1-2
+ outy1 +=2; outy2 += 2; outu++; outv++;
+ }
+ if(j==3)
+ {
+ outy = out + 128;
+ }
+ else
+ {
+ outy += 16;
+ }
+ outy1 = 0;
+ outy2 = 8;
+ pic0 += 2 * (width -16);
+ pic1 += 2 * (width -16);
+ }
+}
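+
+/* Layout assumed by the converter above: 'out' holds the four 8x8 luma
+*  blocks of a 16x16 macroblock followed by one U and one V block (offsets
+*  64*4 and 64*5).  Each j iteration writes two output lines; the '+= 56'
+*  jump at k == 4 hops from the left luma block to the right one on the same
+*  row, and the reset at j == 3 switches to the bottom pair of luma blocks
+*  (out + 128). */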
+
+/*used by the jpeg decoder (jpegdec.c) for 422 planar to 422
+* args:
+* out: pointer to data output of idct (macroblocks yyyy u v)
+* pic: pointer to picture buffer (yuyv)
+* width: picture width
+*/
+void yuv422pto422(int * out,unsigned char *pic,int width)
+{
+ int j, k;
+ unsigned char *pic0, *pic1;
+ int *outy, *outu, *outv;
+ int outy1 = 0;
+ int outy2 = 8;
+ int outu1 = 0;
+ int outv1 = 0;
+
+ //yyyyuv
+ pic0 = pic;
+ pic1 = pic + width;
+ outy = out;
+ outu = out + 64 * 4;
+ outv = out + 64 * 5;
+ for (j = 0; j < 4; j++)
+ {
+ for (k = 0; k < 8; k++)
+ {
+ if( k == 4)
+ {
+ outy1 += 56;
+ outy2 += 56;
+ }
+ *pic0++ = CLIP(outy[outy1]); //y1 line 1
+ *pic0++ = CLIP(128 + outu[outu1]); //u line 1
+ *pic0++ = CLIP(outy[outy1+1]); //y2 line 1
+ *pic0++ = CLIP(128 + outv[outv1]); //v line 1
+ *pic1++ = CLIP(outy[outy2]); //y1 line 2
+ *pic1++ = CLIP(128 + outu[outu1+8]);//u line 2
+ *pic1++ = CLIP(outy[outy2+1]); //y2 line 2
+ *pic1++ = CLIP(128 + outv[outv1+8]);//v line 2
+ outv1 += 1; outu1 += 1;
+ outy1 +=2; outy2 +=2;
+ }
+ outy += 16;outu +=8; outv +=8;
+ outv1 = 0; outu1=0;
+ outy1 = 0;
+ outy2 = 8;
+ pic0 += 2 * (width -16);
+ pic1 += 2 * (width -16);
+ }
+}
+#if 1
+/*used by the jpeg decoder (jpegdec.c) for 422 planar to 420
+* args:
+* out: pointer to data output of idct (macroblocks yyyy u v)
+* pic: pointer to picture buffer (yuyv)
+* width: picture width
+*/
+void yuv422pto420(int * out,unsigned char *pic,int width, unsigned char *uv)
+{
+ int j, k;
+ unsigned char *pic0, *pic1;
+ int *outy, *outu, *outv;
+ int *outy1 ;
+ int *outy2 ;
+ int *outu1 ;
+ int *outv1 ;
+
+ //yyyyuv
+ pic0 = pic;
+ pic1 = pic + width;
+ outy = out;
+ outu = out + 64 * 4;
+ outv = out + 64 * 5;
+
+ for (j = 0; j < 4; j++)
+ {
+ outy1 = outy;
+ outy2 = outy+8;
+ outv1 = outv;
+ outu1 = outu;
+
+ for (k = 0; k < 2; k++)
+ {
+ asm volatile(
+ "mov r0,#0 \n\t"
+ "vdup.u32 d30, r0 \n\t"
+ "mov r0,#255 \n\t"
+ "vdup.u32 d31, r0 \n\t"
+
+ /////////////////////////////line1
+ "mov r0, #256 @256=64*4\n\t"
+ "vld4.32 {d26,d27,d28,d29}, [%[outy1]], r0 \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[pic0]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[pic0]]! \n\t"
+
+ /////////////////////////////line2
+ "vld4.32 {d26,d27,d28,d29}, [%[outy2]],r0 \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[pic1]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[pic1]]! \n\t"
+
+ //////////////////////////////uv
+ "mov r0, #16 @16=4*4 \n\t"
+ "vld4.32 {d22,d24,d26,d28}, [%[outv1]], r0 \n\t"
+ "vld4.32 {d23,d25,d27,d29}, [%[outu1]], r0 \n\t"
+
+ "mov r0, #128 \n\t"
+ "vdup.u32 d30, r0 \n\t"
+ "vqadd.s32 d22, d22, d30 \n\t"
+ "vqadd.s32 d23, d23, d30 \n\t"
+ "vqadd.s32 d24, d24, d30 \n\t"
+ "vqadd.s32 d25, d25, d30 \n\t"
+ "vqadd.s32 d26, d26, d30 \n\t"
+ "vqadd.s32 d27, d27, d30 \n\t"
+ "vqadd.s32 d28, d28, d30 \n\t"
+ "vqadd.s32 d29, d29, d30 \n\t"
+
+ "mov r0, #0 \n\t"
+ "vdup.u32 d30, r0 \n\t"
+
+ "vmax.s32 d22, d22, d30 \n\t"
+ "vmin.s32 d22, d22, d31 \n\t"
+ "vmax.s32 d24, d24, d30 \n\t"
+ "vmin.s32 d24, d24, d31 \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+
+ "vmax.s32 d23, d23, d30 \n\t"
+ "vmin.s32 d23, d23, d31 \n\t"
+ "vmax.s32 d25, d25, d30 \n\t"
+ "vmin.s32 d25, d25, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+
+ "vst4.8 {d22[0],d23[0],d24[0],d25[0]}, [%[uv]]! \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[uv]]! \n\t"
+//////////////////////////////
+
+ "4:@end \n\t"
+ : [outy1] "+r" (outy1), [outy2] "+r" (outy2),
+ [pic0] "+r" (pic0), [pic1] "+r" (pic1),
+ [outu1] "+r" (outu1), [outv1] "+r" (outv1),
+ [uv] "+r" (uv)
+ : [width] "r" (width)
+ : "cc", "memory", "r0","r1", "r2", "r4", "q0", "q1"
+ );
+ }
+ outy += 16;outu +=8; outv +=8;
+ pic0 += 2 * (width - 8);
+ pic1 += 2 * (width - 8);
+ uv += width - 16;
+ }
+}
+#else
+void yuv422pto420(int * out,unsigned char *pic,int width, unsigned char *uv)
+{
+ int j, k;
+ unsigned char *pic0, *pic1;
+ int *outy, *outu, *outv;
+ int outy1 = 0;
+ int outy2 = 8;
+ int outu1 = 0;
+ int outv1 = 0;
+
+ //yyyyuv
+ pic0 = pic;
+ pic1 = pic + width;
+ outy = out;
+ outu = out + 64 * 4;
+ outv = out + 64 * 5;
+ for (j = 0; j < 4; j++)
+ {
+ for (k = 0; k < 8; k++)
+ {
+ if( k == 4)
+ {
+ outy1 += 56;
+ outy2 += 56;
+ }
+ *pic0++ = CLIP(outy[outy1]); //y1 line 1
+ *pic0++ = CLIP(outy[outy1+1]); //y2 line 1
+ *pic1++ = CLIP(outy[outy2]); //y1 line 2
+ *pic1++ = CLIP(outy[outy2+1]); //y2 line 2
+
+ *uv++ = CLIP(128 + outv[outv1]); //v line 1
+ *uv++ = CLIP(128 + outu[outu1]); //u line 1
+ outv1 += 1; outu1 += 1;
+ outy1 +=2; outy2 +=2;
+ }
+ outy += 16;outu +=8; outv +=8;
+ outv1 = 0; outu1=0;
+ outy1 = 0;
+ outy2 = 8;
+ pic0 += 2 * (width - 8);
+ pic1 += 2 * (width - 8);
+ uv += width - 16;
+ }
+
+}
+#endif
+/*used by the jpeg decoder (jpegdec.c) for 444 planar to 422
+* args:
+* out: pointer to data output of idct (macroblocks yyyy u v)
+* pic: pointer to picture buffer (yuyv)
+* width: picture width
+*/
+void yuv444pto422(int * out,unsigned char *pic,int width)
+{
+ int j, k;
+ unsigned char *pic0, *pic1;
+ int *outy, *outu, *outv;
+ int outy1 = 0;
+ int outy2 = 8;
+ int outu1 = 0;
+ int outv1 = 0;
+
+ //yyyyuv
+ pic0 = pic;
+ pic1 = pic + width;
+ outy = out;
+ outu = out + 64 * 4; // Ooops where did i invert ??
+ outv = out + 64 * 5;
+ for (j = 0; j < 4; j++)
+ {
+ for (k = 0; k < 4; k++)
+ {
+ *pic0++ =CLIP( outy[outy1]); //y1 line 1
+ *pic0++ =CLIP( 128 + outu[outu1]); //u line 1
+ *pic0++ =CLIP( outy[outy1+1]); //y2 line 1
+ *pic0++ =CLIP( 128 + outv[outv1]); //v line 1
+ *pic1++ =CLIP( outy[outy2]); //y1 line 2
+ *pic1++ =CLIP( 128 + outu[outu1+8]);//u line 2
+ *pic1++ =CLIP( outy[outy2+1]); //y2 line 2
+ *pic1++ =CLIP( 128 + outv[outv1+8]);//v line 2
+ outv1 += 2; outu1 += 2;
+ outy1 +=2; outy2 +=2;
+ }
+ outy += 16;outu +=16; outv +=16;
+ outv1 = 0; outu1=0;
+ outy1 = 0;
+ outy2 = 8;
+ pic0 += 2 * (width -8);
+ pic1 += 2 * (width -8);
+ }
+}
+
+/*used by the jpeg decoder (jpegdec.c) for 400 planar to 422
+* args:
+* out: pointer to data output of idct (macroblocks yyyy )
+* pic: pointer to picture buffer (yuyv)
+* width: picture width
+*/
+void yuv400pto422(int * out,unsigned char *pic,int width)
+{
+ int j, k;
+ unsigned char *pic0, *pic1;
+ int *outy ;
+ int outy1 = 0;
+ int outy2 = 8;
+ pic0 = pic;
+ pic1 = pic + width;
+ outy = out;
+
+ //yyyy
+ for (j = 0; j < 4; j++)
+ {
+ for (k = 0; k < 4; k++)
+ {
+ *pic0++ = CLIP(outy[outy1]); //y1 line 1
+ *pic0++ = 128 ; //u
+ *pic0++ = CLIP(outy[outy1+1]);//y2 line 1
+ *pic0++ = 128 ; //v
+ *pic1++ = CLIP(outy[outy2]); //y1 line 2
+ *pic1++ = 128 ; //u
+ *pic1++ = CLIP(outy[outy2+1]);//y2 line 2
+ *pic1++ = 128 ; //v
+ outy1 +=2; outy2 +=2;
+ }
+ outy += 16;
+ outy1 = 0;
+ outy2 = 8;
+ pic0 += 2 * (width -8);
+ pic1 += 2 * (width -8);
+ }
+}
+
diff --git a/mjpeg/jpegdec.c b/mjpeg/jpegdec.c
new file mode 100755
index 0000000..cfecaf4
--- a/dev/null
+++ b/mjpeg/jpegdec.c
@@ -0,0 +1,1003 @@
+/*******************************************************************************#
+# guvcview http://guvcview.sourceforge.net #
+# #
+# Paulo Assis <pj.assis@gmail.com> #
+# Nobuhiro Iwamatsu <iwamatsu@nigauri.org> #
+# Add UYVY color support(Macbook iSight) #
+# #
+# This program is free software; you can redistribute it and/or modify #
+# it under the terms of the GNU General Public License as published by #
+# the Free Software Foundation; either version 2 of the License, or #
+# (at your option) any later version. #
+# #
+# This program is distributed in the hope that it will be useful, #
+# but WITHOUT ANY WARRANTY; without even the implied warranty of #
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
+# GNU General Public License for more details. #
+# #
+# You should have received a copy of the GNU General Public License #
+# along with this program; if not, write to the Free Software #
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #
+# #
+********************************************************************************/
+
+/*******************************************************************************#
+# #
+# MJpeg decoding and frame capture taken from luvcview #
+# #
+# #
+********************************************************************************/
+
+/* support for internationalization - i18n */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CameraHAL_MJPEGDecode"
+//reinclude because of a bug with the log macros
+#include <utils/Log.h>
+#include "DebugUtils.h"
+#include "jutils.h"
+#include "huffman.h"
+#include "colorspaces.h"
+#include <linux/videodev.h>
+
+/*********************************/
+
+static int huffman_init(void);
+
+static void decode_mcus
+ __P((struct in *, int *, int, struct scan *, int *));
+
+static int dec_readmarker __P((struct in *));
+
+static void dec_makehuff
+ __P((struct dec_hufftbl *, int *, BYTE *));
+
+static void setinput __P((struct in *, BYTE *));
+/*********************************/
+
+#undef PREC
+#define PREC int
+
+static void idctqtab __P((BYTE *, PREC *));
+
+inline static void idct(int *in, int *out, int *quant, long off, int max);
+
+/*********************************/
+//static void col221111 __P((int *, unsigned char *, int));
+
+typedef void (*ftopict) (int * out, BYTE *pic, int width) ;
+
+/*********************************/
+static BYTE *datap;
+
+static int getbyte(void)
+{
+ return *datap++;
+}
+
+static int getword(void)
+{
+ int c1, c2;
+ c1 = *datap++;
+ c2 = *datap++;
+ return c1 << 8 | c2;
+}
+
+struct comp
+{
+ int cid;
+ int hv;
+ int tq;
+};
+
+#define MAXCOMP 4
+struct jpginfo
+{
+ int nc; /* number of components */
+ int ns; /* number of scans */
+ int dri; /* restart interval */
+	int nm; /* mcus until next marker */
+ int rm; /* next restart marker */
+};
+
+static struct jpginfo info;
+static struct comp comps[MAXCOMP];
+
+static struct scan dscans[MAXCOMP];
+
+static unsigned char quant[4][64];
+
+static struct dec_hufftbl dhuff[4];
+
+#define dec_huffdc (dhuff + 0)
+#define dec_huffac (dhuff + 2)
+
+static struct in in;
+
+/*read jpeg tables (huffman and quantization)
+* args:
+* till: Marker (frame - SOF0 scan - SOS)
+* isDHT: flag indicating the presence of huffman tables (if 0 must use default ones - MJPG frame)
+*/
+static int readtables(int till, int *isDHT)
+{
+ int m, l, i, j, lq, pq, tq;
+ int tc, th, tt;
+
+ for (;;)
+ {
+ if (getbyte() != 0xff)
+ return -1;
+ if ((m = getbyte()) == till)
+ break;
+
+ switch (m)
+ {
+ case 0xc2:
+ return 0;
+ /*read quantization tables (Lqt and Cqt)*/
+ case M_DQT:
+ lq = getword();
+ while (lq > 2)
+ {
+ pq = getbyte();
+ /*Lqt=0x00 Cqt=0x01*/
+ tq = pq & 15;
+ if (tq > 3)
+ return -1;
+ pq >>= 4;
+ if (pq != 0)
+ return -1;
+ for (i = 0; i < 64; i++)
+ quant[tq][i] = getbyte();
+ lq -= 64 + 1;
+ }
+ break;
+ /*read huffman table*/
+ case M_DHT:
+ l = getword();
+ while (l > 2)
+ {
+ int hufflen[16], k;
+ BYTE huffvals[256];
+
+ tc = getbyte();
+ th = tc & 15;
+ tc >>= 4;
+ tt = tc * 2 + th;
+ if (tc > 1 || th > 1)
+ return -1;
+
+ for (i = 0; i < 16; i++)
+ hufflen[i] = getbyte();
+ l -= 1 + 16;
+ k = 0;
+ for (i = 0; i < 16; i++)
+ {
+ for (j = 0; j < hufflen[i]; j++)
+ huffvals[k++] = getbyte();
+ l -= hufflen[i];
+ }
+ dec_makehuff(dhuff + tt, hufflen, huffvals);
+ }
+ /* has huffman tables defined (JPEG)*/
+ *isDHT= 1;
+ break;
+ /*restart interval*/
+ case M_DRI:
+ l = getword();
+ info.dri = getword();
+ break;
+
+ default:
+ l = getword();
+ while (l-- > 2)
+ getbyte();
+ break;
+ }
+ }
+ return 0;
+}
+
+static void dec_initscans(void)
+{
+ int i;
+
+ info.nm = info.dri + 1;
+ info.rm = M_RST0;
+ for (i = 0; i < info.ns; i++)
+ dscans[i].dc = 0;
+}
+
+static int dec_checkmarker(void)
+{
+ int i;
+
+ if (dec_readmarker(&in) != info.rm)
+ return -1;
+ info.nm = info.dri;
+ info.rm = (info.rm + 1) & ~0x08;
+ for (i = 0; i < info.ns; i++)
+ dscans[i].dc = 0;
+ return 0;
+}
+
+/*jpeg decode
+* args:
+* pic: pointer to picture data (in this adapted decoder the output is written
+*      as yuv420 semi-planar: Y plane followed by an interleaved V/U plane,
+*      see the yuv422pto420() call below)
+* buf: pointer to input data ( compressed jpeg )
+* width: picture width
+* height: picture height
+*/
+int jpeg_decode(BYTE **pic, BYTE *buf, int width, int height,unsigned int outformat)
+{
+ struct jpeg_decdata *decdata;
+ int i=0, j=0, m=0, tac=0, tdc=0;
+ int intwidth=0, intheight=0;
+ int mcusx=0, mcusy=0, mx=0, my=0;
+ int ypitch=0 ,xpitch=0,bpp=0,pitch=0,x=0,y=0;
+ int mb=0;
+ int max[6];
+ ftopict convert;
+ int err = 0;
+ int isInitHuffman = 0;
+ decdata = (struct jpeg_decdata *)malloc(sizeof(struct jpeg_decdata));
+
+ for(i=0;i<6;i++)
+ max[i]=0;
+
+ if (!decdata)
+ {
+ err = -1;
+ goto error;
+ }
+ if ((buf == NULL)||(*pic == NULL))
+ {
+ err = -1;
+ goto error;
+ }
+ datap = buf;
+ /*check SOI (0xFFD8)*/
+ if (getbyte() != 0xff)
+ {
+ err = ERR_NO_SOI;
+ goto error;
+ }
+ if (getbyte() != M_SOI)
+ {
+ err = ERR_NO_SOI;
+ goto error;
+ }
+ /*read tables - if exist, up to start frame marker (0xFFC0)*/
+ if (readtables(M_SOF0, &isInitHuffman))
+ {
+ err = ERR_BAD_TABLES;
+ goto error;
+ }
+	getword(); /*header length*/
+ i = getbyte(); /*precision (8 bit)*/
+ if (i != 8)
+ {
+ err = ERR_NOT_8BIT;
+ goto error;
+ }
+ intheight = getword(); /*height*/
+ intwidth = getword(); /*width */
+	if ((intheight & 7) || (intwidth & 7)) /*must be multiples of 8*/
+ {
+ err = ERR_BAD_WIDTH_OR_HEIGHT;
+ goto error;
+ }
+ info.nc = getbyte(); /*number of components*/
+ if (info.nc > MAXCOMP)
+ {
+ err = ERR_TOO_MANY_COMPPS;
+ goto error;
+ }
+ /*for each component*/
+ for (i = 0; i < info.nc; i++)
+ {
+ int h, v;
+ comps[i].cid = getbyte(); /*component id*/
+ comps[i].hv = getbyte();
+ v = comps[i].hv & 15; /*vertical sampling */
+ h = comps[i].hv >> 4; /*horizontal sampling */
+ comps[i].tq = getbyte(); /*quantization table used*/
+ if (h > 3 || v > 3)
+ {
+ err = ERR_ILLEGAL_HV;
+ goto error;
+ }
+ if (comps[i].tq > 3)
+ {
+ err = ERR_QUANT_TABLE_SELECTOR;
+ goto error;
+ }
+ }
+ /*read tables - if exist, up to start of scan marker (0xFFDA)*/
+ if (readtables(M_SOS,&isInitHuffman))
+ {
+ err = ERR_BAD_TABLES;
+ goto error;
+ }
+	getword(); /* header length */
+ info.ns = getbyte(); /* number of scans */
+ if (!info.ns)
+ {
+ err = ERR_NOT_YCBCR_221111;
+ goto error;
+ }
+ /*for each scan*/
+ for (i = 0; i < info.ns; i++)
+ {
+ dscans[i].cid = getbyte(); /*component id*/
+ tdc = getbyte();
+ tac = tdc & 15; /*ac table*/
+ tdc >>= 4; /*dc table*/
+ if (tdc > 1 || tac > 1)
+ {
+ err = ERR_QUANT_TABLE_SELECTOR;
+ goto error;
+ }
+ for (j = 0; j < info.nc; j++)
+ if (comps[j].cid == dscans[i].cid)
+ break;
+ if (j == info.nc)
+ {
+ err = ERR_UNKNOWN_CID_IN_SCAN;
+ goto error;
+ }
+ dscans[i].hv = comps[j].hv;
+ dscans[i].tq = comps[j].tq;
+ dscans[i].hudc.dhuff = dec_huffdc + tdc;
+ dscans[i].huac.dhuff = dec_huffac + tac;
+ }
+
+ i = getbyte(); /*0 */
+ j = getbyte(); /*63*/
+ m = getbyte(); /*0 */
+
+ if (i != 0 || j != 63 || m != 0)
+ {
+ CAMHAL_LOGDA("hmm FW error,not seq DCT ??\n");
+ }
+
+ /*build huffman tables*/
+ if(!isInitHuffman)
+ {
+ if(huffman_init() < 0)
+ return -ERR_BAD_TABLES;
+ }
+ /*
+ if (dscans[0].cid != 1 || dscans[1].cid != 2 || dscans[2].cid != 3)
+ {
+ err = ERR_NOT_YCBCR_221111;
+ goto error;
+ }
+
+ if (dscans[1].hv != 0x11 || dscans[2].hv != 0x11)
+ {
+ err = ERR_NOT_YCBCR_221111;
+ goto error;
+ }
+ */
+	/* the width/height declared in the JPEG header must match the
+	requested output size; otherwise report a mismatch so the caller
+	can deal with it (no reallocation is done here) */
+ if (intwidth != width)
+ {
+ err = ERR_WIDTH_MISMATCH;
+ goto error;
+ }
+
+ if (intheight != height)
+ {
+ err = ERR_HEIGHT_MISMATCH;
+ goto error;
+ }
+
+ switch (dscans[0].hv)
+ {
+		case 0x22: // 420 (2x2 subsampling)
+ mb=6;
+ mcusx = width >> 4;
+ mcusy = height >> 4;
+ bpp=2;
+ xpitch = 16 * bpp;
+ pitch = width * bpp; // YUYV out
+ ypitch = 16 * pitch;
+ convert = yuv420pto422; //choose the right conversion function
+ break;
+ case 0x21: //422
+ mb=4;
+ mcusx = width >> 4;
+ mcusy = height >> 3;
+ bpp=2;
+ xpitch = 16 * bpp;
+ pitch = width * bpp; // YUYV out
+ ypitch = 8 * pitch;
+ convert = yuv422pto422; //choose the right conversion function
+ break;
+ case 0x11: //444
+ mcusx = width >> 3;
+ mcusy = height >> 3;
+ bpp=2;
+ xpitch = 8 * bpp;
+ pitch = width * bpp; // YUYV out
+ ypitch = 8 * pitch;
+ if (info.ns==1)
+ {
+ mb = 1;
+ convert = yuv400pto422; //choose the right conversion function
+ }
+ else
+ {
+ mb=3;
+ convert = yuv444pto422; //choose the right conversion function
+ }
+ break;
+ default:
+ err = ERR_NOT_YCBCR_221111;
+ goto error;
+ break;
+ }
+
+ idctqtab(quant[dscans[0].tq], decdata->dquant[0]);
+ idctqtab(quant[dscans[1].tq], decdata->dquant[1]);
+ idctqtab(quant[dscans[2].tq], decdata->dquant[2]);
+ setinput(&in, datap);
+ dec_initscans();
+
+ dscans[0].next = 2;
+ dscans[1].next = 1;
+ dscans[2].next = 0; /* 4xx encoding */
+ for (my = 0,y=0; my < mcusy; my++,y+=ypitch)
+ {
+ for (mx = 0,x=0; mx < mcusx; mx++,x+=xpitch)
+ {
+ if (info.dri && !--info.nm){
+ if (dec_checkmarker())
+ {
+ err = ERR_WRONG_MARKER;
+ goto error;
+ }
+ }
+ switch (mb)
+ {
+ case 6:
+ decode_mcus(&in, decdata->dcts, mb, dscans, max);
+ idct(decdata->dcts, decdata->out, decdata->dquant[0],
+ IFIX(128.5), max[0]);
+ idct(decdata->dcts + 64, decdata->out + 64,
+ decdata->dquant[0], IFIX(128.5), max[1]);
+ idct(decdata->dcts + 128, decdata->out + 128,
+ decdata->dquant[0], IFIX(128.5), max[2]);
+ idct(decdata->dcts + 192, decdata->out + 192,
+ decdata->dquant[0], IFIX(128.5), max[3]);
+ idct(decdata->dcts + 256, decdata->out + 256,
+ decdata->dquant[1], IFIX(0.5), max[4]);
+ idct(decdata->dcts + 320, decdata->out + 320,
+ decdata->dquant[2], IFIX(0.5), max[5]);
+ break;
+
+ case 4:
+ decode_mcus(&in, decdata->dcts, mb, dscans, max);
+ idct(decdata->dcts, decdata->out, decdata->dquant[0],
+ IFIX(128.5), max[0]);
+ idct(decdata->dcts + 64, decdata->out + 64,
+ decdata->dquant[0], IFIX(128.5), max[1]);
+ idct(decdata->dcts + 128, decdata->out + 256,
+ decdata->dquant[1], IFIX(0.5), max[4]);
+ idct(decdata->dcts + 192, decdata->out + 320,
+ decdata->dquant[2], IFIX(0.5), max[5]);
+ break;
+
+ case 3:
+ decode_mcus(&in, decdata->dcts, mb, dscans, max);
+ idct(decdata->dcts, decdata->out, decdata->dquant[0],
+ IFIX(128.5), max[0]);
+ idct(decdata->dcts + 64, decdata->out + 256,
+ decdata->dquant[1], IFIX(0.5), max[4]);
+ idct(decdata->dcts + 128, decdata->out + 320,
+ decdata->dquant[2], IFIX(0.5), max[5]);
+ break;
+
+ case 1:
+ decode_mcus(&in, decdata->dcts, mb, dscans, max);
+ idct(decdata->dcts, decdata->out, decdata->dquant[0],
+ IFIX(128.5), max[0]);
+ break;
+ } // switch enc411
+ yuv422pto420(decdata->out,*pic+my*width*8+mx*16,width,*pic+width*height+my*width*8/2+mx*16); //convert to 420
+ }
+ }
+
+ m = dec_readmarker(&in);
+ if (m != M_EOI)
+ {
+ err = ERR_NO_EOI;
+ goto error;
+ }
+ free(decdata);
+ return 0;
+error:
+ CAMHAL_LOGDB("decode failed:%d",err);
+ free(decdata);
+ return err;
+}
+
+/****************************************************************/
+/************** huffman decoder ***************/
+/****************************************************************/
+static int huffman_init(void)
+{
+ int tc, th, tt;
+ unsigned char *ptr= (unsigned char *) JPEGHuffmanTable ;
+ int i, j, l;
+ l = JPG_HUFFMAN_TABLE_LENGTH ;
+ while (l > 0)
+ {
+ int hufflen[16], k;
+ unsigned char huffvals[256];
+
+ tc = *ptr++;
+ th = tc & 15;
+ tc >>= 4;
+ tt = tc * 2 + th;
+ if (tc > 1 || th > 1)
+ return -ERR_BAD_TABLES;
+ for (i = 0; i < 16; i++)
+ hufflen[i] = *ptr++;
+ l -= 1 + 16;
+ k = 0;
+ for (i = 0; i < 16; i++)
+ {
+ for (j = 0; j < hufflen[i]; j++)
+ huffvals[k++] = *ptr++;
+ l -= hufflen[i];
+ }
+ dec_makehuff(dhuff + tt, hufflen, huffvals);
+ }
+ return 0;
+}
+
+static int fillbits __P((struct in *, int, unsigned int));
+static int dec_rec2
+__P((struct in *, struct dec_hufftbl *, int *, int, int));
+
+static void setinput(in, p)
+struct in *in;
+unsigned char *p;
+{
+ in->p = p;
+ in->left = 0;
+ in->bits = 0;
+ in->marker = 0;
+}
+
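+/*
+ * fillbits() tops up the bit buffer: `le' is the number of valid bits held
+ * in `bi'. Stuffed zero bytes after 0xff are skipped; a real marker stops
+ * the refill, is remembered in in->marker and the buffer is padded so the
+ * caller keeps reading zero bits until dec_readmarker() picks the marker up.
+ */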
+static int fillbits(in, le, bi)
+struct in *in;
+int le;
+unsigned int bi;
+{
+ int b, m;
+
+ if (in->marker)
+ {
+ if (le <= 16)
+ in->bits = bi << 16, le += 16;
+ return le;
+ }
+ while (le <= 24)
+ {
+ b = *in->p++;
+ if (b == 0xff && (m = *in->p++) != 0)
+ {
+ if (m == M_EOF)
+ {
+ if (in->func && (m = in->func(in->data)) == 0)
+ continue;
+ }
+ in->marker = m;
+ if (le <= 16)
+ bi = bi << 16, le += 16;
+ break;
+ }
+ bi = bi << 8 | b;
+ le += 8;
+ }
+ in->bits = bi; /* tmp... 2 return values needed */
+ return le;
+}
+
+static int dec_readmarker(in)
+struct in *in;
+{
+ int m;
+
+ in->left = fillbits(in, in->left, in->bits);
+ if ((m = in->marker) == 0)
+ return 0;
+ in->left = 0;
+ in->marker = 0;
+ return m;
+}
+
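+/*
+ * Bit-reader helpers: the decoder caches the stream position in two locals
+ * (`le' = number of valid bits, `bi' = bit buffer) via LEBI_GET/LEBI_PUT and
+ * only touches struct in when refilling. GETBITS(in, n) refills if fewer
+ * than n bits are cached, then returns the top n bits; UNGETBITS(in, n)
+ * simply marks n bits as unread again. E.g. with bi = 0b1011010 and le = 7,
+ * GETBITS(in, 3) yields 0b101 and leaves le = 4.
+ */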
+#define LEBI_DCL int le, bi
+#define LEBI_GET(in) (le = in->left, bi = in->bits)
+#define LEBI_PUT(in) (in->left = le, in->bits = bi)
+
+#define GETBITS(in, n) ( \
+ (le < (n) ? le = fillbits(in, le, bi), bi = in->bits : 0), \
+ (le -= (n)), \
+ bi >> le & ((1 << (n)) - 1) \
+)
+
+#define UNGETBITS(in, n) ( \
+ le += (n) \
+)
+
+
+static int dec_rec2(in, hu, runp, c, i)
+ struct in *in;
+ struct dec_hufftbl *hu;
+ int *runp;
+ int c, i;
+{
+ LEBI_DCL;
+
+ LEBI_GET(in);
+ if (i)
+ {
+ UNGETBITS(in, i & 127);
+ *runp = i >> 8 & 15;
+ i >>= 16;
+ }
+ else
+ {
+ for (i = DECBITS;
+ (c = ((c << 1) | GETBITS(in, 1))) >= (hu->maxcode[i]); i++);
+ if (i >= 16)
+ {
+ in->marker = M_BADHUFF;
+ return 0;
+ }
+ i = hu->vals[hu->valptr[i] + c - hu->maxcode[i - 1] * 2];
+ *runp = i >> 4;
+ i &= 15;
+ }
+ if (i == 0)
+ { /* sigh, 0xf0 is 11 bit */
+ LEBI_PUT(in);
+ return 0;
+ }
+ /* receive part */
+ c = GETBITS(in, i);
+ if (c < (1 << (i - 1)))
+ c -= (1 << i) - 1; /* same value; avoids left-shifting a negative constant */
+ LEBI_PUT(in);
+ return c;
+}
+
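+/*
+ * DEC_REC peeks DECBITS bits and looks them up in llvals (layout described
+ * in dec_makehuff below). If bit 7 of the entry is set, the table already
+ * gives the decoded value (bits 16..31), the run length (bits 8..11) and how
+ * many of the peeked bits were not consumed; otherwise dec_rec2() finishes
+ * the code bit by bit using maxcode/valptr/vals.
+ */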
+#define DEC_REC(in, hu, r, i) ( \
+ r = GETBITS(in, DECBITS), \
+ i = hu->llvals[r], \
+ i & 128 ? \
+ ( \
+ UNGETBITS(in, i & 127), \
+ r = i >> 8 & 15, \
+ i >> 16 \
+ ) \
+ : \
+ ( \
+ LEBI_PUT(in), \
+ i = dec_rec2(in, hu, &r, r, i), \
+ LEBI_GET(in), \
+ i \
+ ) \
+)
+
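+/*
+ * decode_mcus() entropy-decodes the n 8x8 blocks of one MCU into dct[]: the
+ * DC term is the running predictor sc->dc plus the decoded difference, the
+ * AC terms are (run, value) pairs until an end-of-block code. The position
+ * just past the last decoded coefficient (in zigzag order) is stored in
+ * maxp[] so idct() can take its DC-only shortcut, and sc is advanced to the
+ * next component when n reaches sc->next.
+ */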
+static void decode_mcus(in, dct, n, sc, maxp)
+ struct in *in;
+ int *dct;
+ int n;
+ struct scan *sc;
+ int *maxp;
+{
+ struct dec_hufftbl *hu;
+ int i = 0, r = 0, t = 0;
+ LEBI_DCL;
+
+ memset(dct, 0, n * 64 * sizeof(*dct));
+ LEBI_GET(in);
+ while (n-- > 0)
+ {
+ hu = sc->hudc.dhuff;
+ *dct++ = (sc->dc += DEC_REC(in, hu, r, t));
+
+ hu = sc->huac.dhuff;
+ i = 63;
+ while (i > 0)
+ {
+ t = DEC_REC(in, hu, r, t);
+ if (t == 0 && r == 0)
+ {
+ dct += i;
+ break;
+ }
+ dct += r;
+ *dct++ = t;
+ i -= r + 1;
+ }
+ *maxp++ = 64 - i;
+ if (n == sc->next)
+ sc++;
+ }
+ LEBI_PUT(in);
+}
+
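+/*
+ * dec_makehuff() builds one decoding table from the canonical code lengths
+ * and symbol values: maxcode/valptr/vals drive the generic bit-by-bit decode
+ * in dec_rec2(), while llvals is a 2^DECBITS direct-lookup table that
+ * resolves short codes (and, when possible, their appended value bits) in a
+ * single peek - see the layout comment below.
+ */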
+static void dec_makehuff(hu, hufflen, huffvals)
+ struct dec_hufftbl *hu;
+ int *hufflen;
+ unsigned char *huffvals;
+{
+ int code, k, i, j, d, x, c, v;
+ for (i = 0; i < (1 << DECBITS); i++)
+ hu->llvals[i] = 0;
+
+ /*
+ * llvals layout:
+ *
+ * value v already known, run r, backup u bits:
+ * vvvvvvvvvvvvvvvv 0000 rrrr 1 uuuuuuu
+ * value unknown, size b bits, run r, backup u bits:
+ * 000000000000bbbb 0000 rrrr 0 uuuuuuu
+ * value and size unknown:
+ * 0000000000000000 0000 0000 0 0000000
+ */
+ code = 0;
+ k = 0;
+ for (i = 0; i < 16; i++, code <<= 1)
+ { /* sizes */
+ hu->valptr[i] = k;
+ for (j = 0; j < hufflen[i]; j++)
+ {
+ hu->vals[k] = *huffvals++;
+ if (i < DECBITS)
+ {
+ c = code << (DECBITS - 1 - i);
+ v = hu->vals[k] & 0x0f; /* size */
+ for (d = 1 << (DECBITS - 1 - i); --d >= 0;)
+ {
+ if (v + i < DECBITS)
+ { /* both fit in table */
+ x = d >> (DECBITS - 1 - v - i);
+ if (v && x < (1 << (v - 1)))
+ x -= (1 << v) - 1; /* same value; avoids left-shifting a negative constant */
+ x = x << 16 | (hu->vals[k] & 0xf0) << 4 |
+ (DECBITS - (i + 1 + v)) | 128;
+ }
+ else
+ x = v << 16 | (hu->vals[k] & 0xf0) << 4 |
+ (DECBITS - (i + 1));
+ hu->llvals[c | d] = x;
+ }
+ }
+ code++;
+ k++;
+ }
+ hu->maxcode[i] = code;
+ }
+ hu->maxcode[16] = 0x20000; /* always terminate decode */
+}
+
+/****************************************************************/
+/************** idct ***************/
+/****************************************************************/
+
+#define IMULT(a, b) (((a) * (b)) >> ISHIFT)
+#define ITOINT(a) ((a) >> ISHIFT)
+
+#define S22 ((PREC)IFIX(2 * 0.382683432))
+#define C22 ((PREC)IFIX(2 * 0.923879532))
+#define IC4 ((PREC)IFIX(1 / 0.707106781))
+
+// zigzag order used by idct: each group of eight consecutive entries locates
+// one column of the 8x8 block inside the zigzag-ordered coefficient array,
+// so the first (column) pass can de-zigzag and transform in one walk
+static unsigned char zig2[64] = {
+ 0, 2, 3, 9, 10, 20, 21, 35,
+ 14, 16, 25, 31, 39, 46, 50, 57,
+ 5, 7, 12, 18, 23, 33, 37, 48,
+ 27, 29, 41, 44, 52, 55, 59, 62,
+ 15, 26, 30, 40, 45, 51, 56, 58,
+ 1, 4, 8, 11, 19, 22, 34, 36,
+ 28, 42, 43, 53, 54, 60, 61, 63,
+ 6, 13, 17, 24, 32, 38, 47, 49
+};
+
+/* inverse dct for jpeg decoding
+* args:
+* in: pointer to one 8x8 coefficient block (after huffman decoding)
+* out: pointer to the idct output block (to be filled)
+* quant: pointer to the prescaled quantization table (see idctqtab)
+* off: offset added to every sample (128.5 for luma, 0.5 for chroma)
+* max: number of coefficients decoded for this block, as recorded by
+* decode_mcus(); 1 means only the DC term is present
+*
+* The transform runs as two 1-D passes (columns via zig2, then rows), with
+* shortcuts for blocks, columns or rows that carry only a DC value.
+*/
+inline static void idct(int *in, int *out, int *quant, long off, int max)
+{
+ long t0, t1, t2, t3, t4, t5, t6, t7;
+ long tmp0, tmp1, tmp2, tmp3, tmp4, tmp5, tmp6;
+ long tmp[64], *tmpp;
+ int i, j, te;
+ unsigned char *zig2p;
+
+ t0 = off;
+ if (max == 1) //single color mcu
+ {
+ t0 += in[0] * quant[0]; //only DC available
+ for (i = 0; i < 64; i++) // fill mcu with DC value
+ out[i] = ITOINT(t0);
+ return;
+ }
+ zig2p = zig2;
+ tmpp = tmp;
+ for (i = 0; i < 8; i++) //apply quantization table in zigzag order
+ {
+ j = *zig2p++;
+ t0 += in[j] * (long) quant[j];
+ j = *zig2p++;
+ t5 = in[j] * (long) quant[j];
+ j = *zig2p++;
+ t2 = in[j] * (long) quant[j];
+ j = *zig2p++;
+ t7 = in[j] * (long) quant[j];
+ j = *zig2p++;
+ t1 = in[j] * (long) quant[j];
+ j = *zig2p++;
+ t4 = in[j] * (long) quant[j];
+ j = *zig2p++;
+ t3 = in[j] * (long) quant[j];
+ j = *zig2p++;
+ t6 = in[j] * (long) quant[j];
+
+
+ if ((t1 | t2 | t3 | t4 | t5 | t6 | t7) == 0)
+ {
+ tmpp[0 * 8] = t0; //DC
+ tmpp[1 * 8] = t0;
+ tmpp[2 * 8] = t0;
+ tmpp[3 * 8] = t0;
+ tmpp[4 * 8] = t0;
+ tmpp[5 * 8] = t0;
+ tmpp[6 * 8] = t0;
+ tmpp[7 * 8] = t0;
+
+ tmpp++;
+ t0 = 0;
+ continue;
+ }
+ //IDCT;
+ tmp0 = t0 + t1;
+ t1 = t0 - t1;
+ tmp2 = t2 - t3;
+ t3 = t2 + t3;
+ tmp2 = IMULT(tmp2, IC4) - t3;
+ tmp3 = tmp0 + t3;
+ t3 = tmp0 - t3;
+ tmp1 = t1 + tmp2;
+ tmp2 = t1 - tmp2;
+ tmp4 = t4 - t7;
+ t7 = t4 + t7;
+ tmp5 = t5 + t6;
+ t6 = t5 - t6;
+ tmp6 = tmp5 - t7;
+ t7 = tmp5 + t7;
+ tmp5 = IMULT(tmp6, IC4);
+ tmp6 = IMULT((tmp4 + t6), S22);
+ tmp4 = IMULT(tmp4, (C22 - S22)) + tmp6;
+ t6 = IMULT(t6, (C22 + S22)) - tmp6;
+ t6 = t6 - t7;
+ t5 = tmp5 - t6;
+ t4 = tmp4 - t5;
+
+ tmpp[0 * 8] = tmp3 + t7; //t0;
+ tmpp[1 * 8] = tmp1 + t6; //t1;
+ tmpp[2 * 8] = tmp2 + t5; //t2;
+ tmpp[3 * 8] = t3 + t4; //t3;
+ tmpp[4 * 8] = t3 - t4; //t4;
+ tmpp[5 * 8] = tmp2 - t5; //t5;
+ tmpp[6 * 8] = tmp1 - t6; //t6;
+ tmpp[7 * 8] = tmp3 - t7; //t7;
+ tmpp++;
+ t0 = 0;
+ }
+ for (i = 0, j = 0; i < 8; i++)
+ {
+ t0 = tmp[j + 0];
+ t1 = tmp[j + 1];
+ t2 = tmp[j + 2];
+ t3 = tmp[j + 3];
+ t4 = tmp[j + 4];
+ t5 = tmp[j + 5];
+ t6 = tmp[j + 6];
+ t7 = tmp[j + 7];
+ if ((t1 | t2 | t3 | t4 | t5 | t6 | t7) == 0)
+ {
+ te = ITOINT(t0);
+ out[j + 0] = te;
+ out[j + 1] = te;
+ out[j + 2] = te;
+ out[j + 3] = te;
+ out[j + 4] = te;
+ out[j + 5] = te;
+ out[j + 6] = te;
+ out[j + 7] = te;
+ j += 8;
+ continue;
+ }
+ //IDCT;
+ tmp0 = t0 + t1;
+ t1 = t0 - t1;
+ tmp2 = t2 - t3;
+ t3 = t2 + t3;
+ tmp2 = IMULT(tmp2, IC4) - t3;
+ tmp3 = tmp0 + t3;
+ t3 = tmp0 - t3;
+ tmp1 = t1 + tmp2;
+ tmp2 = t1 - tmp2;
+ tmp4 = t4 - t7;
+ t7 = t4 + t7;
+ tmp5 = t5 + t6;
+ t6 = t5 - t6;
+ tmp6 = tmp5 - t7;
+ t7 = tmp5 + t7;
+ tmp5 = IMULT(tmp6, IC4);
+ tmp6 = IMULT((tmp4 + t6), S22);
+ tmp4 = IMULT(tmp4, (C22 - S22)) + tmp6;
+ t6 = IMULT(t6, (C22 + S22)) - tmp6;
+ t6 = t6 - t7;
+ t5 = tmp5 - t6;
+ t4 = tmp4 - t5;
+
+ out[j + 0] = ITOINT(tmp3 + t7);
+ out[j + 1] = ITOINT(tmp1 + t6);
+ out[j + 2] = ITOINT(tmp2 + t5);
+ out[j + 3] = ITOINT(t3 + t4);
+ out[j + 4] = ITOINT(t3 - t4);
+ out[j + 5] = ITOINT(tmp2 - t5);
+ out[j + 6] = ITOINT(tmp1 - t6);
+ out[j + 7] = ITOINT(tmp3 - t7);
+ j += 8;
+ }
+}
+
+static unsigned char zig[64] = {
+ 0, 1, 5, 6, 14, 15, 27, 28,
+ 2, 4, 7, 13, 16, 26, 29, 42,
+ 3, 8, 12, 17, 25, 30, 41, 43,
+ 9, 11, 18, 24, 31, 40, 44, 53,
+ 10, 19, 23, 32, 39, 45, 52, 54,
+ 20, 22, 33, 38, 46, 51, 55, 60,
+ 21, 34, 37, 47, 50, 56, 59, 61,
+ 35, 36, 48, 49, 57, 58, 62, 63
+};
+
+// per-frequency IDCT scale factors: aaidct[k] = cos(k*pi/16)/2 for k > 0 and
+// 1/(2*sqrt(2)) for k = 0; idctqtab() folds aaidct[row]*aaidct[col] into the
+// dequantization table so idct() scales and dequantizes in one multiply
+static PREC aaidct[8] = {
+ IFIX(0.3535533906), IFIX(0.4903926402),
+ IFIX(0.4619397663), IFIX(0.4157348062),
+ IFIX(0.3535533906), IFIX(0.2777851165),
+ IFIX(0.1913417162), IFIX(0.0975451610)
+};
+
+
+static void idctqtab(qin, qout)
+ unsigned char *qin;
+ PREC *qout;
+{
+ int i, j;
+
+ for (i = 0; i < 8; i++)
+ for (j = 0; j < 8; j++)
+ qout[zig[i * 8 + j]] = qin[zig[i * 8 + j]] *
+ IMULT(aaidct[i], aaidct[j]);
+}
+
diff --git a/vircam/V4LCamAdpt.cpp b/vircam/V4LCamAdpt.cpp
index f7ff790..c2b08f4 100755
--- a/vircam/V4LCamAdpt.cpp
+++ b/vircam/V4LCamAdpt.cpp
@@ -1302,7 +1302,6 @@ int V4LCamAdpt::previewThread()
frame.mLength = width*height*2;
memcpy(dest,src,frame.mLength);
}else if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_NV21){ //420sp
-
frame.mLength = width*height*3/2;
if ( CameraFrame::PIXEL_FMT_NV21 == mPixelFormat){
memcpy(dest,src,frame.mLength);
@@ -1612,33 +1611,25 @@ int V4LCamAdpt::pictureThread()
mVideoInfo->buf.length, mVideoInfo->buf.bytesused);
if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_RGB24){ // rgb24
-
frame.mLength = width*height*3;
frame.mQuirks = CameraFrame::ENCODE_RAW_RGB24_TO_JPEG
| CameraFrame::HAS_EXIF_DATA;
memcpy(dest,src,mVideoInfo->buf.length);
-
}else if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ // 422I
-
frame.mLength = width*height*2;
frame.mQuirks = CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG
| CameraFrame::HAS_EXIF_DATA;
memcpy(dest, src, mVideoInfo->buf.length);
-
}else if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_NV21){ // 420sp
-
frame.mLength = width*height*3/2;
frame.mQuirks = CameraFrame::ENCODE_RAW_YUV420SP_TO_JPEG
| CameraFrame::HAS_EXIF_DATA;
memcpy(dest,src,mVideoInfo->buf.length);
-
}else{ //default case
-
frame.mLength = width*height*3;
frame.mQuirks = CameraFrame::ENCODE_RAW_RGB24_TO_JPEG
| CameraFrame::HAS_EXIF_DATA;
memcpy(dest, src, mVideoInfo->buf.length);
-
}
notifyShutterSubscribers();