author    tellen.yu <tellen.yu@amlogic.com>    2013-12-11 02:19:29 (GMT)
committer tellen.yu <tellen.yu@amlogic.com>    2013-12-11 02:19:29 (GMT)
commit    0a0c9740659dd3aaad18c6a764844dadb171c93b (patch)
tree      53b80b7ab6e4dccf8f2b8131c3e07f31aadb87ec
parent    ed8976d3d7cf337cde57c6aa5683ee8f5efed33d (diff)
parent    e15de89845b71388aba35d8e4600abeedbfcb2fa (diff)
Merge remote-tracking branch 'remotes/amlogic/jb-mr2-amlogic' into kk-amlogic
Diffstat
-rwxr-xr-x  AppCallbackNotifier.cpp                    27
-rwxr-xr-x  CameraHal.cpp                              19
-rwxr-xr-x  V4LCameraAdapter/V4LCameraAdapter.cpp     104
-rwxr-xr-x  inc/CameraHal.h                             3
-rwxr-xr-x  inc/V4LCameraAdapter/V4LCameraAdapter.h     6
-rwxr-xr-x  inc/VideoMetadata.h                         4
-rwxr-xr-x  inc/mjpeg/colorspaces.h                     6
-rwxr-xr-x  mjpeg/colorspaces.c                       300
-rwxr-xr-x  mjpeg/jpegdec.c                            38
-rwxr-xr-x  utils/util.cpp                             45
-rwxr-xr-x  utils/util.h                                3
11 files changed, 505 insertions(+), 50 deletions(-)
diff --git a/AppCallbackNotifier.cpp b/AppCallbackNotifier.cpp
index 3f3b959..491d696 100755
--- a/AppCallbackNotifier.cpp
+++ b/AppCallbackNotifier.cpp
@@ -1072,37 +1072,36 @@ void AppCallbackNotifier::notifyFrame()
void *y_uv[2];
mapper.lock((buffer_handle_t)vBuf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
- structConvImage input = {frame->mWidth,
- frame->mHeight,
+ structConvImage input = {(int)frame->mWidth,
+ (int)frame->mHeight,
4096,
IC_FORMAT_YCbCr420_lp,
(mmByte *)frame->mYuv[0],
- (mmByte *)frame->mYuv[1],
- frame->mOffset};
+ (mmByte *)(frame->mYuv[0]+frame->mWidth*frame->mHeight),
+ (int)frame->mOffset};
structConvImage output = {mVideoWidth,
mVideoHeight,
4096,
IC_FORMAT_YCbCr420_lp,
(mmByte *)y_uv[0],
- (mmByte *)y_uv[1],
+ (mmByte *)((unsigned)y_uv[0]+mVideoWidth*mVideoHeight),
0};
VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0);
mapper.unlock((buffer_handle_t)vBuf);
- videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
- videoMetadataBuffer->handle = (void *)vBuf;
- videoMetadataBuffer->offset = 0;
+ videoMetadataBuffer->metadataBufferType = kMetadataBufferTypeCanvasSource;
+ videoMetadataBuffer->handle= (void *)vBuf;
+ videoMetadataBuffer->canvas = 0;
}
else
{
- videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
- videoMetadataBuffer->handle = frame->mBuffer;
- videoMetadataBuffer->offset = frame->mOffset;
+ videoMetadataBuffer->metadataBufferType = kMetadataBufferTypeCanvasSource;
+ videoMetadataBuffer->handle = (void*)frame->mBuffer;
+ videoMetadataBuffer->canvas = frame->mCanvas;
}
-
- CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x",
- frame->mBuffer, videoMetadataBuffer, videoMedatadaBufferMemory);
+ CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x, videoMetadataBuffer->ptr=0x%x, videoMetadataBuffer->canvas_index = 0x%x",
+ frame->mBuffer, videoMetadataBuffer, videoMedatadaBufferMemory,(unsigned)videoMetadataBuffer->handle,videoMetadataBuffer->canvas);
mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME,
videoMedatadaBufferMemory, 0, mCallbackCookie);
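
Note on the chroma-pointer change in the hunk above: in the semi-planar YCbCr 4:2:0 layout used here, the interleaved UV plane sits directly behind the Y plane, so the second plane pointer is now derived from the first instead of trusting frame->mYuv[1]. A minimal sketch of that arithmetic (illustration only, not part of this commit; the helper name is hypothetical):

    #include <cstddef>

    // Illustration only: derive the UV plane pointer of a semi-planar
    // YCbCr 4:2:0 buffer, as the hunk above does with
    // frame->mYuv[0] + frame->mWidth * frame->mHeight.
    static unsigned char *uv_plane_of(unsigned char *y_plane, int width, int height)
    {
        return y_plane + static_cast<std::size_t>(width) * height;  // UV follows Y directly
    }
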
diff --git a/CameraHal.cpp b/CameraHal.cpp
index b1a4cf2..bb21846 100755
--- a/CameraHal.cpp
+++ b/CameraHal.cpp
@@ -786,8 +786,23 @@ int CameraHal::setParameters(const CameraParameters& params)
if( (valstr = params.get(CameraParameters::KEY_FOCUS_AREAS)) != NULL )
{
- CAMHAL_LOGEB("Focus areas position set %s", params.get(CameraParameters::KEY_FOCUS_AREAS));
- mParameters.set(CameraParameters::KEY_FOCUS_AREAS, valstr);
+ int x0 = 0;
+ int y0 = 0;
+ int x1 = 0;
+ int y1 = 0;
+ int weight = 0;
+ CAMHAL_LOGDB("Focus areas position set %s", params.get(CameraParameters::KEY_FOCUS_AREAS));
+ sscanf(params.get(CameraParameters::KEY_FOCUS_AREAS),"(%d,%d,%d,%d,%d)",&x0,&y0,&x1,&y1,&weight);
+ if(x0<-1000||y0<-1000||y1>1000||x1>1000||weight<1||weight>1000||x0>=x1||y0>=y1){
+ if(x1==0&&y1==0&&x0==0&&y0==0){
+ mParameters.set(CameraParameters::KEY_FOCUS_AREAS, valstr);
+ }else{
+ CAMHAL_LOGEB("ERROR: Invalid focus area = %s", valstr);
+ ret = -EINVAL;
+ }
+ }else{
+ mParameters.set(CameraParameters::KEY_FOCUS_AREAS, valstr);
+ }
}
if( (valstr = params.get(ExCameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
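
For reference, the focus-area check added above follows the Android CameraParameters convention: the area rectangle lives in a (-1000,-1000)..(1000,1000) coordinate space, the weight must be 1..1000, and the all-zero string "(0,0,0,0,0)" means "clear the focus area" and is accepted as-is. A standalone sketch of the same validation (hypothetical helper, not part of the commit):

    #include <cstdio>

    // Hypothetical standalone version of the check added to setParameters():
    // accept a well-formed area, or the all-zero "clear" value. Unparsed
    // fields stay 0, mirroring the HAL code above.
    static bool isValidFocusArea(const char *str)
    {
        int x0 = 0, y0 = 0, x1 = 0, y1 = 0, weight = 0;
        sscanf(str, "(%d,%d,%d,%d,%d)", &x0, &y0, &x1, &y1, &weight);
        const bool inRange = x0 >= -1000 && y0 >= -1000 && x1 <= 1000 && y1 <= 1000 &&
                             weight >= 1 && weight <= 1000 && x0 < x1 && y0 < y1;
        const bool allZero = (x0 == 0 && y0 == 0 && x1 == 0 && y1 == 0);
        return inRange || allZero;
    }

For example, "(-200,-200,200,200,1)" passes, while "(200,200,-200,-200,1)" fails the x0 < x1 check and is rejected with -EINVAL in the hunk above.
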
diff --git a/V4LCameraAdapter/V4LCameraAdapter.cpp b/V4LCameraAdapter/V4LCameraAdapter.cpp
index deebe54..b7d92fa 100755
--- a/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -165,6 +165,8 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
return NO_MEMORY;
}
+ memset(mVideoInfo,0,sizeof(struct VideoInfo));
+
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
#ifdef AMLOGIC_TWO_CH_UVC
mCamEncodeIndex = -1;
@@ -214,7 +216,15 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
return -EINVAL;
}
+ mVideoInfo->canvas_mode = false;
+ char* str = strchr((const char *)mVideoInfo->cap.card,'.');
+ if(str){
+ if(!strncmp(str,".canvas",strlen(str))){
+ mVideoInfo->canvas_mode = true;
+ CAMHAL_LOGDB("Camera %d use canvas mode",mSensorIndex);
+ }
+ }
if (strcmp(caps->get(CameraProperties::FACING_INDEX), (const char *) android::ExCameraParameters::FACING_FRONT) == 0)
mbFrontCamera = true;
else
@@ -707,6 +717,24 @@ status_t V4LCameraAdapter::getBuffersFormat(int &width, int &height, int &pixelf
return ret;
}
+status_t V4LCameraAdapter::setCrop(int width, int height)
+{
+ int ret = NO_ERROR;
+ struct v4l2_crop crop;
+
+ memset (&crop, 0, sizeof(crop));
+ crop.c.width = width;
+ crop.c.height = height;
+ ret = ioctl(mCameraHandle, VIDIOC_S_CROP, &crop);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_S_CROP Failed: %s, ret=%d\n", strerror(errno), ret);
+ }
+
+ CAMHAL_LOGIB("crop w=%d, h=%d\n", width, height);
+
+ return ret;
+}
+
status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
{
int ret = NO_ERROR;
@@ -744,7 +772,7 @@ status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
pixfmt = V4L2_PIX_FMT_YUYV;
mPixelFormat = CameraFrame::PIXEL_FMT_YUYV;
}
-
+
mSensorFormat = pixfmt;
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
if((mUseMJPEG == true)&&(mSupportMJPEG == true)&&(width>=640)&&(height>=480))
@@ -792,6 +820,9 @@ status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
return -1;
}
+ if(mVideoInfo->canvas_mode){
+ mVideoInfo->canvas[i] = mVideoInfo->buf.reserved;
+ }
uint32_t *ptr = (uint32_t*) bufArr;
//Associate each Camera internal buffer with the one from Overlay
CAMHAL_LOGDB("mPreviewBufs.add %#x, %d", ptr[i], i);
@@ -820,11 +851,6 @@ status_t V4LCameraAdapter::UseBuffersCapture(void* bufArr, int num)
CAMHAL_LOGDB("num=%d\n", num);
}
- /* This will only be called right before taking a picture, so
- * stop preview now so that we can set buffer format here.
- */
- this->stopPreview();
-
int width, height;
mParams.getPictureSize(&width, &height);
mCaptureWidth = width;
@@ -845,6 +871,14 @@ status_t V4LCameraAdapter::UseBuffersCapture(void* bufArr, int num)
}
mSensorFormat = DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT;
#endif
+
+ setCrop( mCaptureWidth, mCaptureHeight);
+ /* This will only be called right before taking a picture, so
+ * stop preview now so that we can set buffer format here.
+ */
+ this->stopPreview();
+
+
setBuffersFormat(width, height, mSensorFormat);
//First allocate adapter internal buffers at V4L level for Cam
@@ -881,6 +915,8 @@ status_t V4LCameraAdapter::UseBuffersCapture(void* bufArr, int num)
CAMHAL_LOGEB("Unable to map buffer (%s)", strerror(errno));
return -1;
}
+ if(mVideoInfo->canvas_mode)
+ mVideoInfo->canvas[i] = mVideoInfo->buf.reserved;
uint32_t *ptr = (uint32_t*) bufArr;
mCaptureBuf = (camera_memory_t*)ptr[0];
@@ -1146,6 +1182,7 @@ status_t V4LCameraAdapter::stopPreview()
if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0){
CAMHAL_LOGEA("Unmap failed");
}
+ mVideoInfo->canvas[i] = 0;
}
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
@@ -1168,11 +1205,11 @@ status_t V4LCameraAdapter::stopPreview()
return ret;
}
-char * V4LCameraAdapter::GetFrame(int &index)
+char * V4LCameraAdapter::GetFrame(int &index, unsigned int* canvas)
{
int ret;
if(nQueued<=0){
- CAMHAL_LOGEA("GetFrame: No buff for Dequeue");
+ CAMHAL_LOGVA("GetFrame: No buff for Dequeue");
return NULL;
}
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
@@ -1202,6 +1239,10 @@ char * V4LCameraAdapter::GetFrame(int &index)
nDequeued++;
nQueued--;
index = mVideoInfo->buf.index;
+ if(mVideoInfo->canvas_mode)
+ *canvas = mVideoInfo->canvas[mVideoInfo->buf.index];
+ else
+ *canvas = 0;
return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
}
@@ -1330,6 +1371,7 @@ int V4LCameraAdapter::previewThread()
int active_duration = 0;
uint8_t* ptr = NULL;
bool noFrame = true;
+ unsigned int canvas_id = 0;
if (mPreviewing){
int index = -1;
@@ -1341,14 +1383,14 @@ int V4LCameraAdapter::previewThread()
}
}
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
- delay = previewframeduration>>2;
+ delay = 5000;//previewframeduration>>2;
#else
delay = previewframeduration;
#endif
if(mSensorFormat != V4L2_PIX_FMT_MJPEG)
usleep(delay);
- char *fp = this->GetFrame(index);
+ char *fp = this->GetFrame(index, &canvas_id);
if((-1==index)||!fp){
noFrame = true;
@@ -1393,6 +1435,7 @@ int V4LCameraAdapter::previewThread()
mFirstBuff = false;
mCache.index = -1;
mCache.bufPtr == NULL;
+ mCache.canvas = 0;
ptr = (uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(index));
gettimeofday(&previewTime1, NULL);
}else{
@@ -1410,11 +1453,14 @@ int V4LCameraAdapter::previewThread()
fillThisBuffer((uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(mCache.index)), CameraFrame::PREVIEW_FRAME_SYNC);
}
mCache.index = -1;
+ mCache.canvas = 0;
}else if(mCache.index != -1){ //current catch no picture,but have a tmp buf;
fp = mCache.bufPtr;
ptr = (uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(mCache.index));
index = mCache.index;
+ canvas_id = mCache.canvas;
mCache.index = -1;
+ mCache.canvas = 0;
}else{
return 0;
}
@@ -1425,6 +1471,7 @@ int V4LCameraAdapter::previewThread()
fillThisBuffer((uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(mCache.index)), CameraFrame::PREVIEW_FRAME_SYNC);
}
mCache.index = index;
+ mCache.canvas = canvas_id;
}
return 0;
}
@@ -1441,6 +1488,7 @@ int V4LCameraAdapter::previewThread()
mFrameInv = 0;
#endif
+ frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
uint8_t* dest = NULL;
#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
camera_memory_t* VideoCameraBufferMemoryBase = (camera_memory_t*)ptr;
@@ -1472,7 +1520,7 @@ int V4LCameraAdapter::previewThread()
fillThisBuffer((uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(index)), CameraFrame::PREVIEW_FRAME_SYNC);
//CAMHAL_LOGEA("jpeg decode failed");
return -1;
- }
+ }
frame.mLength = width*height*3/2;
}else{
if(DEFAULT_PREVIEW_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ // 422I
@@ -1489,9 +1537,17 @@ int V4LCameraAdapter::previewThread()
}
#else
if ( CameraFrame::PIXEL_FMT_NV21 == mPixelFormat){
- memcpy(dest,src,frame.mLength);
+ if (frame.mLength == mVideoInfo->buf.length) {
+ memcpy(dest,src,frame.mLength);
+ }else{
+ nv21_memcpy_align32 (dest, src, width, height);
+ }
}else{
- yv12_adjust_memcpy(dest,src,width,height);
+ if (frame.mLength == mVideoInfo->buf.length) {
+ yv12_adjust_memcpy(dest,src,width,height);
+ } else {
+ yv12_memcpy_align32 (dest, src, width, height);
+ }
}
#endif
}else{ //default case
@@ -1510,9 +1566,9 @@ int V4LCameraAdapter::previewThread()
frame.mOffset = 0;
frame.mYuv[0] = 0;
frame.mYuv[1] = 0;
+ frame.mCanvas = canvas_id;
frame.mWidth = width;
frame.mHeight = height;
- frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
frame.mPixelFmt = mPixelFormat;
ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
if (ret){
@@ -1746,7 +1802,8 @@ int V4LCameraAdapter::pictureThread()
}
int index = 0;
- char *fp = this->GetFrame(index);
+ unsigned int canvas_id = 0;
+ char *fp = this->GetFrame(index,&canvas_id);
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
while((mVideoInfo->buf.length != mVideoInfo->buf.bytesused)&&(dqTryNum>0)){
if(NULL != fp){
@@ -1771,14 +1828,14 @@ int V4LCameraAdapter::pictureThread()
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
usleep( 10000 );
#endif
- fp = this->GetFrame(index);
+ fp = this->GetFrame(index,&canvas_id);
}
#endif
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
while(!fp && (-1 == index) ){
usleep( 10000 );
- fp = this->GetFrame(index);
+ fp = this->GetFrame(index,&canvas_id);
}
#else
if(!fp){
@@ -1824,7 +1881,12 @@ int V4LCameraAdapter::pictureThread()
//convert yuyv to rgb24
yuyv422_to_rgb24(src,dest,width,height);
#else
- memcpy(dest,src,mVideoInfo->buf.length);
+ if (frame.mLength == mVideoInfo->buf.length) {
+ memcpy (dest, src, frame.mLength);
+ }else{
+ rgb24_memcpy( dest, src, width, height);
+ CAMHAL_LOGVB("w*h*3=%d, mLenght=%d\n", width*height*3, mVideoInfo->buf.length);
+ }
#endif
}else if(DEFAULT_IMAGE_CAPTURE_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV){ // 422I
frame.mLength = width*height*2;
@@ -1862,6 +1924,7 @@ int V4LCameraAdapter::pictureThread()
frame.mOffset = 0;
frame.mYuv[0] = 0;
frame.mYuv[1] = 0;
+ frame.mCanvas = canvas_id;
frame.mWidth = width;
frame.mHeight = height;
frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
@@ -1886,6 +1949,7 @@ int V4LCameraAdapter::pictureThread()
if (munmap(mVideoInfo->mem[0], mVideoInfo->buf.length) < 0){
CAMHAL_LOGEA("Unmap failed");
}
+ mVideoInfo->canvas[0] = 0;
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
@@ -1912,6 +1976,7 @@ int V4LCameraAdapter::pictureThread()
}
#endif
+ setCrop( 0, 0); //set to zero and then go preview
// start preview thread again after stopping it in UseBuffersCapture
{
Mutex::Autolock lock(mPreviewBufferLock);
@@ -2508,6 +2573,7 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
const char DEFAULT_PICTURE_FORMAT[] = "jpeg";
const char DEFAULT_PICTURE_SIZE[] = "640x480";
const char PREVIEW_FORMAT_420SP[] = "yuv420sp";
+ const char PREVIEW_FORMAT_420P[] = "yuv420p";
const char PREVIEW_FORMAT_422I[] = "yuv422i-yuyv";
const char DEFAULT_PREVIEW_SIZE[] = "640x480";
const char DEFAULT_NUM_PREV_BUFS[] = "6";
@@ -2603,7 +2669,7 @@ extern "C" void loadCaps(int camera_id, CameraProperties::Properties* params) {
params->set(CameraProperties::PREVIEW_FORMAT,PREVIEW_FORMAT_420SP);
}else{ //default case
//params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS,PREVIEW_FORMAT_420SP);
- params->set(CameraProperties::PREVIEW_FORMAT,PREVIEW_FORMAT_420SP);
+ params->set(CameraProperties::PREVIEW_FORMAT,PREVIEW_FORMAT_420P);
}
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
diff --git a/inc/CameraHal.h b/inc/CameraHal.h
index 5a2eceb..28320b4 100755
--- a/inc/CameraHal.h
+++ b/inc/CameraHal.h
@@ -254,6 +254,7 @@ class CameraFrame
mYuv[0] = 0;
mYuv[1] = 0;
+ mCanvas = 0;
}
//copy constructor
@@ -275,6 +276,7 @@ class CameraFrame
mYuv[0] = frame.mYuv[0];
mYuv[1] = frame.mYuv[1];
+ mCanvas = frame.mCanvas;
}
void *mCookie;
@@ -291,6 +293,7 @@ class CameraFrame
unsigned int mQuirks;
unsigned int mPixelFmt;
unsigned int mYuv[2];
+ unsigned int mCanvas;
///@todo add other member vars like stride etc
};
diff --git a/inc/V4LCameraAdapter/V4LCameraAdapter.h b/inc/V4LCameraAdapter/V4LCameraAdapter.h
index 5deecaf..f961a21 100755
--- a/inc/V4LCameraAdapter/V4LCameraAdapter.h
+++ b/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -47,7 +47,9 @@ struct VideoInfo {
struct v4l2_buffer buf;
struct v4l2_requestbuffers rb;
void *mem[NB_BUFFER];
+ unsigned int canvas[NB_BUFFER];
bool isStreaming;
+ bool canvas_mode;
int width;
int height;
int formatIn;
@@ -213,6 +215,7 @@ typedef enum camera_focus_mode_e {
typedef struct cam_cache_buf{
char *bufPtr;
int index;
+ unsigned canvas;
}cache_buf_t;
typedef struct cam_LimitedRate_Item{
@@ -325,12 +328,13 @@ private:
};
status_t setBuffersFormat(int width, int height, int pixelformat);
+ status_t setCrop(int width, int height);
status_t getBuffersFormat(int &width, int &height, int &pixelformat);
//Used for calculation of the average frame rate during preview
status_t recalculateFPS();
- char * GetFrame(int &index);
+ char * GetFrame(int &index, unsigned int* canvas);
int previewThread();
diff --git a/inc/VideoMetadata.h b/inc/VideoMetadata.h
index a1460d6..dd4171d 100755
--- a/inc/VideoMetadata.h
+++ b/inc/VideoMetadata.h
@@ -23,9 +23,9 @@
typedef struct
{
- int metadataBufferType;
+ unsigned int metadataBufferType;
void* handle;
- int offset;
+ unsigned int canvas;
}
video_metadata_t;
diff --git a/inc/mjpeg/colorspaces.h b/inc/mjpeg/colorspaces.h
index 8787c4f..3479568 100755
--- a/inc/mjpeg/colorspaces.h
+++ b/inc/mjpeg/colorspaces.h
@@ -254,6 +254,12 @@ yuv420pto422(int * out,unsigned char *pic,int width);
void
yuv422pto422(int * out,unsigned char *pic,int width);
+void
+yuv420pto420sp(int * out,addr *pic,int width);
+
+void
+yuv420pto420p(int * out,addr *pic,int width);
+
void
yuv422pto420sp(int * out, addr *pic,int width);
diff --git a/mjpeg/colorspaces.c b/mjpeg/colorspaces.c
index 9fdce8f..723505c 100755
--- a/mjpeg/colorspaces.c
+++ b/mjpeg/colorspaces.c
@@ -1398,6 +1398,302 @@ void yuv422pto422(int * out,unsigned char *pic,int width)
}
}
+void yuv420pto420sp(int * out, addr *pic, int width)
+{
+ int j, k;
+ unsigned char *pic0, *pic1, *uv;
+ int *outy, *outu, *outv;
+ int *outy1 ;
+ int *outy2 ;
+ int *outu1 ;
+ int *outv1 ;
+
+ pic0 = pic->y;
+ pic1 = pic->y + width;
+ uv = pic->v;
+ outy = out;
+ outu = out + 64 * 4;
+ outv = out + 64 * 5;
+
+ for (j = 0; j < 8; j++)
+ {
+ outy1 = outy;
+ outy2 = outy+8;
+ outv1 = outv;
+ outu1 = outu;
+
+ {
+ asm volatile(
+ "mov r0,#0 \n\t"
+ "vdup.u32 d30, r0 \n\t"
+ "mov r0,#255 \n\t"
+ "vdup.u32 d31, r0 \n\t"
+
+ /*** line1 ***/
+ "mov r0, #256 @256=64*4\n\t"
+ "vld4.32 {d26,d27,d28,d29}, [%[outy1]], r0 \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[pic0]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[pic0]]! \n\t"
+
+ /*** mb 2 ***/
+ "vld4.32 {d26,d27,d28,d29}, [%[outy1]] \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[pic0]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[pic0]]! \n\t"
+
+ /*** line2 ***/
+ "vld4.32 {d26,d27,d28,d29}, [%[outy2]],r0 \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[pic1]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[pic1]]! \n\t"
+
+ /*** mb2 ***/
+ "vld4.32 {d26,d27,d28,d29}, [%[outy2]] \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[pic1]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[pic1]]! \n\t"
+
+ /*** uv ***/
+ "mov r0, #16 @16=4*4 \n\t"
+ "vld4.32 {d22,d24,d26,d28}, [%[outv1]], r0 \n\t"
+ "vld4.32 {d23,d25,d27,d29}, [%[outu1]], r0 \n\t"
+
+ "mov r0, #128 \n\t"
+ "vdup.u32 d30, r0 \n\t"
+ "vqadd.s32 d22, d22, d30 \n\t"
+ "vqadd.s32 d23, d23, d30 \n\t"
+ "vqadd.s32 d24, d24, d30 \n\t"
+ "vqadd.s32 d25, d25, d30 \n\t"
+ "vqadd.s32 d26, d26, d30 \n\t"
+ "vqadd.s32 d27, d27, d30 \n\t"
+ "vqadd.s32 d28, d28, d30 \n\t"
+ "vqadd.s32 d29, d29, d30 \n\t"
+
+ "mov r0, #0 \n\t"
+ "vdup.u32 d30, r0 \n\t"
+
+ "vmax.s32 d22, d22, d30 \n\t"
+ "vmin.s32 d22, d22, d31 \n\t"
+ "vmax.s32 d24, d24, d30 \n\t"
+ "vmin.s32 d24, d24, d31 \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+
+ "vmax.s32 d23, d23, d30 \n\t"
+ "vmin.s32 d23, d23, d31 \n\t"
+ "vmax.s32 d25, d25, d30 \n\t"
+ "vmin.s32 d25, d25, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+
+ "vst4.8 {d22[0],d23[0],d24[0],d25[0]}, [%[uv]]! \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[uv]]! \n\t"
+ "vst4.8 {d22[4],d23[4],d24[4],d25[4]}, [%[uv]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[uv]]! \n\t"
+ //////////////////////////////
+
+ "4:@end \n\t"
+ : [outy1] "+r" (outy1), [outy2] "+r" (outy2),
+ [pic0] "+r" (pic0), [pic1] "+r" (pic1),
+ [outu1] "+r" (outu1), [outv1] "+r" (outv1),
+ [uv] "+r" (uv)
+ : [width] "r" (width)
+ : "cc", "memory", "r0","q11", "q12", "q13","q14","q15"
+ );
+ }
+ if(j == 3)
+ outy += 80;
+ else
+ outy += 16;
+ outu +=8; outv +=8;
+ pic0 += 2 * (width - 8);
+ pic1 += 2 * (width - 8);
+ uv += width - 16;
+ }
+}
+
+void yuv420pto420p(int * out, addr *pic, int width)
+{
+ int j, k;
+ unsigned char *pic0, *pic1, *u, *v;
+ int *outy, *outu, *outv;
+ int *outy1 ;
+ int *outy2 ;
+ int *outu1 ;
+ int *outv1 ;
+
+ pic0 = pic->y;
+ pic1 = pic->y + width;
+ v = pic->v;
+ u = pic->u;
+ outy = out;
+ outu = out + 64 * 4;
+ outv = out + 64 * 5;
+
+ for (j = 0; j < 8; j++)
+ {
+ outy1 = outy;
+ outy2 = outy+8;
+ outv1 = outv;
+ outu1 = outu;
+
+ {
+ asm volatile(
+ "mov r0,#0 \n\t"
+ "vdup.u32 d30, r0 \n\t"
+ "mov r0,#255 \n\t"
+ "vdup.u32 d31, r0 \n\t"
+
+ /*** line1 ***/
+ "mov r0, #256 @256=64*4\n\t"
+ "vld4.32 {d26,d27,d28,d29}, [%[outy1]], r0 \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[pic0]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[pic0]]! \n\t"
+
+ /*** mb 2 ***/
+ "vld4.32 {d26,d27,d28,d29}, [%[outy1]] \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[pic0]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[pic0]]! \n\t"
+
+ /*** line2 ***/
+ "vld4.32 {d26,d27,d28,d29}, [%[outy2]],r0 \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[pic1]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[pic1]]! \n\t"
+
+ /*** mb2 ***/
+ "vld4.32 {d26,d27,d28,d29}, [%[outy2]] \n\t"
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[pic1]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[pic1]]! \n\t"
+
+ /*** uv ***/
+ "mov r0, #16 @16=4*4 \n\t"
+ "vld4.32 {d22,d23,d24,d25}, [%[outv1]], r0 \n\t"
+ "vld4.32 {d26,d27,d28,d29}, [%[outu1]], r0 \n\t"
+
+ "mov r0, #128 \n\t"
+ "vdup.u32 d30, r0 \n\t"
+ "vqadd.s32 d22, d22, d30 \n\t"
+ "vqadd.s32 d23, d23, d30 \n\t"
+ "vqadd.s32 d24, d24, d30 \n\t"
+ "vqadd.s32 d25, d25, d30 \n\t"
+ "vqadd.s32 d26, d26, d30 \n\t"
+ "vqadd.s32 d27, d27, d30 \n\t"
+ "vqadd.s32 d28, d28, d30 \n\t"
+ "vqadd.s32 d29, d29, d30 \n\t"
+
+ "mov r0, #0 \n\t"
+ "vdup.u32 d30, r0 \n\t"
+
+ "vmax.s32 d22, d22, d30 \n\t"
+ "vmin.s32 d22, d22, d31 \n\t"
+ "vmax.s32 d23, d23, d30 \n\t"
+ "vmin.s32 d23, d23, d31 \n\t"
+ "vmax.s32 d24, d24, d30 \n\t"
+ "vmin.s32 d24, d24, d31 \n\t"
+ "vmax.s32 d25, d25, d30 \n\t"
+ "vmin.s32 d25, d25, d31 \n\t"
+
+ "vmax.s32 d26, d26, d30 \n\t"
+ "vmin.s32 d26, d26, d31 \n\t"
+ "vmax.s32 d27, d27, d30 \n\t"
+ "vmin.s32 d27, d27, d31 \n\t"
+ "vmax.s32 d28, d28, d30 \n\t"
+ "vmin.s32 d28, d28, d31 \n\t"
+ "vmax.s32 d29, d29, d30 \n\t"
+ "vmin.s32 d29, d29, d31 \n\t"
+
+ "vst4.8 {d22[0],d23[0],d24[0],d25[0]}, [%[v]]! \n\t"
+ "vst4.8 {d22[4],d23[4],d24[4],d25[4]}, [%[v]]! \n\t"
+ "vst4.8 {d26[0],d27[0],d28[0],d29[0]}, [%[u]]! \n\t"
+ "vst4.8 {d26[4],d27[4],d28[4],d29[4]}, [%[u]]! \n\t"
+ //////////////////////////////
+
+ "4:@end \n\t"
+ : [outy1] "+r" (outy1), [outy2] "+r" (outy2),
+ [pic0] "+r" (pic0), [pic1] "+r" (pic1),
+ [outu1] "+r" (outu1), [outv1] "+r" (outv1),
+ [u] "+r" (u), [v] "+r" (v)
+ : [width] "r" (width)
+ : "cc", "memory", "r0","q11","q12","q13","q14","q15"
+ );
+ }
+ if(j == 3)
+ outy += 80;
+ else
+ outy += 16;
+ outu += 8; outv += 8;
+ pic0 += 2 * (width - 8);
+ pic1 += 2 * (width - 8);
+ u += width / 2 - 8;
+ v += width / 2 - 8;
+ }
+}
+
void yuv422pto420sp(int * out, addr *pic, int width)
{
int j, k;
@@ -1505,7 +1801,7 @@ void yuv422pto420sp(int * out, addr *pic, int width)
[outu1] "+r" (outu1), [outv1] "+r" (outv1),
[uv] "+r" (uv)
: [width] "r" (width)
- : "cc", "memory", "r0","r1", "r2", "r4", "q0", "q1"
+ : "cc", "memory", "r0","q11","q12","q13","q14","q15"
);
}
outy += 16;outu +=8; outv +=8;
@@ -1621,7 +1917,7 @@ void yuv422pto420p(int * out, addr *pic, int width)
[outu1] "+r" (outu1), [outv1] "+r" (outv1),
[v] "+r" (v),[u] "+r" (u)
: [width] "r" (width)
- : "cc", "memory", "r0","r1", "r2", "r4", "q0", "q1"
+ : "cc", "memory", "r0","q11", "q12", "q13","q14","q15"
);
}
outy += 16;outu +=8; outv +=8;
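
All of the NEON conversion routines in this file share one saturation pattern: each 32-bit IDCT output sample is clamped with vmax against 0 and vmin against 255 before the narrowing vst4.8 store, and chroma samples first get a +128 bias via vqadd. A scalar C++ equivalent of that clamp, as a reading aid only (not part of the commit):

    // Scalar equivalent of the vmax/vmin clamp used by the NEON paths above.
    static inline unsigned char clamp_u8(int v)
    {
        if (v < 0)   return 0;
        if (v > 255) return 255;
        return static_cast<unsigned char>(v);
    }

    // A luma sample y is stored as clamp_u8(y); a chroma sample c is stored
    // as clamp_u8(c + 128).
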
diff --git a/mjpeg/jpegdec.c b/mjpeg/jpegdec.c
index 1f2166b..d30653c 100755
--- a/mjpeg/jpegdec.c
+++ b/mjpeg/jpegdec.c
@@ -242,8 +242,11 @@ int jpeg_decode(BYTE **pic, BYTE *buf, int width, int height, unsigned int outfo
ftopict convert;
int err = 0;
int isInitHuffman = 0;
+ int mb_x,mb_y;
+ int mc_x,mc_y;
+ int down_sampling = 1;
decdata = (struct jpeg_decdata *)malloc(sizeof(struct jpeg_decdata));
-
+ memset(&info,0x0,sizeof(info));
for(i=0;i<6;i++)
max[i]=0;
@@ -397,7 +400,6 @@ int jpeg_decode(BYTE **pic, BYTE *buf, int width, int height, unsigned int outfo
goto error;
}
- int stride = 0;
switch (dscans[0].hv)
{
case 0x22: // 411
@@ -408,9 +410,18 @@ int jpeg_decode(BYTE **pic, BYTE *buf, int width, int height, unsigned int outfo
xpitch = 16 * bpp;
pitch = width * bpp; // YUYV out
ypitch = 16 * pitch;
- //convert = yuv420pto422; //choose the right conversion function
- err = ERR_NOT_SUPPORTED;
- goto error;
+ mb_x = 16;
+ mb_y = 16;
+ mc_y = 8;
+ if(outformat == V4L2_PIX_FMT_NV21){
+ convert = yuv420pto420sp; //choose the right conversion function
+ mc_x = 16;
+ down_sampling = 1;
+ }else{
+ convert = yuv420pto420p;
+ mc_x = 8;
+ down_sampling = 2;
+ }
break;
case 0x21: //422
mb=4;
@@ -420,12 +431,17 @@ int jpeg_decode(BYTE **pic, BYTE *buf, int width, int height, unsigned int outfo
xpitch = 16 * bpp;
pitch = width * bpp; // YUYV out
ypitch = 8 * pitch;
+ mb_x = 16;
+ mb_y = 8;
+ mc_y = 8;
if(outformat == V4L2_PIX_FMT_NV21){
convert = yuv422pto420sp; //choose the right conversion function
- stride = 2;
+ mc_x = 16;
+ down_sampling = 2;
}else{
convert = yuv422pto420p;
- stride = 4;
+ mc_x = 8;
+ down_sampling = 4;
}
break;
case 0x11: //444
@@ -439,11 +455,13 @@ int jpeg_decode(BYTE **pic, BYTE *buf, int width, int height, unsigned int outfo
{
mb = 1;
//convert = yuv400pto422; //choose the right conversion function
+ CAMHAL_LOGEA("Format YUV400 Not Supproted");
}
else
{
mb=3;
//convert = yuv444pto422; //choose the right conversion function
+ CAMHAL_LOGEA("Format YUV444 Not Supproted");
}
err = ERR_NOT_SUPPORTED;
goto error;
@@ -522,9 +540,9 @@ int jpeg_decode(BYTE **pic, BYTE *buf, int width, int height, unsigned int outfo
break;
} // switch enc411
- paddr.y = *pic+my*width*8+mx*16;
- paddr.v = *pic+width*height+my*width*8/stride+mx*32/stride;
- paddr.u = *pic+width*height*5/4+my*width*8/stride+mx*32/stride;
+ paddr.y = *pic + my * width * mb_y + mx * mb_x;
+ paddr.v = *pic + width * height + my * width * mc_y / down_sampling + mx * mc_x;
+ paddr.u = *pic + width * height*5/4 + my * width * mc_y / down_sampling + mx * mc_x;
convert(decdata->out,&paddr,width);
}
}
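
The mb_x/mb_y/mc_x/mc_y/down_sampling rework above generalizes the per-macroblock plane addressing. A worked example (illustration only): for a 640x480 4:2:0 stream decoded to NV21 (mb_x = mb_y = mc_x = 16, mc_y = 8, down_sampling = 1) at macroblock mx = 2, my = 1, the offsets from *pic are

    y  offset = my*width*mb_y + mx*mb_x                              = 1*640*16 + 2*16         = 10272
    uv offset = width*height + my*width*mc_y/down_sampling + mx*mc_x = 307200 + 1*640*8 + 2*16 = 312352

For the planar YV12 path (mc_x = 8, down_sampling = 2) each half-width chroma plane instead advances by my*(width/2)*8 + mx*8 per macroblock.
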
diff --git a/utils/util.cpp b/utils/util.cpp
index 652c463..069392c 100755
--- a/utils/util.cpp
+++ b/utils/util.cpp
@@ -322,6 +322,51 @@ void yuyv_to_yv12(unsigned char *src, unsigned char *dst, int width, int height)
}
}
#endif
+void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
+{
+ int stride = (width + 31) & ( ~31);
+ int w, h;
+ for (h=0; h<height; h++)
+ {
+ memcpy( dst, src, width*3);
+ dst += width*3;
+ src += stride*3;
+ }
+}
+
+void nv21_memcpy_align32(unsigned char *dst, unsigned char *src, int width, int height)
+{
+ int stride = (width + 31) & ( ~31);
+ int w, h;
+ for (h=0; h<height*3/2; h++)
+ {
+ memcpy( dst, src, width);
+ dst += width;
+ src += stride;
+ }
+}
+
+void yv12_memcpy_align32(unsigned char *dst, unsigned char *src, int width, int height)
+{
+ int new_width = (width + 63) & ( ~63);
+ int stride;
+ int w, h;
+ for (h=0; h<height; h++)
+ {
+ memcpy( dst, src, width);
+ dst += width;
+ src += new_width;
+ }
+
+ stride = ALIGN(width/2, 16);
+ for (h=0; h<height; h++)
+ {
+ memcpy( dst, src, width/2);
+ dst += stride;
+ src += new_width/2;
+ }
+}
+
void yv12_adjust_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
//width should be an even number.
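
The three helpers added above cover V4L2 buffers whose rows carry alignment padding: rgb24_memcpy and nv21_memcpy_align32 assume a source row stride of (width + 31) & ~31, while yv12_memcpy_align32 assumes a (width + 63) & ~63 luma stride and packs the destination chroma rows to ALIGN(width/2, 16). A quick worked example of when the padded path triggers (illustration only, assuming the driver reports a row-padded buf.length):

    stride        = (720 + 31) & ~31 = 736
    frame.mLength = 720*576*3/2      = 622080  (packed NV21)
    buf.length    = 736*576*3/2      = 635904  (padded NV21)

Since the lengths differ, previewThread() falls back to nv21_memcpy_align32(), which copies 720 bytes per row and skips 16 padding bytes; for a 640x480 frame the stride is already 640, the lengths match, and the plain memcpy path is kept.
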
diff --git a/utils/util.h b/utils/util.h
index a4f2fc9..f0ab08c 100755
--- a/utils/util.h
+++ b/utils/util.h
@@ -9,4 +9,7 @@ void yuyv422_to_rgb24(unsigned char *buf, unsigned char *rgb, int width, int hei
void yuyv422_to_nv21(unsigned char *bufsrc, unsigned char *bufdest, int width, int height);
void yv12_adjust_memcpy(unsigned char *dst, unsigned char *src, int width, int height);
void yuyv_to_yv12(unsigned char *src, unsigned char *dst, int width, int height);
+void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height);
+void nv21_memcpy_align32(unsigned char *dst, unsigned char *src, int width, int height);
+void yv12_memcpy_align32(unsigned char *dst, unsigned char *src, int width, int height);
#endif /* AML_CAMERA_HARDWARE_INCLUDE_*/