summaryrefslogtreecommitdiff
author brian.zhu <brian.zhu@amlogic.com> 2013-11-12 18:24:55 (GMT)
committer brian.zhu <brian.zhu@amlogic.com>2013-11-12 18:24:55 (GMT)
commit dc002f933939e2008f09f01bb82fb67fc83df1d6 (patch)
treee65d8c78c643bff2809abc49d44fc013d35a8c80
parent1fd225699330322f35e78ea065ef7a9efc90de0b (diff)
downloadcamera-dc002f933939e2008f09f01bb82fb67fc83df1d6.zip
camera-dc002f933939e2008f09f01bb82fb67fc83df1d6.tar.gz
camera-dc002f933939e2008f09f01bb82fb67fc83df1d6.tar.bz2
enable metadata mode for video recorder; add canvas mode
Diffstat
-rwxr-xr-xAppCallbackNotifier.cpp27
-rwxr-xr-xV4LCameraAdapter/V4LCameraAdapter.cpp46
-rwxr-xr-xinc/CameraHal.h3
-rwxr-xr-xinc/V4LCameraAdapter/V4LCameraAdapter.h5
-rwxr-xr-xinc/VideoMetadata.h4
5 files changed, 60 insertions, 25 deletions
diff --git a/AppCallbackNotifier.cpp b/AppCallbackNotifier.cpp
index 3f3b959..491d696 100755
--- a/AppCallbackNotifier.cpp
+++ b/AppCallbackNotifier.cpp
@@ -1072,37 +1072,36 @@ void AppCallbackNotifier::notifyFrame()
void *y_uv[2];
mapper.lock((buffer_handle_t)vBuf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
- structConvImage input = {frame->mWidth,
- frame->mHeight,
+ structConvImage input = {(int)frame->mWidth,
+ (int)frame->mHeight,
4096,
IC_FORMAT_YCbCr420_lp,
(mmByte *)frame->mYuv[0],
- (mmByte *)frame->mYuv[1],
- frame->mOffset};
+ (mmByte *)(frame->mYuv[0]+frame->mWidth*frame->mHeight),
+ (int)frame->mOffset};
structConvImage output = {mVideoWidth,
mVideoHeight,
4096,
IC_FORMAT_YCbCr420_lp,
(mmByte *)y_uv[0],
- (mmByte *)y_uv[1],
+ (mmByte *)((unsigned)y_uv[0]+mVideoWidth*mVideoHeight),
0};
VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0);
mapper.unlock((buffer_handle_t)vBuf);
- videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
- videoMetadataBuffer->handle = (void *)vBuf;
- videoMetadataBuffer->offset = 0;
+ videoMetadataBuffer->metadataBufferType = kMetadataBufferTypeCanvasSource;
+ videoMetadataBuffer->handle= (void *)vBuf;
+ videoMetadataBuffer->canvas = 0;
}
else
{
- videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
- videoMetadataBuffer->handle = frame->mBuffer;
- videoMetadataBuffer->offset = frame->mOffset;
+ videoMetadataBuffer->metadataBufferType = kMetadataBufferTypeCanvasSource;
+ videoMetadataBuffer->handle = (void*)frame->mBuffer;
+ videoMetadataBuffer->canvas = frame->mCanvas;
}
-
- CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x",
- frame->mBuffer, videoMetadataBuffer, videoMedatadaBufferMemory);
+ CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x, videoMetadataBuffer->ptr=0x%x, videoMetadataBuffer->canvas_index = 0x%x",
+ frame->mBuffer, videoMetadataBuffer, videoMedatadaBufferMemory,(unsigned)videoMetadataBuffer->handle,videoMetadataBuffer->canvas);
mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME,
videoMedatadaBufferMemory, 0, mCallbackCookie);
diff --git a/V4LCameraAdapter/V4LCameraAdapter.cpp b/V4LCameraAdapter/V4LCameraAdapter.cpp
index ff15314..2e27dcc 100755
--- a/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -165,6 +165,8 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
return NO_MEMORY;
}
+ memset(mVideoInfo,0,sizeof(struct VideoInfo));
+
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
#ifdef AMLOGIC_TWO_CH_UVC
mCamEncodeIndex = -1;
@@ -214,7 +216,15 @@ status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
return -EINVAL;
}
+ mVideoInfo->canvas_mode = false;
+ char* str = strchr((const char *)mVideoInfo->cap.card,'.');
+ if(str){
+ if(!strncmp(str,".canvas",strlen(str))){
+ mVideoInfo->canvas_mode = true;
+ CAMHAL_LOGDB("Camera %d use canvas mode",mSensorIndex);
+ }
+ }
if (strcmp(caps->get(CameraProperties::FACING_INDEX), (const char *) android::ExCameraParameters::FACING_FRONT) == 0)
mbFrontCamera = true;
else
@@ -792,6 +802,9 @@ status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
return -1;
}
+ if(mVideoInfo->canvas_mode){
+ mVideoInfo->canvas[i] = mVideoInfo->buf.reserved;
+ }
uint32_t *ptr = (uint32_t*) bufArr;
//Associate each Camera internal buffer with the one from Overlay
CAMHAL_LOGDB("mPreviewBufs.add %#x, %d", ptr[i], i);
@@ -881,6 +894,8 @@ status_t V4LCameraAdapter::UseBuffersCapture(void* bufArr, int num)
CAMHAL_LOGEB("Unable to map buffer (%s)", strerror(errno));
return -1;
}
+ if(mVideoInfo->canvas_mode)
+ mVideoInfo->canvas[i] = mVideoInfo->buf.reserved;
uint32_t *ptr = (uint32_t*) bufArr;
mCaptureBuf = (camera_memory_t*)ptr[0];
@@ -1146,6 +1161,7 @@ status_t V4LCameraAdapter::stopPreview()
if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0){
CAMHAL_LOGEA("Unmap failed");
}
+ mVideoInfo->canvas[i] = 0;
}
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
@@ -1168,11 +1184,11 @@ status_t V4LCameraAdapter::stopPreview()
return ret;
}
-char * V4LCameraAdapter::GetFrame(int &index)
+char * V4LCameraAdapter::GetFrame(int &index, unsigned int* canvas)
{
int ret;
if(nQueued<=0){
- CAMHAL_LOGEA("GetFrame: No buff for Dequeue");
+ CAMHAL_LOGVA("GetFrame: No buff for Dequeue");
return NULL;
}
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
@@ -1202,6 +1218,10 @@ char * V4LCameraAdapter::GetFrame(int &index)
nDequeued++;
nQueued--;
index = mVideoInfo->buf.index;
+ if(mVideoInfo->canvas_mode)
+ *canvas = mVideoInfo->canvas[mVideoInfo->buf.index];
+ else
+ *canvas = 0;
return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
}
@@ -1330,6 +1350,7 @@ int V4LCameraAdapter::previewThread()
int active_duration = 0;
uint8_t* ptr = NULL;
bool noFrame = true;
+ unsigned int canvas_id = 0;
if (mPreviewing){
int index = -1;
@@ -1341,14 +1362,14 @@ int V4LCameraAdapter::previewThread()
}
}
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
- delay = previewframeduration>>2;
+ delay = 5000;//previewframeduration>>2;
#else
delay = previewframeduration;
#endif
if(mSensorFormat != V4L2_PIX_FMT_MJPEG)
usleep(delay);
- char *fp = this->GetFrame(index);
+ char *fp = this->GetFrame(index, &canvas_id);
if((-1==index)||!fp){
noFrame = true;
@@ -1393,6 +1414,7 @@ int V4LCameraAdapter::previewThread()
mFirstBuff = false;
mCache.index = -1;
mCache.bufPtr == NULL;
+ mCache.canvas = 0;
ptr = (uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(index));
gettimeofday(&previewTime1, NULL);
}else{
@@ -1410,11 +1432,14 @@ int V4LCameraAdapter::previewThread()
fillThisBuffer((uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(mCache.index)), CameraFrame::PREVIEW_FRAME_SYNC);
}
mCache.index = -1;
+ mCache.canvas = 0;
}else if(mCache.index != -1){ //current catch no picture,but have a tmp buf;
fp = mCache.bufPtr;
ptr = (uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(mCache.index));
index = mCache.index;
+ canvas_id = mCache.canvas;
mCache.index = -1;
+ mCache.canvas = 0;
}else{
return 0;
}
@@ -1425,6 +1450,7 @@ int V4LCameraAdapter::previewThread()
fillThisBuffer((uint8_t*) mPreviewBufs.keyAt(mPreviewIdxs.valueFor(mCache.index)), CameraFrame::PREVIEW_FRAME_SYNC);
}
mCache.index = index;
+ mCache.canvas = canvas_id;
}
return 0;
}
@@ -1441,6 +1467,7 @@ int V4LCameraAdapter::previewThread()
mFrameInv = 0;
#endif
+ frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
uint8_t* dest = NULL;
#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
camera_memory_t* VideoCameraBufferMemoryBase = (camera_memory_t*)ptr;
@@ -1510,9 +1537,9 @@ int V4LCameraAdapter::previewThread()
frame.mOffset = 0;
frame.mYuv[0] = 0;
frame.mYuv[1] = 0;
+ frame.mCanvas = canvas_id;
frame.mWidth = width;
frame.mHeight = height;
- frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
frame.mPixelFmt = mPixelFormat;
ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
if (ret){
@@ -1746,7 +1773,8 @@ int V4LCameraAdapter::pictureThread()
}
int index = 0;
- char *fp = this->GetFrame(index);
+ unsigned int canvas_id = 0;
+ char *fp = this->GetFrame(index,&canvas_id);
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
while((mVideoInfo->buf.length != mVideoInfo->buf.bytesused)&&(dqTryNum>0)){
if(NULL != fp){
@@ -1771,14 +1799,14 @@ int V4LCameraAdapter::pictureThread()
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
usleep( 10000 );
#endif
- fp = this->GetFrame(index);
+ fp = this->GetFrame(index,&canvas_id);
}
#endif
#ifdef AMLOGIC_CAMERA_NONBLOCK_SUPPORT
while(!fp && (-1 == index) ){
usleep( 10000 );
- fp = this->GetFrame(index);
+ fp = this->GetFrame(index,&canvas_id);
}
#else
if(!fp){
@@ -1862,6 +1890,7 @@ int V4LCameraAdapter::pictureThread()
frame.mOffset = 0;
frame.mYuv[0] = 0;
frame.mYuv[1] = 0;
+ frame.mCanvas = canvas_id;
frame.mWidth = width;
frame.mHeight = height;
frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
@@ -1886,6 +1915,7 @@ int V4LCameraAdapter::pictureThread()
if (munmap(mVideoInfo->mem[0], mVideoInfo->buf.length) < 0){
CAMHAL_LOGEA("Unmap failed");
}
+ mVideoInfo->canvas[0] = 0;
#ifdef AMLOGIC_USB_CAMERA_SUPPORT
mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
diff --git a/inc/CameraHal.h b/inc/CameraHal.h
index 5a2eceb..28320b4 100755
--- a/inc/CameraHal.h
+++ b/inc/CameraHal.h
@@ -254,6 +254,7 @@ class CameraFrame
mYuv[0] = 0;
mYuv[1] = 0;
+ mCanvas = 0;
}
//copy constructor
@@ -275,6 +276,7 @@ class CameraFrame
mYuv[0] = frame.mYuv[0];
mYuv[1] = frame.mYuv[1];
+ mCanvas = frame.mCanvas;
}
void *mCookie;
@@ -291,6 +293,7 @@ class CameraFrame
unsigned int mQuirks;
unsigned int mPixelFmt;
unsigned int mYuv[2];
+ unsigned int mCanvas;
///@todo add other member vars like stride etc
};
diff --git a/inc/V4LCameraAdapter/V4LCameraAdapter.h b/inc/V4LCameraAdapter/V4LCameraAdapter.h
index 5deecaf..ac36e9b 100755
--- a/inc/V4LCameraAdapter/V4LCameraAdapter.h
+++ b/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -47,7 +47,9 @@ struct VideoInfo {
struct v4l2_buffer buf;
struct v4l2_requestbuffers rb;
void *mem[NB_BUFFER];
+ unsigned int canvas[NB_BUFFER];
bool isStreaming;
+ bool canvas_mode;
int width;
int height;
int formatIn;
@@ -213,6 +215,7 @@ typedef enum camera_focus_mode_e {
typedef struct cam_cache_buf{
char *bufPtr;
int index;
+ unsigned canvas;
}cache_buf_t;
typedef struct cam_LimitedRate_Item{
@@ -330,7 +333,7 @@ private:
//Used for calculation of the average frame rate during preview
status_t recalculateFPS();
- char * GetFrame(int &index);
+ char * GetFrame(int &index, unsigned int* canvas);
int previewThread();
diff --git a/inc/VideoMetadata.h b/inc/VideoMetadata.h
index a1460d6..dd4171d 100755
--- a/inc/VideoMetadata.h
+++ b/inc/VideoMetadata.h
@@ -23,9 +23,9 @@
typedef struct
{
- int metadataBufferType;
+ unsigned int metadataBufferType;
void* handle;
- int offset;
+ unsigned int canvas;
}
video_metadata_t;