author Lawrence Mok <lawrence.mok@amlogic.com> 2011-11-23 22:39:05 (GMT)
committer Lawrence Mok <lawrence.mok@amlogic.com> 2011-11-23 22:39:05 (GMT)
commit dd30781f1a6b43d95d34a901af8a52155335000a (patch)
tree fa93de8b04e10b25d1bd0992b79d0440732e3a4d
parent 02d7d1e930c2bd00f7a382fbf87cabc412543727 (diff)
Replace with new camera HAL.
Diffstat
-rw-r--r-- ANativeWindowDisplayAdapter.cpp 1242
-rwxr-xr-x AmlogicCameraHardware.cpp 721
-rwxr-xr-x AmlogicCameraHardware.h 183
-rw-r--r-- [-rwxr-xr-x] Android.mk 99
-rw-r--r-- AppCallbackNotifier.cpp 1740
-rw-r--r-- BaseCameraAdapter.cpp 2312
-rw-r--r-- CameraHal.cpp 3516
-rw-r--r-- CameraHalCommon.cpp 121
-rw-r--r-- CameraHalUtilClasses.cpp 362
-rw-r--r-- CameraHal_Module.cpp 682
-rw-r--r-- CameraParameters.cpp 193
-rw-r--r-- CameraProperties.cpp 122
-rw-r--r-- Encoder_libjpeg.cpp 462
-rw-r--r-- MemoryManager.cpp 227
-rw-r--r-- NV12_resize.c 307
-rw-r--r-- SensorListener.cpp 233
-rw-r--r-- TICameraParameters.cpp 202
-rwxr-xr-x V4L2/CameraSetting.cpp 226
-rwxr-xr-x V4L2/CameraSetting.h 43
-rwxr-xr-x V4L2/OpCameraHardware.c 223
-rwxr-xr-x V4L2/V4L2Camera.cpp 776
-rwxr-xr-x V4L2/V4L2Camera.h 56
-rwxr-xr-x V4L2/amlogic_camera_para.h 148
-rw-r--r-- V4LCameraAdapter/V4LCameraAdapter.cpp 796
-rw-r--r-- inc/ANativeWindowDisplayAdapter.h 189
-rw-r--r-- inc/BaseCameraAdapter.h 272
-rw-r--r-- inc/CameraHal.h 1264
-rw-r--r-- inc/CameraProperties.h 199
-rwxr-xr-x inc/Encoder_libjpeg.h 174
-rw-r--r-- inc/General3A_Settings.h 280
-rw-r--r-- inc/NV12_resize.h 148
-rw-r--r-- inc/SensorListener.h 101
-rw-r--r-- inc/TICameraParameters.h 242
-rw-r--r-- inc/V4LCameraAdapter/V4LCameraAdapter.h 160
-rw-r--r-- inc/VideoMetadata.h 32
-rwxr-xr-x jpegenc/amljpeg_enc.c 240
-rwxr-xr-x jpegenc/amljpeg_enc.h 27
-rwxr-xr-x jpegenc/jconfig.h 156
-rwxr-xr-x jpegenc/jmorecfg.h 387
-rwxr-xr-x jpegenc/jpeglib.h 1100
-rwxr-xr-x util.cpp 285
-rwxr-xr-x util.h 10
-rwxr-xr-x utils/Android.mk 34
-rw-r--r-- utils/DebugUtils.h 36
-rw-r--r-- utils/ErrorUtils.cpp 142
-rw-r--r-- utils/ErrorUtils.h 52
-rwxr-xr-x utils/MessageQueue.cpp 415
-rwxr-xr-x utils/MessageQueue.h 107
-rw-r--r-- utils/Semaphore.cpp 232
-rw-r--r-- utils/Semaphore.h 59
50 files changed, 16719 insertions, 4616 deletions
diff --git a/ANativeWindowDisplayAdapter.cpp b/ANativeWindowDisplayAdapter.cpp
new file mode 100644
index 0000000..a868407
--- a/dev/null
+++ b/ANativeWindowDisplayAdapter.cpp
@@ -0,0 +1,1242 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#define LOG_TAG "CameraHAL"
+
+#include "ANativeWindowDisplayAdapter.h"
+//#include <OMX_IVCommon.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+//#include <hal_public.h>
+
+#define LOG_NDEBUG 0
+#undef LOG_TAG
+#define LOG_TAG "ANativeW"
+#undef LOG_FUNCTION_NAME
+#undef LOG_FUNCTION_NAME_EXIT
+#define LOG_FUNCTION_NAME LOGV("%d: %s() ENTER", __LINE__, __FUNCTION__);
+#define LOG_FUNCTION_NAME_EXIT LOGV("%d: %s() EXIT", __LINE__, __FUNCTION__);
+
+namespace android {
+
+///Constant declarations
+///@todo Check the time units
+const int ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT = 1000; // seconds
+
+//Suspend the display after the given number of consecutive failed dequeues
+const int ANativeWindowDisplayAdapter::FAILED_DQS_TO_SUSPEND = 3;
+
+
+const char* getPixFormatConstant(const char* parameters_format)
+{
+ const char* pixFormat;
+
+ if ( parameters_format != NULL )
+ {
+ if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ CAMHAL_LOGVA("CbYCrY format selected");
+ pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+ else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
+ {
+ // TODO(XXX): We are treating YV12 the same as YUV420SP
+ CAMHAL_LOGVA("YUV420SP format selected");
+ pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+ else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ CAMHAL_LOGVA("RGB565 format selected");
+ pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_RGB565;
+ }
+ else
+ {
+ CAMHAL_LOGEA("Invalid format, CbYCrY format selected as default");
+ pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEA("Preview format is NULL, defaulting to CbYCrY");
+ pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+
+ return pixFormat;
+}
+
+const size_t getBufSize(const char* parameters_format, int width, int height)
+{
+ int buf_size;
+
+ if ( parameters_format != NULL ) {
+ if (strcmp(parameters_format,
+ (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ buf_size = width * height * 2;
+ }
+ else if((strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) ||
+ (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420P) == 0)) {
+ buf_size = width * height * 3 / 2;
+ }
+ else if(strcmp(parameters_format,
+ (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ buf_size = width * height * 2;
+ } else {
+ CAMHAL_LOGEA("Invalid format");
+ buf_size = 0;
+ }
+ } else {
+ CAMHAL_LOGEA("Preview format is NULL");
+ buf_size = 0;
+ }
+
+ return buf_size;
+}
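+// Illustrative sizing only (the resolution below is an example, not a value used by
+// this HAL): a 640x480 frame needs 640*480*2 = 614400 bytes for YUV422I or RGB565,
+// and 640*480*3/2 = 460800 bytes for YUV420SP / YV12.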
+/*--------------------ANativeWindowDisplayAdapter Class STARTS here-----------------------------*/
+
+
+/**
+ * Display Adapter class STARTS here..
+ */
+ANativeWindowDisplayAdapter::ANativeWindowDisplayAdapter():mDisplayThread(NULL),
+ mDisplayState(ANativeWindowDisplayAdapter::DISPLAY_INIT),
+ mDisplayEnabled(false),
+ mBufferCount(0)
+
+
+
+{
+ LOG_FUNCTION_NAME;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ mShotToShot = false;
+ mStartCapture.tv_sec = 0;
+ mStartCapture.tv_usec = 0;
+ mStandbyToShot.tv_sec = 0;
+ mStandbyToShot.tv_usec = 0;
+ mMeasureStandby = false;
+#endif
+
+ mPixelFormat = NULL;
+ mBufferHandleMap = NULL;
+ mGrallocHandleMap = NULL;
+ mOffsetsMap = NULL;
+ mFrameProvider = NULL;
+ mANativeWindow = NULL;
+
+ mFrameWidth = 0;
+ mFrameHeight = 0;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+
+ mSuspend = false;
+ mFailedDQs = 0;
+
+ mPaused = false;
+ mXOff = 0;
+ mYOff = 0;
+ mFirstInit = false;
+
+ mFD = -1;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+ANativeWindowDisplayAdapter::~ANativeWindowDisplayAdapter()
+{
+ Semaphore sem;
+ TIUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ ///If Frame provider exists
+ if (mFrameProvider) {
+ // Unregister with the frame provider
+ mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+ delete mFrameProvider;
+ mFrameProvider = NULL;
+ }
+
+ ///The ANativeWindow object will get destroyed here
+ destroy();
+
+ ///If Display thread exists
+ if(mDisplayThread.get())
+ {
+ ///Kill the display thread
+ sem.Create();
+ msg.command = DisplayThread::DISPLAY_EXIT;
+
+ // Send the semaphore to signal once the command is completed
+ msg.arg1 = &sem;
+
+ ///Post the message to display thread
+ mDisplayThread->msgQ().put(&msg);
+
+ ///Wait for the ACK - implies that the thread is now started and waiting for frames
+ sem.Wait();
+
+ // Exit and cleanup the thread
+ mDisplayThread->requestExitAndWait();
+
+ // Delete the display thread
+ mDisplayThread.clear();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+status_t ANativeWindowDisplayAdapter::initialize()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Create the display thread
+ mDisplayThread = new DisplayThread(this);
+ if ( !mDisplayThread.get() )
+ {
+ CAMHAL_LOGEA("Couldn't create display thread");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_MEMORY;
+ }
+
+ ///Start the display thread
+ status_t ret = mDisplayThread->run("DisplayThread", PRIORITY_URGENT_DISPLAY);
+ if ( ret != NO_ERROR )
+ {
+ CAMHAL_LOGEA("Couldn't run display thread");
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+int ANativeWindowDisplayAdapter::setPreviewWindow(preview_stream_ops_t* window)
+{
+ LOG_FUNCTION_NAME;
+ ///Note that Display Adapter cannot work without a valid window object
+ if ( !window)
+ {
+ CAMHAL_LOGEA("NULL window object passed to DisplayAdapter");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ ///Destroy the existing window object, if it exists
+ destroy();
+
+ ///Move to new window obj
+ mANativeWindow = window;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int ANativeWindowDisplayAdapter::setFrameProvider(FrameNotifier *frameProvider)
+{
+ LOG_FUNCTION_NAME;
+
+ // Check for NULL pointer
+ if ( !frameProvider ) {
+ CAMHAL_LOGEA("NULL passed for frame provider");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ //Release any previous frame providers
+ if ( NULL != mFrameProvider ) {
+ delete mFrameProvider;
+ }
+
+ /** Don't do anything here; just save the pointer for use when the display is
+ actually enabled or disabled
+ */
+ mFrameProvider = new FrameProvider(frameProvider, this, frameCallbackRelay);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int ANativeWindowDisplayAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier )
+ {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mErrorNotifier = errorNotifier;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+status_t ANativeWindowDisplayAdapter::setSnapshotTimeRef(struct timeval *refTime)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != refTime )
+ {
+ Mutex::Autolock lock(mLock);
+ memcpy(&mStartCapture, refTime, sizeof(struct timeval));
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+#endif
+
+
+int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct timeval *refTime, S3DParameters *s3dParams)
+{
+ Semaphore sem;
+ TIUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mDisplayEnabled )
+ {
+ CAMHAL_LOGDA("Display is already enabled");
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+ }
+
+#if 0 //TODO: s3d is not part of bringup...will reenable
+ if (s3dParams)
+ mOverlay->set_s3d_params(s3dParams->mode, s3dParams->framePacking,
+ s3dParams->order, s3dParams->subSampling);
+#endif
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ if ( NULL != refTime )
+ {
+ Mutex::Autolock lock(mLock);
+ memcpy(&mStandbyToShot, refTime, sizeof(struct timeval));
+ mMeasureStandby = true;
+ }
+
+#endif
+
+ //Send START_DISPLAY COMMAND to display thread. Display thread will start and then wait for a message
+ sem.Create();
+ msg.command = DisplayThread::DISPLAY_START;
+
+ // Send the semaphore to signal once the command is completed
+ msg.arg1 = &sem;
+
+ ///Post the message to display thread
+ mDisplayThread->msgQ().put(&msg);
+
+ ///Wait for the ACK - implies that the thread is now started and waiting for frames
+ sem.Wait();
+
+ // Register with the frame provider for frames
+ mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+
+ mDisplayEnabled = true;
+ mPreviewWidth = width;
+ mPreviewHeight = height;
+
+ CAMHAL_LOGVB("mPreviewWidth = %d mPreviewHeight = %d", mPreviewWidth, mPreviewHeight);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
+{
+ status_t ret = NO_ERROR;
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+ LOG_FUNCTION_NAME;
+
+ if(!mDisplayEnabled)
+ {
+ CAMHAL_LOGDA("Display is already disabled");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+
+ // Unregister with the frame provider here
+ mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ mFrameProvider->removeFramePointers();
+
+ if ( NULL != mDisplayThread.get() )
+ {
+ //Send STOP_DISPLAY COMMAND to display thread. Display thread will stop and dequeue all messages
+ // and then wait for message
+ Semaphore sem;
+ sem.Create();
+ TIUTILS::Message msg;
+ msg.command = DisplayThread::DISPLAY_STOP;
+
+ // Send the semaphore to signal once the command is completed
+ msg.arg1 = &sem;
+
+ ///Post the message to display thread
+ mDisplayThread->msgQ().put(&msg);
+
+ ///Wait for the ACK for display to be disabled
+
+ sem.Wait();
+
+ }
+
+ Mutex::Autolock lock(mLock);
+ {
+ ///Reset the display enabled flag
+ mDisplayEnabled = false;
+
+ ///Reset the offset values
+ mXOff = 0;
+ mYOff = 0;
+
+ ///Reset the frame width and height values
+ mFrameWidth =0;
+ mFrameHeight = 0;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+
+ if(cancel_buffer)
+ {
+ // Return the buffers to ANativeWindow here, the mFramesWithCameraAdapterMap is also cleared inside
+ returnBuffersToWindow();
+ }
+ else
+ {
+ mANativeWindow = NULL;
+ // Clear the frames with camera adapter map
+ mFramesWithCameraAdapterMap.clear();
+ }
+
+
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+status_t ANativeWindowDisplayAdapter::pauseDisplay(bool pause)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ {
+ Mutex::Autolock lock(mLock);
+ mPaused = pause;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+
+void ANativeWindowDisplayAdapter::destroy()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Check if the display is disabled, if not disable it
+ if ( mDisplayEnabled )
+ {
+ CAMHAL_LOGDA("WARNING: Calling destroy of Display adapter when display enabled. Disabling display..");
+ disableDisplay(false);
+ }
+
+ mBufferCount = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+// Implementation of inherited interfaces
+void* ANativeWindowDisplayAdapter::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
+{
+ LOG_FUNCTION_NAME;
+ status_t err;
+ int i = -1;
+ const int lnumBufs = numBufs;
+ mBufferHandleMap = new buffer_handle_t*[lnumBufs];
+ mGrallocHandleMap = new native_handle_t*[lnumBufs]; //IMG_native_handle_t*[lnumBufs];
+ int undequeued = 0;
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ Rect bounds;
+
+
+ if ( NULL == mANativeWindow ) {
+ return NULL;
+ }
+
+ // Set gralloc usage bits for window.
+ err = mANativeWindow->set_usage(mANativeWindow, CAMHAL_GRALLOC_USAGE);
+ if (err != 0) {
+ LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ return NULL;
+ }
+
+ CAMHAL_LOGDB("Number of buffers set to ANativeWindow %d", numBufs);
+ ///Set the number of buffers needed for camera preview
+ err = mANativeWindow->set_buffer_count(mANativeWindow, numBufs);
+ if (err != 0) {
+ LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ return NULL;
+ }
+ CAMHAL_LOGDB("Configuring %d buffers for ANativeWindow", numBufs);
+ mBufferCount = numBufs;
+
+
+ // Set window geometry
+ err = mANativeWindow->set_buffers_geometry(
+ mANativeWindow,
+ width,
+ height,
+ HAL_PIXEL_FORMAT_YCrCb_420_SP); //NV21
+
+ if (err != 0) {
+ LOGE("native_window_set_buffers_geometry failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ return NULL;
+ }
+
+ ///We just return the buffers from ANativeWindow if the width and height are the same;
+ ///otherwise (vstab/vnf case) re-allocate buffers using ANativeWindow and then get them
+ ///@todo - Re-allocate buffers for vnf and vstab using the width, height, format, numBufs etc
+ if ( mBufferHandleMap == NULL )
+ {
+ CAMHAL_LOGEA("Couldn't create array for ANativeWindow buffers");
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+ }
+
+ mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued);
+
+ for ( i=0; i < mBufferCount; i++ )
+ {
+ native_handle_t** hndl2hndl; //IMG_native_handle_t** hndl2hndl;
+ native_handle_t* handle; //IMG_native_handle_t* handle;
+ int stride; // dummy variable to get stride
+ // TODO(XXX): Do we need to keep stride information in camera hal?
+
+ err = mANativeWindow->dequeue_buffer(mANativeWindow, (buffer_handle_t**) &hndl2hndl, &stride);
+
+ if (err != 0) {
+ CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ goto fail;
+ }
+
+ handle = *hndl2hndl;
+
+ mBufferHandleMap[i] = (buffer_handle_t*) hndl2hndl;
+ mGrallocHandleMap[i] = handle;
+ mFramesWithCameraAdapterMap.add((int) mGrallocHandleMap[i], i);
+
+ bytes = getBufSize(format, width, height);
+
+ }
+
+ // lock the initial queueable buffers
+ bounds.left = 0;
+ bounds.top = 0;
+ bounds.right = width;
+ bounds.bottom = height;
+
+ for( i = 0; i < mBufferCount-undequeued; i++ )
+ {
+ void *y_uv[2];
+
+ mANativeWindow->lock_buffer(mANativeWindow, mBufferHandleMap[i]);
+
+ mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mFrameProvider->addFramePointers(mGrallocHandleMap[i] , y_uv);
+ }
+
+ // return the rest of the buffers back to ANativeWindow
+ for(i = (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++)
+ {
+ err = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[i]);
+ if (err != 0) {
+ CAMHAL_LOGEB("cancel_buffer failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ goto fail;
+ }
+ mFramesWithCameraAdapterMap.removeItem((int) mGrallocHandleMap[i]);
+ //LOCK UNLOCK TO GET YUV POINTERS
+ void *y_uv[2];
+ mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+ mFrameProvider->addFramePointers(mGrallocHandleMap[i] , y_uv);
+ mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]);
+ }
+
+ mFirstInit = true;
+ mPixelFormat = getPixFormatConstant(format);
+ mFrameWidth = width;
+ mFrameHeight = height;
+
+ return mGrallocHandleMap;
+
+ fail:
+ // need to cancel buffers if any were dequeued
+ for (int start = 0; start < i && i > 0; start++) {
+ int err = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[start]);
+ if (err != 0) {
+ CAMHAL_LOGEB("cancelBuffer failed w/ error 0x%08x", err);
+ break;
+ }
+ mFramesWithCameraAdapterMap.removeItem((int) mGrallocHandleMap[start]);
+ }
+
+ freeBuffer(mGrallocHandleMap);
+
+ CAMHAL_LOGEA("Error occurred, performing cleanup");
+
+ if ( NULL != mErrorNotifier.get() )
+ {
+ mErrorNotifier->errorNotify(-ENOMEM);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+
+}
+
+uint32_t * ANativeWindowDisplayAdapter::getOffsets()
+{
+ const int lnumBufs = mBufferCount;
+
+ LOG_FUNCTION_NAME;
+
+ // TODO(XXX): Need to remove getOffsets from the API. No longer needed
+
+ if ( NULL == mANativeWindow )
+ {
+ CAMHAL_LOGEA("mANativeWindow reference is missing");
+ goto fail;
+ }
+
+ if( mBufferHandleMap == NULL)
+ {
+ CAMHAL_LOGEA("Buffers not allocated yet!!");
+ goto fail;
+ }
+
+ if(mOffsetsMap == NULL)
+ {
+ mOffsetsMap = new uint32_t[lnumBufs];
+ for(int i = 0; i < mBufferCount; i++)
+ {
+ //IMG_native_handle_t* handle = (IMG_native_handle_t*) *(mBufferHandleMap[i]);
+ native_handle_t* handle = (native_handle_t*) *(mBufferHandleMap[i]);
+ mOffsetsMap[i] = 0;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mOffsetsMap;
+
+ fail:
+
+ if ( NULL != mOffsetsMap )
+ {
+ delete [] mOffsetsMap;
+ mOffsetsMap = NULL;
+ }
+
+ if ( NULL != mErrorNotifier.get() )
+ {
+ mErrorNotifier->errorNotify(-ENOSYS);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NULL;
+}
+
+int ANativeWindowDisplayAdapter::maxQueueableBuffers(unsigned int& queueable)
+{
+ LOG_FUNCTION_NAME;
+ int ret = NO_ERROR;
+ int undequeued = 0;
+
+ if(mBufferCount == 0)
+ {
+ ret = -ENOSYS;
+ goto end;
+ }
+
+ if(!mANativeWindow)
+ {
+ ret = -ENOSYS;
+ goto end;
+ }
+
+ ret = mANativeWindow->get_min_undequeued_buffer_count(mANativeWindow, &undequeued);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("get_min_undequeued_buffer_count failed: %s (%d)", strerror(-ret), -ret);
+
+ if ( ENODEV == ret ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ return -ret;
+ }
+
+ queueable = mBufferCount - undequeued;
+
+ end:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
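+// Illustrative numbers (assumed, not queried from a real surface): if mBufferCount is 6
+// and the window reports a minimum of 2 undequeued buffers, queueable comes out as
+// 6 - 2 = 4, i.e. at most 4 of the preview buffers may be enqueued to the window at once.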
+
+int ANativeWindowDisplayAdapter::getFd()
+{
+ LOG_FUNCTION_NAME;
+
+ if(mFD == -1)
+ {
+ //IMG_native_handle_t* handle = (IMG_native_handle_t*) *(mBufferHandleMap[0]);
+ native_handle_t* handle = (native_handle_t*) *(mBufferHandleMap[0]);
+ // TODO: should we dup the fd? not really necessary and another thing for ANativeWindow
+ // to manage and close...
+ //mFD = dup(handle->fd[0]);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mFD;
+
+}
+
+status_t ANativeWindowDisplayAdapter::returnBuffersToWindow()
+{
+ status_t ret = NO_ERROR;
+
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ //Give the buffers back to display here - sort of free it
+ if (mANativeWindow)
+ for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) {
+ int value = mFramesWithCameraAdapterMap.valueAt(i);
+
+ // unlock buffer before giving it up
+ mapper.unlock((buffer_handle_t) mGrallocHandleMap[value]);
+
+ ret = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[value]);
+ if ( ENODEV == ret ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ return -ret;
+ } else if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEB("cancel_buffer() failed: %s (%d)",
+ strerror(-ret),
+ -ret);
+ return -ret;
+ }
+ }
+ else
+ LOGE("mANativeWindow is NULL");
+
+ ///Clear the frames with camera adapter map
+ mFramesWithCameraAdapterMap.clear();
+
+ return ret;
+
+}
+
+int ANativeWindowDisplayAdapter::freeBuffer(void* buf)
+{
+ LOG_FUNCTION_NAME;
+
+ int *buffers = (int *) buf;
+ status_t ret = NO_ERROR;
+
+ Mutex::Autolock lock(mLock);
+
+ if((int *)mGrallocHandleMap != buffers)
+ {
+ CAMHAL_LOGEA("CameraHal passed wrong set of buffers to free!!!");
+ if (mGrallocHandleMap != NULL)
+ delete []mGrallocHandleMap;
+ mGrallocHandleMap = NULL;
+ }
+
+
+ returnBuffersToWindow();
+
+ if ( NULL != buf )
+ {
+ delete [] buffers;
+ mGrallocHandleMap = NULL;
+ }
+
+ if( mBufferHandleMap != NULL)
+ {
+ delete [] mBufferHandleMap;
+ mBufferHandleMap = NULL;
+ }
+
+ if ( NULL != mOffsetsMap )
+ {
+ delete [] mOffsetsMap;
+ mOffsetsMap = NULL;
+ }
+
+ if( mFD != -1)
+ {
+ close(mFD); // close duped handle
+ mFD = -1;
+ }
+
+ return NO_ERROR;
+}
+
+
+bool ANativeWindowDisplayAdapter::supportsExternalBuffering()
+{
+ return false;
+}
+
+int ANativeWindowDisplayAdapter::useBuffers(void *bufArr, int num)
+{
+ return NO_ERROR;
+}
+
+void ANativeWindowDisplayAdapter::displayThread()
+{
+ bool shouldLive = true;
+ int timeout = 0;
+ status_t ret;
+
+ LOG_FUNCTION_NAME;
+
+ while(shouldLive)
+ {
+ ret = TIUTILS::MessageQueue::waitForMsg(&mDisplayThread->msgQ()
+ , &mDisplayQ
+ , NULL
+ , ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT);
+
+ if ( !mDisplayThread->msgQ().isEmpty() )
+ {
+ ///Received a message from CameraHal, process it
+ shouldLive = processHalMsg();
+
+ }
+ else if( !mDisplayQ.isEmpty())
+ {
+ if ( mDisplayState== ANativeWindowDisplayAdapter::DISPLAY_INIT )
+ {
+
+ ///If display adapter is not started, continue
+ continue;
+
+ }
+ else
+ {
+ TIUTILS::Message msg;
+ ///Get the dummy msg from the displayQ
+ if(mDisplayQ.get(&msg)!=NO_ERROR)
+ {
+ CAMHAL_LOGEA("Error in getting message from display Q");
+ continue;
+ }
+
+ // There is a frame from ANativeWindow for us to dequeue
+ // We dequeue and return the frame back to Camera adapter
+ if(mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_STARTED)
+ {
+ handleFrameReturn();
+ }
+
+ if (mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_EXITED)
+ {
+ ///we exit the thread even though there are frames still to dequeue. They will be dequeued
+ ///in disableDisplay
+ shouldLive = false;
+ }
+ }
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+bool ANativeWindowDisplayAdapter::processHalMsg()
+{
+ TIUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+
+ mDisplayThread->msgQ().get(&msg);
+ bool ret = true, invalidCommand = false;
+
+ switch ( msg.command )
+ {
+
+ case DisplayThread::DISPLAY_START:
+
+ CAMHAL_LOGDA("Display thread received DISPLAY_START command from Camera HAL");
+ mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_STARTED;
+
+ break;
+
+ case DisplayThread::DISPLAY_STOP:
+
+ ///@bug There is no API to disable SF without destroying it
+ ///@bug Buffers might still be w/ display and will get displayed
+ ///@remarks Ideal sequence should be something like this
+ ///mOverlay->setParameter("enabled", false);
+ CAMHAL_LOGDA("Display thread received DISPLAY_STOP command from Camera HAL");
+ mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_STOPPED;
+
+ break;
+
+ case DisplayThread::DISPLAY_EXIT:
+
+ CAMHAL_LOGDA("Display thread received DISPLAY_EXIT command from Camera HAL.");
+ CAMHAL_LOGDA("Stopping display thread...");
+ mDisplayState = ANativeWindowDisplayAdapter::DISPLAY_EXITED;
+ ///Note that the SF can have pending buffers when we disable the display
+ ///This is normal and the expectation is that they may not be displayed.
+ ///This is to ensure that the user experience is not impacted
+ ret = false;
+ break;
+
+ default:
+
+ CAMHAL_LOGEB("Invalid Display Thread Command 0x%x.", msg.command);
+ invalidCommand = true;
+
+ break;
+ }
+
+ ///Signal the semaphore if it is sent as part of the message
+ if ( ( msg.arg1 ) && ( !invalidCommand ) )
+ {
+
+ CAMHAL_LOGDA("+Signalling display semaphore");
+ Semaphore &sem = *((Semaphore*)msg.arg1);
+
+ sem.Signal();
+
+ CAMHAL_LOGDA("-Signalling display semaphore");
+ }
+
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+
+status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::DisplayFrame &dispFrame)
+{
+ status_t ret = NO_ERROR;
+ uint32_t actualFramesWithDisplay = 0;
+ android_native_buffer_t *buffer = NULL;
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ int i;
+
+ LOG_FUNCTION_NAME;
+ ///@todo Do cropping based on the stabilized frame coordinates
+ ///@todo Insert logic to drop frames here based on refresh rate of
+ ///display or rendering rate whichever is lower
+ ///Queue the buffer to overlay
+
+ if (!mGrallocHandleMap || !dispFrame.mBuffer) {
+ CAMHAL_LOGEA("NULL sent to PostFrame");
+ return -EINVAL;
+ }
+
+ for ( i = 0; i < mBufferCount; i++ )
+ {
+ if ( ((int) dispFrame.mBuffer ) == (int)mGrallocHandleMap[i] )
+ {
+ break;
+ }
+ }
+
+ if ( mDisplayState == ANativeWindowDisplayAdapter::DISPLAY_STARTED &&
+ (!mPaused || CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) &&
+ !mSuspend)
+ {
+ Mutex::Autolock lock(mLock);
+ uint32_t xOff = (dispFrame.mOffset% PAGE_SIZE);
+ uint32_t yOff = (dispFrame.mOffset / PAGE_SIZE);
+
+ // Set crop only if current x and y offsets do not match with frame offsets
+ if((mXOff!=xOff) || (mYOff!=yOff))
+ {
+ CAMHAL_LOGDB("Offset %d xOff = %d, yOff = %d", dispFrame.mOffset, xOff, yOff);
+ uint8_t bytesPerPixel;
+ ///Calculate bytes per pixel based on the pixel format
+ if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ bytesPerPixel = 2;
+ }
+ else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ bytesPerPixel = 2;
+ }
+ else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ {
+ bytesPerPixel = 1;
+ }
+ else
+ {
+ bytesPerPixel = 1;
+ }
+
+ CAMHAL_LOGVB(" crop.left = %d crop.top = %d crop.right = %d crop.bottom = %d",
+ xOff/bytesPerPixel, yOff , (xOff/bytesPerPixel)+mPreviewWidth, yOff+mPreviewHeight);
+ // We'll ignore any errors here, if the surface is
+ // already invalid, we'll know soon enough.
+ mANativeWindow->set_crop(mANativeWindow, xOff/bytesPerPixel, yOff,
+ (xOff/bytesPerPixel)+mPreviewWidth, yOff+mPreviewHeight);
+
+ ///Update the current x and y offsets
+ mXOff = xOff;
+ mYOff = yOff;
+ }
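+ // Worked example with assumed values: with 4 KiB pages, an offset of 12292
+ // decodes to yOff = 12292 / 4096 = 3 and xOff = 12292 % 4096 = 4; for a
+ // 2-bytes-per-pixel format the crop rectangle then starts at column
+ // xOff / 2 = 2 and row yOff = 3.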
+
+ // unlock buffer before sending to display
+ mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]);
+ ret = mANativeWindow->enqueue_buffer(mANativeWindow, mBufferHandleMap[i]);
+ if (ret != 0) {
+ LOGE("Surface::queueBuffer returned error %d", ret);
+ }
+
+ mFramesWithCameraAdapterMap.removeItem((int) dispFrame.mBuffer);
+
+
+ // HWComposer has no minimum buffer requirement. We should be able to dequeue
+ // the buffer immediately
+ TIUTILS::Message msg;
+ mDisplayQ.put(&msg);
+
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ if ( mMeasureStandby )
+ {
+ CameraHal::PPM("Standby to first shot: Sensor Change completed - ", &mStandbyToShot);
+ mMeasureStandby = false;
+ }
+ else if (CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType)
+ {
+ CameraHal::PPM("Shot to snapshot: ", &mStartCapture);
+ mShotToShot = true;
+ }
+ else if ( mShotToShot )
+ {
+ CameraHal::PPM("Shot to shot: ", &mStartCapture);
+ mShotToShot = false;
+ }
+#endif
+
+ }
+ else
+ {
+ Mutex::Autolock lock(mLock);
+
+ // unlock buffer before giving it up
+ mapper.unlock((buffer_handle_t) mGrallocHandleMap[i]);
+
+ // cancel buffer and dequeue another one
+ ret = mANativeWindow->cancel_buffer(mANativeWindow, mBufferHandleMap[i]);
+ if (ret != 0) {
+ LOGE("Surface::queueBuffer returned error %d", ret);
+ }
+
+ mFramesWithCameraAdapterMap.removeItem((int) dispFrame.mBuffer);
+
+ TIUTILS::Message msg;
+ mDisplayQ.put(&msg);
+ ret = NO_ERROR;
+ }
+
+ return ret;
+}
+
+
+bool ANativeWindowDisplayAdapter::handleFrameReturn()
+{
+ status_t err;
+ buffer_handle_t* buf;
+ int i = 0;
+ int stride; // dummy variable to get stride
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ Rect bounds;
+ void *y_uv[2];
+
+ // TODO(XXX): Do we need to keep stride information in camera hal?
+
+ LOG_FUNCTION_NAME;
+ if ( NULL == mANativeWindow ) {
+ return false;
+ }
+
+ err = mANativeWindow->dequeue_buffer(mANativeWindow, &buf, &stride);
+ if (err != 0) {
+ CAMHAL_LOGEB("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ return false;
+ }
+
+ err = mANativeWindow->lock_buffer(mANativeWindow, buf);
+ if (err != 0) {
+ CAMHAL_LOGEB("lockbuffer failed: %s (%d)", strerror(-err), -err);
+
+ if ( ENODEV == err ) {
+ CAMHAL_LOGEA("Preview surface abandoned!");
+ mANativeWindow = NULL;
+ }
+
+ return false;
+ }
+
+ for(i = 0; i < mBufferCount; i++)
+ {
+ if (mBufferHandleMap[i] == buf)
+ break;
+ }
+
+ // lock buffer before sending to FrameProvider for filling
+ bounds.left = 0;
+ bounds.top = 0;
+ bounds.right = mFrameWidth;
+ bounds.bottom = mFrameHeight;
+
+ int lock_try_count = 0;
+ while (mapper.lock((buffer_handle_t) mGrallocHandleMap[i], CAMHAL_GRALLOC_USAGE, bounds, y_uv) < 0){
+ if (++lock_try_count > LOCK_BUFFER_TRIES){
+ if ( NULL != mErrorNotifier.get() ){
+ mErrorNotifier->errorNotify(CAMERA_ERROR_UNKNOWN);
+ }
+ return false;
+ }
+ CAMHAL_LOGEA("Gralloc Lock FrameReturn Error: Sleeping 15ms");
+ usleep(15000);
+ }
+
+ mFramesWithCameraAdapterMap.add((int) mGrallocHandleMap[i], i);
+
+ CAMHAL_LOGVB("handleFrameReturn: found graphic buffer %d of %d", i, mBufferCount-1);
+ mFrameProvider->returnFrame( (void*)mGrallocHandleMap[i], CameraFrame::PREVIEW_FRAME_SYNC);
+ return true;
+}
+
+void ANativeWindowDisplayAdapter::frameCallbackRelay(CameraFrame* caFrame)
+{
+
+ if ( NULL != caFrame )
+ {
+ if ( NULL != caFrame->mCookie )
+ {
+ ANativeWindowDisplayAdapter *da = (ANativeWindowDisplayAdapter*) caFrame->mCookie;
+ da->frameCallback(caFrame);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid Cookie in Camera Frame = %p, Cookie = %p", caFrame, caFrame->mCookie);
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid Camera Frame = %p", caFrame);
+ }
+
+}
+
+void ANativeWindowDisplayAdapter::frameCallback(CameraFrame* caFrame)
+{
+ ///Call queueBuffer of overlay in the context of the callback thread
+ DisplayFrame df;
+ df.mBuffer = caFrame->mBuffer;
+ df.mType = (CameraFrame::FrameType) caFrame->mFrameType;
+ df.mOffset = caFrame->mOffset;
+ df.mWidthStride = caFrame->mAlignment;
+ df.mLength = caFrame->mLength;
+ df.mWidth = caFrame->mWidth;
+ df.mHeight = caFrame->mHeight;
+ PostFrame(df);
+}
+
+
+/*--------------------ANativeWindowDisplayAdapter Class ENDS here-----------------------------*/
+
+};
+
diff --git a/AmlogicCameraHardware.cpp b/AmlogicCameraHardware.cpp
deleted file mode 100755
index 1ac698d..0000000
--- a/AmlogicCameraHardware.cpp
+++ b/dev/null
@@ -1,721 +0,0 @@
-/*
-**
-** Copyright 2008, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-#define LOG_NDEBUG 0
-#define NDEBUG 0
-
-#define LOG_TAG "AmlogicCameraHardware"
-
-#include <utils/Log.h>
-
-#include "AmlogicCameraHardware.h"
-#include <utils/threads.h>
-#include <cutils/properties.h>
-#include <camera/CameraHardwareInterface.h>
-
-#include <stdio.h>
-#include <unistd.h>
-#include <stdlib.h>
-#include <fcntl.h>
-#include <linux/fb.h>
-
-#ifdef AMLOGIC_FLASHLIGHT_SUPPORT
-extern "C" {
-int set_flash(bool mode);
-}
-#endif
-
-#include <util.h>
-namespace android
-{
-
-//for OverLay
-//============================================
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
-#ifndef FBIOPUT_OSD_SRCCOLORKEY
-#define FBIOPUT_OSD_SRCCOLORKEY 0x46fb
-#endif
-#ifndef FBIOPUT_OSD_SRCKEY_ENABLE
-#define FBIOPUT_OSD_SRCKEY_ENABLE 0x46fa
-#endif
-#ifndef FBIOPUT_OSD_SET_GBL_ALPHA
-#define FBIOPUT_OSD_SET_GBL_ALPHA 0x4500
-#endif
-
-int SYS_enable_colorkey(short key_rgb565)
-{
- int ret = -1;
- int fd_fb0 = open("/dev/graphics/fb0", O_RDWR);
- if (fd_fb0 >= 0)
- {
- uint32_t myKeyColor = key_rgb565;
- uint32_t myKeyColor_en = 1;
- printf("enablecolorkey color=%#x\n", myKeyColor);
- ret = ioctl(fd_fb0, FBIOPUT_OSD_SRCCOLORKEY, &myKeyColor);
- ret += ioctl(fd_fb0, FBIOPUT_OSD_SRCKEY_ENABLE, &myKeyColor_en);
- close(fd_fb0);
- }
- return ret;
-}
-
-int SYS_disable_colorkey()
-{
- int ret = -1;
- int fd_fb0 = open("/dev/graphics/fb0", O_RDWR);
- if (fd_fb0 >= 0)
- {
- uint32_t myKeyColor_en = 0;
- ret = ioctl(fd_fb0, FBIOPUT_OSD_SRCKEY_ENABLE, &myKeyColor_en);
- close(fd_fb0);
- }
- return ret;
-}
-
-static void write_sys_int(const char *path, int val)
-{
- char cmd[16];
- int fd = open(path, O_RDWR);
-
- if(fd >= 0) {
- sprintf(cmd, "%d", val);
- write(fd, cmd, strlen(cmd));
- close(fd);
- }
-}
-
-static void write_sys_string(const char *path, const char *s)
-{
- int fd = open(path, O_RDWR);
-
- if(fd >= 0) {
- write(fd, s, strlen(s));
- close(fd);
- }
-}
-
-#define DISABLE_VIDEO "/sys/class/video/disable_video"
-#define ENABLE_AVSYNC "/sys/class/tsync/enable"
-#define ENABLE_BLACKOUT "/sys/class/video/blackout_policy"
-#define TSYNC_EVENT "/sys/class/tsync/event"
-#define VIDEO_ZOOM "/sys/class/video/zoom"
-
-static int SYS_enable_nextvideo()
-{
- write_sys_int(DISABLE_VIDEO, 2);
- return 0;
-}
-
-static int SYS_close_video()
-{
- write_sys_int(DISABLE_VIDEO, 1);
- return 0;
-}
-
-static int SYS_open_video()
-{
- write_sys_int(DISABLE_VIDEO, 0);
- return 0;
-}
-
-static int SYS_disable_avsync()
-{
- write_sys_int(ENABLE_AVSYNC, 0);
- return 0;
-}
-
-static int SYS_disable_video_pause()
-{
- write_sys_string(TSYNC_EVENT, "VIDEO_PAUSE:0x0");
- return 0;
-}
-
-static int SYS_set_zoom(int zoom)
-{
- write_sys_int(VIDEO_ZOOM, zoom);
- return 0;
-}
-
-static int SYS_reset_zoom(void)
-{
- write_sys_int(VIDEO_ZOOM, 100);
- return 0;
-}
-#else
-static int SYS_enable_nextvideo()
-{
- return 0;
-}
-
-static int SYS_close_video()
-{
- return 0;
-}
-
-static int SYS_open_video()
-{
- return 0;
-}
-
-static int SYS_disable_avsync()
-{
- return 0;
-}
-
-static int SYS_disable_video_pause()
-{
- return 0;
-}
-
-static int SYS_set_zoom(int zoom)
-{
- return 0;
-}
-
-static int SYS_reset_zoom(void)
-{
- return 0;
-}
-#endif
-
-
-
-extern CameraInterface* HAL_GetCameraInterface(int Id);
-
-AmlogicCameraHardware::AmlogicCameraHardware(int camid)
- : mParameters(),
- mHeap(0),
- mPreviewHeap(0),
- mRawHeap(0),
- mPreviewFrameSize(0),
- mHeapSize(0),
- mNotifyCb(0),
- mDataCb(0),
- mDataCbTimestamp(0),
- mCallbackCookie(0),
- mMsgEnabled(0),
- mCurrentPreviewFrame(0),
- mRecordEnable(0),
- mState(0)
-{
- LOGD("Current camera is %d",camid);
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- SYS_disable_avsync();
- SYS_disable_video_pause();
- SYS_enable_nextvideo();
-#else
- mRecordHeap = NULL;
-#endif
- mCamera = HAL_GetCameraInterface(camid);
- mCamera->Open();
- initDefaultParameters();
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- mCamera->Close();
-#endif
-}
-
-AmlogicCameraHardware::~AmlogicCameraHardware()
-{
- mCamera->Close();
- delete mCamera;
- mCamera = NULL;
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- SYS_enable_nextvideo();
- SYS_reset_zoom();
- SYS_disable_colorkey();
-#endif
- LOGV("~AmlogicCameraHardware ");
-}
-
-void AmlogicCameraHardware::initDefaultParameters()
-{ //call the camera to return the parameter
- CameraParameters pParameters;
- mCamera->InitParameters(pParameters);
- setParameters(pParameters);
-}
-
-
-void AmlogicCameraHardware::initHeapLocked()
-{
- // Create raw heap.
- int picture_width, picture_height;
- mParameters.getPictureSize(&picture_width, &picture_height);
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- mRawHeap = new MemoryHeapBase(picture_width * 2 * picture_height);
-#else
- mRawHeap = new MemoryHeapBase(picture_width * 3 * picture_height);
-#endif
-
- int preview_width, preview_height;
- mParameters.getPreviewSize(&preview_width, &preview_height);
- // LOGD("initHeapLocked: preview size=%dx%d", preview_width, preview_height);
-
- // Note that we enforce yuv422 in setParameters().
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- int how_big = preview_width * preview_height * 2;
-#else
- int how_big = preview_width * preview_height * 3 / 2;
-#endif
-
- // If we are being reinitialized to the same size as before, no
- // work needs to be done.
- if (how_big == mHeapSize)
- return;
- mHeapSize = how_big;
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- mPreviewFrameSize = how_big;
-#endif
-
- // Make a new mmap'ed heap that can be shared across processes.
- // use code below to test with pmem
- mHeap = new MemoryHeapBase(mHeapSize * kBufferCount);
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- mPreviewHeap = new MemoryHeapBase(mPreviewFrameSize * kBufferCount);
-#endif
- // Make an IMemory for each frame so that we can reuse them in callbacks.
- for (int i = 0; i < kBufferCount; i++) {
- mBuffers[i] = new MemoryBase(mHeap, i * mHeapSize, mHeapSize);
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- mPreviewBuffers[i] = new MemoryBase(mPreviewHeap, i * mPreviewFrameSize, mPreviewFrameSize);
-#endif
- }
- #ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- mRecordBufCount = kBufferCount;
-
- #else
- #ifdef AMLOGIC_USB_CAMERA_SUPPORT
- int RecordFrameSize = mHeapSize*3/4;
- #else
- int RecordFrameSize = mHeapSize;
- #endif
- mRecordHeap = new MemoryHeapBase(RecordFrameSize * kBufferCount);
- // Make an IMemory for each frame so that we can reuse them in callbacks.
- for (int i = 0; i < kBufferCount; i++) {
- mRecordBuffers[i] = new MemoryBase(mRecordHeap, i * RecordFrameSize, RecordFrameSize);
- }
- #endif
-}
-
-
-sp<IMemoryHeap> AmlogicCameraHardware::getPreviewHeap() const
-{
- //LOGV("getPreviewHeap");
- return mPreviewHeap;
-}
-
-sp<IMemoryHeap> AmlogicCameraHardware::getRawHeap() const
-{
- //LOGV("getRawHeap");
- return mRawHeap;
-}
-
-status_t AmlogicCameraHardware::setPreviewWindow(const sp<ANativeWindow>& buf) {
- return NO_ERROR;
-}
-
-void AmlogicCameraHardware::setCallbacks(notify_callback notify_cb,
- data_callback data_cb,
- data_callback_timestamp data_cb_timestamp,
- void* user)
-{
- Mutex::Autolock lock(mLock);
- mNotifyCb = notify_cb;
- mDataCb = data_cb;
- mDataCbTimestamp = data_cb_timestamp;
- mCallbackCookie = user;
-}
-
-void AmlogicCameraHardware::enableMsgType(int32_t msgType)
-{
- LOGD("Enable msgtype %d",msgType);
- Mutex::Autolock lock(mLock);
- mMsgEnabled |= msgType;
-}
-
-void AmlogicCameraHardware::disableMsgType(int32_t msgType)
-{
- Mutex::Autolock lock(mLock);
- mMsgEnabled &= ~msgType;
-}
-
-bool AmlogicCameraHardware::msgTypeEnabled(int32_t msgType)
-{
- Mutex::Autolock lock(mLock);
- return (mMsgEnabled & msgType);
-}
-
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
-status_t AmlogicCameraHardware::setOverlay(const sp<Overlay> &overlay)
-{
- LOGD("AMLOGIC CAMERA setOverlay");
-
- if (overlay != NULL) {
- SYS_enable_colorkey(0);
- }
-
- return NO_ERROR;
-}
-#endif
-
-// ---------------------------------------------------------------------------
-
-int AmlogicCameraHardware::previewThread()
-{
- mLock.lock();
- // the attributes below can change under our feet...
- int previewFrameRate = mParameters.getPreviewFrameRate();
- // Find the offset within the heap of the current buffer.
- ssize_t offset = mCurrentPreviewFrame * mHeapSize;
- sp<MemoryHeapBase> heap = mHeap;
- sp<MemoryBase> buffer = mBuffers[mCurrentPreviewFrame];
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- sp<MemoryHeapBase> previewheap = mPreviewHeap;
- sp<MemoryBase> previewbuffer = mPreviewBuffers[mCurrentPreviewFrame];
-#endif
- mLock.unlock();
- if (buffer != 0) {
- int width,height;
- mParameters.getPreviewSize(&width, &height);
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- int delay = (int)(1000000.0f / float(previewFrameRate)) >> 1;
- if (mRecordBufCount <= 0) {
- LOGD("Recording buffer underflow");
- usleep(delay);
- return NO_ERROR;
- }
-#endif
- //get preview frames data
- void *base = heap->getBase();
- uint8_t *frame = ((uint8_t *)base) + offset;
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- uint8_t *previewframe = ((uint8_t *)previewheap->getBase()) + offset;
-#endif
- //get preview frame
- {
- if(mCamera->GetPreviewFrame(frame) != NO_ERROR)//special case for first preview frame
- return NO_ERROR;
- if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
- {
- //LOGD("Return preview frame");
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- yuyv422_to_rgb16((unsigned char *)frame,(unsigned char *)previewframe,
- width, height);
- mDataCb(CAMERA_MSG_PREVIEW_FRAME, previewbuffer, mCallbackCookie);
-#else
- mDataCb(CAMERA_MSG_PREVIEW_FRAME, buffer, mCallbackCookie);
-#endif
- }
- }
-
- //get Record frames data
- if(mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)
- {
-#ifndef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- sp<MemoryHeapBase> reocrdheap = mRecordHeap;
- sp<MemoryBase> recordbuffer = mRecordBuffers[mCurrentPreviewFrame];
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- ssize_t recordoffset = offset*3/4;
- uint8_t *recordframe = ((uint8_t *)reocrdheap->getBase()) + recordoffset;
- yuyv422_to_nv21((unsigned char *)frame,(unsigned char *)recordframe,width,height);
-#else
- ssize_t recordoffset = offset;
- uint8_t *recordframe = ((uint8_t *)reocrdheap->getBase()) + recordoffset;
- convert_rgb16_to_nv21(frame,recordframe,width,height);
-#endif
- mDataCbTimestamp(systemTime(),CAMERA_MSG_VIDEO_FRAME, recordbuffer, mCallbackCookie);
-#else
- android_atomic_dec(&mRecordBufCount);
- //when use overlay, the preview format is the same as record
- mDataCbTimestamp(systemTime(),CAMERA_MSG_VIDEO_FRAME, buffer, mCallbackCookie);
-#endif
- }
-
- mCurrentPreviewFrame = (mCurrentPreviewFrame + 1) % kBufferCount;
- }
- return NO_ERROR;
-}
-
-status_t AmlogicCameraHardware::startPreview()
-{
- LOGD("AMLOGIC CAMERA startPreview");
- Mutex::Autolock lock(mLock);
- if (mPreviewThread != 0) {
- // already running
- return INVALID_OPERATION;
- }
- mCamera->StartPreview();
- mPreviewThread = new PreviewThread(this);
- return NO_ERROR;
-}
-
-void AmlogicCameraHardware::stopPreview()
-{
- LOGV("AMLOGIC CAMERA stopPreview");
- sp<PreviewThread> previewThread;
-
- { // scope for the lock
- Mutex::Autolock lock(mLock);
- previewThread = mPreviewThread;
- }
-
- // don't hold the lock while waiting for the thread to quit
- if (previewThread != 0) {
- previewThread->requestExitAndWait();
- }
- mCamera->StopPreview();
-
- Mutex::Autolock lock(mLock);
- mPreviewThread.clear();
-}
-
-bool AmlogicCameraHardware::previewEnabled() {
- return mPreviewThread.get() != 0;
-}
-
-status_t AmlogicCameraHardware::startRecording()
-{
- LOGE("AmlogicCameraHardware::startRecording()");
- mCamera->StartRecord();
- mRecordEnable = true;
- return NO_ERROR;
-}
-
-void AmlogicCameraHardware::stopRecording()
-{
- mCamera->StopRecord();
- mRecordEnable = false;
-}
-
-bool AmlogicCameraHardware::recordingEnabled()
-{
- return mRecordEnable;
-}
-
-void AmlogicCameraHardware::releaseRecordingFrame(const sp<IMemory>& mem)
-{
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- android_atomic_inc(&mRecordBufCount);
-#endif
-// LOGD("AmlogicCameraHardware::releaseRecordingFrame, %d", mRecordBufCount);
-}
-
-// ---------------------------------------------------------------------------
-
-int AmlogicCameraHardware::beginAutoFocusThread(void *cookie)
-{
- AmlogicCameraHardware *c = (AmlogicCameraHardware *)cookie;
- //should add wait focus end
- return c->autoFocusThread();
-}
-
-int AmlogicCameraHardware::autoFocusThread()
-{
- mCamera->StartFocus();
- if (mMsgEnabled & CAMERA_MSG_FOCUS)
- {
- LOGD("return focus end msg");
- mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
- }
- mStateLock.lock();
- mState &= ~PROCESS_FOCUS;
- mStateLock.unlock();
- return NO_ERROR;
-}
-
-status_t AmlogicCameraHardware::autoFocus()
-{
- status_t ret = NO_ERROR;
- Mutex::Autolock lock(mLock);
- if(mStateLock.tryLock() == 0)
- {
- if((mState&PROCESS_FOCUS) == 0)
- {
- if (createThread(beginAutoFocusThread, this) == false)
- {
- ret = UNKNOWN_ERROR;
- }
- else
- mState |= PROCESS_FOCUS;
- }
- mStateLock.unlock();
- }
-
- return ret;
-}
-
-status_t AmlogicCameraHardware::cancelAutoFocus()
-{
- Mutex::Autolock lock(mLock);
- return mCamera->StopFocus();
-}
-
-/*static*/ int AmlogicCameraHardware::beginPictureThread(void *cookie)
-{
- AmlogicCameraHardware *c = (AmlogicCameraHardware *)cookie;
- return c->pictureThread();
-}
-
-int AmlogicCameraHardware::pictureThread()
-{
- int w, h,jpegsize = 0;
- mParameters.getPictureSize(&w, &h);
- if (mMsgEnabled & CAMERA_MSG_SHUTTER)
- mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);
-#ifdef AMLOGIC_FLASHLIGHT_SUPPORT
- if(w > 640 && h > 480)
- set_flash(true);
-#endif
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- SYS_close_video();
-#endif
- mCamera->TakePicture();
-#ifdef AMLOGIC_FLASHLIGHT_SUPPORT
- if(w > 640 && h > 480)
- set_flash(false);
-#endif
- sp<MemoryBase> mem = NULL;
- sp<MemoryHeapBase> jpgheap = NULL;
- sp<MemoryBase> jpgmem = NULL;
- //Capture picture is RGB 24 BIT
- if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE) {
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- sp<MemoryBase> mem = new MemoryBase(mRawHeap, 0, w * 2 * h);//yuyv422
-#else
- sp<MemoryBase> mem = new MemoryBase(mRawHeap, 0, w * 3 * h);//rgb888
-#endif
- mCamera->GetRawFrame((uint8_t*)mRawHeap->getBase());
- //mDataCb(CAMERA_MSG_RAW_IMAGE, mem, mCallbackCookie);
- }
-
- if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) {
- jpgheap = new MemoryHeapBase( w * 3 * h);
- mCamera->GetJpegFrame((uint8_t*)jpgheap->getBase(), &jpegsize);
- jpgmem = new MemoryBase(jpgheap, 0, jpegsize);
- //mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, jpgmem, mCallbackCookie);
- }
- mCamera->TakePictureEnd(); // must uninit v4l2 buff first before callback, because the "start preview" may be called .
- if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE) {
- mDataCb(CAMERA_MSG_RAW_IMAGE, mem, mCallbackCookie);
- }
- if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) {
- mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, jpgmem, mCallbackCookie);
- }
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- SYS_open_video();
-#endif
- return NO_ERROR;
-}
-
-status_t AmlogicCameraHardware::takePicture()
-{
- disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- stopPreview();
- if (createThread(beginPictureThread, this) == false)
- return -1;
- return NO_ERROR;
-}
-
-
-status_t AmlogicCameraHardware::cancelPicture()
-{
- return NO_ERROR;
-}
-
-status_t AmlogicCameraHardware::dump(int fd, const Vector<String16>& args) const
-{
-/*
- const size_t SIZE = 256;
- char buffer[SIZE];
- String8 result;
- AutoMutex lock(&mLock);
- if (mFakeCamera != 0) {
- mFakeCamera->dump(fd);
- mParameters.dump(fd, args);
- snprintf(buffer, 255, " preview frame(%d), size (%d), running(%s)\n", mCurrentPreviewFrame, mPreviewFrameSize, mPreviewRunning?"true": "false");
- result.append(buffer);
- } else {
- result.append("No camera client yet.\n");
- }
- write(fd, result.string(), result.size());\
-*/
- return NO_ERROR;
-}
-
-status_t AmlogicCameraHardware::setParameters(const CameraParameters& params)
-{
- Mutex::Autolock lock(mLock);
- // to verify parameter
- if (strcmp(params.getPictureFormat(), "jpeg") != 0) {
- LOGE("Only jpeg still pictures are supported");
- return -1;
- }
-
- mParameters = params;
- initHeapLocked();
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- int zoom_level = mParameters.getInt(CameraParameters::KEY_ZOOM);
- char *p = (char *)mParameters.get(CameraParameters::KEY_ZOOM_RATIOS);
-
- if ((p) && (zoom_level >= 0)) {
- int z = (int)strtol(p, &p, 10);
- int i = 0;
- while (i < zoom_level) {
- if (*p != ',') break;
- z = (int)strtol(p+1, &p, 10);
- i++;
- }
-
- SYS_set_zoom(z);
- }
-#endif
- return mCamera->SetParameters(mParameters);//set to the real hardware
-}
-
-CameraParameters AmlogicCameraHardware::getParameters() const
-{
- LOGE("get AmlogicCameraHardware::getParameters()");
- Mutex::Autolock lock(mLock);
- return mParameters;
-}
-
-status_t AmlogicCameraHardware::sendCommand(int32_t command, int32_t arg1,
- int32_t arg2)
-{
- return BAD_VALUE;
-}
-
-void AmlogicCameraHardware::release()
-{
- mCamera->Close();
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- SYS_enable_nextvideo();
- SYS_reset_zoom();
- SYS_disable_colorkey();
-#endif
-}
-
-sp<CameraHardwareInterface> AmlogicCameraHardware::createInstance(int CamId)
-{
- LOGD("AmlogicCameraHardware ::createInstance\n");
- return new AmlogicCameraHardware(CamId);
-}
-
-extern "C" sp<CameraHardwareInterface> HAL_openCameraHardware(int cameraId)
-{
- LOGV("HAL_openCameraHardware() cameraId=%d", cameraId);
- return AmlogicCameraHardware::createInstance(cameraId);
-}
-}; // namespace android
-
diff --git a/AmlogicCameraHardware.h b/AmlogicCameraHardware.h
deleted file mode 100755
index 5b8b301..0000000
--- a/AmlogicCameraHardware.h
+++ b/dev/null
@@ -1,183 +0,0 @@
-
-#ifndef AMLOGIC_HARDWARE_CAMERA_HARDWARE_H
-#define AMLOGIC_HARDWARE_CAMERA_HARDWARE_H
-
-#include <utils/threads.h>
-#include <camera/CameraHardwareInterface.h>
-#include <binder/MemoryBase.h>
-#include <binder/MemoryHeapBase.h>
-
-
-#ifndef AMLOGIC_USB_CAMERA_SUPPORT
-//#define AMLOGIC_CAMERA_OVERLAY_SUPPORT 1
-#endif
-
-namespace android {
-
-class CameraInterface
-{
-public:
- CameraInterface(){};
- virtual ~CameraInterface(){};
- virtual status_t Open() = 0;
- virtual status_t Close() = 0;
- virtual status_t StartPreview() = 0;
- virtual status_t StopPreview() = 0;
- virtual status_t StartRecord() = 0;
- virtual status_t StopRecord() = 0;
- virtual status_t TakePicture() = 0;
- virtual status_t TakePictureEnd() = 0;
- virtual status_t StartFocus(){return 1;};
- virtual status_t StopFocus(){return 1;};
- virtual status_t InitParameters(CameraParameters& pParameters) = 0;
- virtual status_t SetParameters(CameraParameters& pParameters) = 0;
- virtual status_t GetPreviewFrame(uint8_t* framebuf) = 0;
- virtual status_t GetRawFrame(uint8_t* framebuf) = 0;
- virtual status_t GetJpegFrame(uint8_t* framebuf,int* jpegsize) = 0;
-
- virtual int GetCamId() {return 0;};
-};
-
-
-class AmlogicCameraHardware : public CameraHardwareInterface {
-public:
- virtual sp<IMemoryHeap> getPreviewHeap() const;
- virtual sp<IMemoryHeap> getRawHeap() const;
- virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf);
-
- virtual void setCallbacks(notify_callback notify_cb,
- data_callback data_cb,
- data_callback_timestamp data_cb_timestamp,
- void* user);
-
- virtual void enableMsgType(int32_t msgType);
- virtual void disableMsgType(int32_t msgType);
- virtual bool msgTypeEnabled(int32_t msgType);
-
- virtual status_t startPreview();
- virtual void stopPreview();
- virtual bool previewEnabled();
-
- virtual status_t startRecording();
- virtual void stopRecording();
- virtual bool recordingEnabled();
- virtual void releaseRecordingFrame(const sp<IMemory>& mem);
-
- virtual status_t autoFocus();
- virtual status_t cancelAutoFocus();
- virtual status_t takePicture();
- virtual status_t cancelPicture();
- virtual status_t dump(int fd, const Vector<String16>& args) const;
- virtual status_t setParameters(const CameraParameters& params);
- virtual CameraParameters getParameters() const;
- virtual status_t sendCommand(int32_t command, int32_t arg1,
- int32_t arg2);
- virtual void release();
-
- static sp<CameraHardwareInterface> createInstance(int CamId);
-
-#ifdef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- virtual bool useOverlay() {return true;}
- virtual status_t setOverlay(const sp<Overlay> &overlay);
-#else
- virtual bool useOverlay() {return false;}
-
-#endif
-
-private:
- AmlogicCameraHardware(int camid = 0);
- virtual ~AmlogicCameraHardware();
-
- static const int kBufferCount = 6;
-
-#ifndef AMLOGIC_CAMERA_OVERLAY_SUPPORT
- sp<MemoryHeapBase> mRecordHeap;
- sp<MemoryBase> mRecordBuffers[kBufferCount];
-#else
- volatile int32_t mRecordBufCount;
-#endif
-
- class PreviewThread : public Thread {
- AmlogicCameraHardware* mHardware;
- public:
- PreviewThread(AmlogicCameraHardware* hw) :
-#ifdef SINGLE_PROCESS
- // In single process mode this thread needs to be a java thread,
- // since we won't be calling through the binder.
- Thread(true),
-#else
- Thread(false),
-#endif
- mHardware(hw) { }
- virtual void onFirstRef() {
- run("CameraPreviewThread", PRIORITY_URGENT_DISPLAY);
- }
- virtual bool threadLoop() {
- mHardware->previewThread();
- // loop until we need to quit
- return true;
- }
- };
-
- void initDefaultParameters();
- void initHeapLocked();
-
- int previewThread();
-
- static int beginAutoFocusThread(void *cookie);
- int autoFocusThread();
-
- static int beginPictureThread(void *cookie);
- int pictureThread();
-
- mutable Mutex mLock;
- mutable Mutex mStateLock;
-
- int mState;
-
- CameraParameters mParameters;
-
- sp<MemoryHeapBase> mPreviewHeap;
- sp<MemoryHeapBase> mHeap;
- sp<MemoryHeapBase> mRawHeap;
- sp<MemoryBase> mBuffers[kBufferCount];
- sp<MemoryBase> mPreviewBuffers[kBufferCount];
- //FakeCamera *mFakeCamera;
- bool mPreviewRunning;
- int mPreviewFrameSize;
- int mHeapSize;
-
- // protected by mLock
- sp<PreviewThread> mPreviewThread;
-
- //if reord is enable!
- bool mRecordEnable;
-
- notify_callback mNotifyCb;
- data_callback mDataCb;
- data_callback_timestamp mDataCbTimestamp;
- void *mCallbackCookie;
-
- int32_t mMsgEnabled;
-
- // only used from PreviewThread
- int mCurrentPreviewFrame;
-
- CameraInterface* mCamera;
-
-
- enum
- {
- PROCESS_FOCUS = 0x1,
- PROCESS_TAKEPIC = 0x2,
- };
-};
-
-}; // namespace android
-
-
-
-#endif
-
-
-
diff --git a/Android.mk b/Android.mk
index 7e1baf9..012106b 100755..100644
--- a/Android.mk
+++ b/Android.mk
@@ -1,49 +1,78 @@
ifneq ($(strip $(USE_CAMERA_STUB)),true)
-LOCAL_PATH := $(call my-dir)
-include $(CLEAR_VARS)
-LOCAL_MODULE := libcamera
-LOCAL_MODULE_TAGS := optional
+LOCAL_PATH:= $(call my-dir)
-LOCAL_SHARED_LIBRARIES := \
- libutils \
- libcutils \
- liblog \
- libcamera_client \
- libbinder \
- libjpeg \
- libexif
+CAMERA_HAL_SRC := \
+ CameraHal_Module.cpp \
+ CameraHal.cpp \
+ CameraHalUtilClasses.cpp \
+ AppCallbackNotifier.cpp \
+ ANativeWindowDisplayAdapter.cpp \
+ CameraProperties.cpp \
+ MemoryManager.cpp \
+ Encoder_libjpeg.cpp \
+ SensorListener.cpp \
+ NV12_resize.c
-ifeq ($(BOARD_HAVE_FRONT_CAM),true)
- LOCAL_CFLAGS += -DAMLOGIC_FRONT_CAMERA_SUPPORT
-endif
+CAMERA_COMMON_SRC:= \
+ CameraParameters.cpp \
+ TICameraParameters.cpp \
+ CameraHalCommon.cpp
-ifeq ($(BOARD_HAVE_BACK_CAM),true)
- LOCAL_CFLAGS += -DAMLOGIC_BACK_CAMERA_SUPPORT
-endif
+CAMERA_V4L_SRC:= \
+ BaseCameraAdapter.cpp \
+ V4LCameraAdapter/V4LCameraAdapter.cpp
-ifeq ($(BOARD_USE_USB_CAMERA),true)
- LOCAL_CFLAGS += -DAMLOGIC_USB_CAMERA_SUPPORT
- LOCAL_SRC_FILES += util.cpp
-else
-ifeq ($(BOARD_HAVE_MULTI_CAMERAS),true)
- LOCAL_CFLAGS += -DAMLOGIC_MULTI_CAMERA_SUPPORT
-endif
+CAMERA_UTILS_SRC:= \
+ utils/ErrorUtils.cpp \
+ utils/MessageQueue.cpp \
+ utils/Semaphore.cpp
-ifeq ($(BOARD_HAVE_FLASHLIGHT),true)
- LOCAL_CFLAGS += -DAMLOGIC_FLASHLIGHT_SUPPORT
-endif
-endif
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ $(CAMERA_HAL_SRC) \
+ $(CAMERA_V4L_SRC) \
+ $(CAMERA_COMMON_SRC) \
+ $(CAMERA_UTILS_SRC)
LOCAL_C_INCLUDES += \
- external/jpeg
+ $(LOCAL_PATH)/inc/ \
+ $(LOCAL_PATH)/utils \
+ $(LOCAL_PATH)/inc/V4LCameraAdapter \
+ frameworks/base/include/ui \
+ frameworks/base/include/utils \
+ external/jhead/ \
+ external/jpeg/ \
+ hardware/libhardware/modules/gralloc/
+
+ #$(LOCAL_PATH)/../include \
+ #$(LOCAL_PATH)/../hwc \
+ #hardware/ti/omap4xxx/tiler \
+ #hardware/ti/omap4xxx/ion \
+ #frameworks/base/include/media/stagefright/openmax
+
+LOCAL_SHARED_LIBRARIES:= \
+ libui \
+ libbinder \
+ libutils \
+ libcutils \
+ libcamera_client \
+ libexif \
+ libjpeg \
+ libgui
+
+ #libtiutils \
+ #libion \
-#USE V4L2 Camera
-LOCAL_SRC_FILES += jpegenc/amljpeg_enc.c
-LOCAL_SRC_FILES += AmlogicCameraHardware.cpp V4L2/V4L2Camera.cpp V4L2/CameraSetting.cpp V4L2/OpCameraHardware.c
+LOCAL_CFLAGS := -fno-short-enums -DCOPY_IMAGE_BUFFER
+LOCAL_CFLAGS += -DLOG_NDEBUG=0
-#USE FAKECAMERA
-#LOCAL_SRC_FILES += AmlogicCameraHardware.cpp FakeCamera/FakeCamera.cpp
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+LOCAL_MODULE:= camera.amlogic
+LOCAL_MODULE_TAGS:= optional
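+
+# Note: the resulting camera.amlogic.so lands in $(TARGET_OUT_SHARED_LIBRARIES)/hw.
+# Android's hw_get_module() resolves camera HAL modules as camera.<name>.so,
+# matching <name> against properties such as ro.hardware or ro.product.board,
+# so this module name has to stay in sync with the board/platform name.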
+#include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
include $(BUILD_SHARED_LIBRARY)
endif
diff --git a/AppCallbackNotifier.cpp b/AppCallbackNotifier.cpp
new file mode 100644
index 0000000..fce658a
--- a/dev/null
+++ b/AppCallbackNotifier.cpp
@@ -0,0 +1,1740 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#define LOG_TAG "CameraHAL"
+
+
+#include "CameraHal.h"
+#include "VideoMetadata.h"
+#include "Encoder_libjpeg.h"
+//#include <MetadataBufferType.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+#include "NV12_resize.h"
+
+namespace android {
+
+const int AppCallbackNotifier::NOTIFIER_TIMEOUT = -1;
+KeyedVector<void*, sp<Encoder_libjpeg> > gEncoderQueue;
+
+void AppCallbackNotifierEncoderCallback(void* main_jpeg,
+ void* thumb_jpeg,
+ CameraFrame::FrameType type,
+ void* cookie1,
+ void* cookie2,
+ void* cookie3)
+{
+ if (cookie1) {
+ AppCallbackNotifier* cb = (AppCallbackNotifier*) cookie1;
+ cb->EncoderDoneCb(main_jpeg, thumb_jpeg, type, cookie2, cookie3);
+ }
+}
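+
+// Note on the cookie plumbing above: cookie1 carries the AppCallbackNotifier
+// instance, while cookie2 and cookie3 are forwarded to EncoderDoneCb() as the
+// encoded camera_memory_t and the optional ExifElementsTable, respectively.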
+
+/*--------------------NotificationHandler Class STARTS here-----------------------------*/
+
+void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, CameraFrame::FrameType type, void* cookie1, void* cookie2)
+{
+ camera_memory_t* encoded_mem = NULL;
+ Encoder_libjpeg::params *main_param = NULL, *thumb_param = NULL;
+ size_t jpeg_size;
+ uint8_t* src = NULL;
+ sp<Encoder_libjpeg> encoder = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ camera_memory_t* picture = NULL;
+
+ {
+ Mutex::Autolock lock(mLock);
+
+ if (!main_jpeg) {
+ goto exit;
+ }
+
+ encoded_mem = (camera_memory_t*) cookie1;
+ main_param = (Encoder_libjpeg::params *) main_jpeg;
+ jpeg_size = main_param->jpeg_size;
+ src = main_param->src;
+
+ if(encoded_mem && encoded_mem->data && (jpeg_size > 0)) {
+ if (cookie2) {
+ ExifElementsTable* exif = (ExifElementsTable*) cookie2;
+ Section_t* exif_section = NULL;
+
+ exif->insertExifToJpeg((unsigned char*) encoded_mem->data, jpeg_size);
+
+ if(thumb_jpeg) {
+ thumb_param = (Encoder_libjpeg::params *) thumb_jpeg;
+ exif->insertExifThumbnailImage((const char*)thumb_param->dst,
+ (int)thumb_param->jpeg_size);
+ }
+
+ exif_section = FindSection(M_EXIF);
+
+ if (exif_section) {
+ picture = mRequestMemory(-1, jpeg_size + exif_section->Size, 1, NULL);
+ if (picture && picture->data) {
+ exif->saveJpeg((unsigned char*) picture->data, jpeg_size + exif_section->Size);
+ }
+ }
+ delete exif;
+ cookie2 = NULL;
+ } else {
+ picture = mRequestMemory(-1, jpeg_size, 1, NULL);
+ if (picture && picture->data) {
+ memcpy(picture->data, encoded_mem->data, jpeg_size);
+ }
+ }
+ }
+ } // scope for mutex lock
+
+ if (!mRawAvailable) {
+ dummyRaw();
+ } else {
+ mRawAvailable = false;
+ }
+
+ // Send the callback to the application only if the notifier is started and the message is enabled
+ if(picture && (mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED) &&
+ (mCameraHal->msgTypeEnabled(CAMERA_MSG_COMPRESSED_IMAGE)))
+ {
+ Mutex::Autolock lock(mBurstLock);
+#if 0 //TODO: enable burst mode later
+ if ( mBurst )
+ {
+            mDataCb(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie);
+ }
+ else
+#endif
+ {
+ mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, picture, 0, NULL, mCallbackCookie);
+ }
+ }
+
+ exit:
+
+ if (main_jpeg) {
+ free(main_jpeg);
+ }
+
+ if (thumb_jpeg) {
+ if (((Encoder_libjpeg::params *) thumb_jpeg)->dst) {
+ free(((Encoder_libjpeg::params *) thumb_jpeg)->dst);
+ }
+ free(thumb_jpeg);
+ }
+
+ if (encoded_mem) {
+ encoded_mem->release(encoded_mem);
+ }
+
+ if (picture) {
+ picture->release(picture);
+ }
+
+ if (cookie2) {
+ delete (ExifElementsTable*) cookie2;
+ }
+
+ if (mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) {
+ encoder = gEncoderQueue.valueFor(src);
+ if (encoder.get()) {
+ gEncoderQueue.removeItem(src);
+ encoder.clear();
+ }
+ mFrameProvider->returnFrame(src, type);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
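+
+// The EXIF branch above works in two steps: insertExifToJpeg() parses the JPEG
+// just produced in encoded_mem, and saveJpeg() then writes the combined result
+// (hence the jpeg_size + exif_section->Size allocation) into the camera_memory_t
+// that is ultimately delivered to the app via CAMERA_MSG_COMPRESSED_IMAGE.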
+
+/**
+ * NotificationHandler class
+ */
+
+///Initialization function for AppCallbackNotifier
+status_t AppCallbackNotifier::initialize()
+{
+ LOG_FUNCTION_NAME;
+
+ mMeasurementEnabled = false;
+
+ ///Create the app notifier thread
+ mNotificationThread = new NotificationThread(this);
+ if(!mNotificationThread.get())
+ {
+ CAMHAL_LOGEA("Couldn't create Notification thread");
+ return NO_MEMORY;
+ }
+
+ ///Start the display thread
+ status_t ret = mNotificationThread->run("NotificationThread", PRIORITY_URGENT_DISPLAY);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEA("Couldn't run NotificationThread");
+ mNotificationThread.clear();
+ return ret;
+ }
+
+ mUseMetaDataBufferMode = true;
+ mRawAvailable = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ mCameraHal = cameraHal;
+ mNotifyCb = notify_cb;
+ mDataCb = data_cb;
+ mDataCbTimestamp = data_cb_timestamp;
+ mRequestMemory = get_memory;
+ mCallbackCookie = user;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::setMeasurements(bool enable)
+{
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ mMeasurementEnabled = enable;
+
+ if ( enable )
+ {
+ mFrameProvider->enableFrameNotification(CameraFrame::FRAME_DATA_SYNC);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+//All sub-components of Camera HAL call this whenever any error happens
+void AppCallbackNotifier::errorNotify(int error)
+{
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGEB("AppCallbackNotifier received error %d", error);
+
+ // If it is a fatal error abort here!
+ if((error == CAMERA_ERROR_FATAL) || (error == CAMERA_ERROR_HARD)) {
+        // We deliberately take down the media server process on these errors:
+        // there is no point in continuing, and applications generally only
+        // recover from media server death, not from other error codes.
+ abort();
+ return;
+ }
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ERROR) ) )
+ {
+ CAMHAL_LOGEB("AppCallbackNotifier mNotifyCb %d", error);
+ mNotifyCb(CAMERA_MSG_ERROR, CAMERA_ERROR_UNKNOWN, 0, mCallbackCookie);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+bool AppCallbackNotifier::notificationThread()
+{
+ bool shouldLive = true;
+ status_t ret;
+
+ LOG_FUNCTION_NAME;
+
+ //CAMHAL_LOGDA("Notification Thread waiting for message");
+ ret = TIUTILS::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
+ &mEventQ,
+ &mFrameQ,
+ AppCallbackNotifier::NOTIFIER_TIMEOUT);
+
+ //CAMHAL_LOGDA("Notification Thread received message");
+
+ if (mNotificationThread->msgQ().hasMsg()) {
+ ///Received a message from CameraHal, process it
+ CAMHAL_LOGDA("Notification Thread received message from Camera HAL");
+ shouldLive = processMessage();
+ if(!shouldLive) {
+ CAMHAL_LOGDA("Notification Thread exiting.");
+ }
+ }
+
+ if(mEventQ.hasMsg()) {
+ ///Received an event from one of the event providers
+ CAMHAL_LOGDA("Notification Thread received an event from event provider (CameraAdapter)");
+ notifyEvent();
+ }
+
+ if(mFrameQ.hasMsg()) {
+ ///Received a frame from one of the frame providers
+ //CAMHAL_LOGDA("Notification Thread received a frame from frame provider (CameraAdapter)");
+ notifyFrame();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return shouldLive;
+}
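+
+// The notification thread multiplexes three queues: its own command queue
+// (control messages such as NOTIFIER_EXIT from CameraHal), mEventQ (events from
+// the camera adapter) and mFrameQ (frames from the frame provider). With
+// NOTIFIER_TIMEOUT set to -1 the waitForMsg() call effectively blocks until one
+// of them has something to process.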
+
+void AppCallbackNotifier::notifyEvent()
+{
+ ///Receive and send the event notifications to app
+ TIUTILS::Message msg;
+ LOG_FUNCTION_NAME;
+ {
+ Mutex::Autolock lock(mLock);
+ mEventQ.get(&msg);
+ }
+ bool ret = true;
+ CameraHalEvent *evt = NULL;
+ CameraHalEvent::FocusEventData *focusEvtData;
+ CameraHalEvent::ZoomEventData *zoomEvtData;
+ CameraHalEvent::FaceEventData faceEvtData;
+
+ if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED)
+ {
+ return;
+ }
+
+ switch(msg.command)
+ {
+ case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT:
+
+ evt = ( CameraHalEvent * ) msg.arg1;
+
+ if ( NULL == evt )
+ {
+ CAMHAL_LOGEA("Invalid CameraHalEvent");
+ return;
+ }
+
+ switch(evt->mEventType)
+ {
+ case CameraHalEvent::EVENT_SHUTTER:
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_SHUTTER) ) )
+ {
+ mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);
+ }
+ mRawAvailable = false;
+
+ break;
+
+ case CameraHalEvent::EVENT_FOCUS_LOCKED:
+ case CameraHalEvent::EVENT_FOCUS_ERROR:
+
+ focusEvtData = &evt->mEventData->focusEvent;
+ if ( ( focusEvtData->focusLocked ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) )
+ {
+ mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
+ mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
+ }
+ else if ( focusEvtData->focusError &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb ) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_FOCUS) ) )
+ {
+ mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie);
+ mCameraHal->disableMsgType(CAMERA_MSG_FOCUS);
+ }
+
+ break;
+
+ case CameraHalEvent::EVENT_ZOOM_INDEX_REACHED:
+
+ zoomEvtData = &evt->mEventData->zoomEvent;
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_ZOOM) ) )
+ {
+ mNotifyCb(CAMERA_MSG_ZOOM, zoomEvtData->currentZoomIndex, zoomEvtData->targetZoomIndexReached, mCallbackCookie);
+ }
+
+ break;
+
+ case CameraHalEvent::EVENT_FACE:
+
+ faceEvtData = evt->mEventData->faceEvent;
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mNotifyCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) ) )
+ {
+ // WA for an issue inside CameraService
+ camera_memory_t *tmpBuffer = mRequestMemory(-1, 1, 1, NULL);
+
+ mDataCb(CAMERA_MSG_PREVIEW_METADATA,
+ tmpBuffer,
+ 0,
+ faceEvtData->getFaceResult(),
+ mCallbackCookie);
+
+ faceEvtData.clear();
+
+ if ( NULL != tmpBuffer ) {
+ tmpBuffer->release(tmpBuffer);
+ }
+
+ }
+
+ break;
+
+ case CameraHalEvent::ALL_EVENTS:
+ break;
+ default:
+ break;
+ }
+
+ break;
+ }
+
+ if ( NULL != evt )
+ {
+ delete evt;
+ }
+
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+static void copy2Dto1D(void *dst,
+ void *src,
+ int width,
+ int height,
+ size_t stride,
+ uint32_t offset,
+ unsigned int bytesPerPixel,
+ size_t length,
+ const char *pixelFormat)
+{
+ unsigned int alignedRow, row;
+ unsigned char *bufferDst, *bufferSrc;
+ unsigned char *bufferDstEnd, *bufferSrcEnd;
+ uint16_t *bufferSrc_UV;
+
+ unsigned int *y_uv = (unsigned int *)src;
+
+ CAMHAL_LOGVB("copy2Dto1D() y= %p ; uv=%p.",y_uv[0], y_uv[1]);
+    CAMHAL_LOGVB("pixelFormat = %s; offset=%d", pixelFormat ? pixelFormat : "NULL", offset);
+
+ if (pixelFormat!=NULL) {
+ if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ bytesPerPixel = 2;
+ } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ bytesPerPixel = 1;
+ bufferDst = ( unsigned char * ) dst;
+ bufferDstEnd = ( unsigned char * ) dst + width*height*bytesPerPixel;
+ bufferSrc = ( unsigned char * ) y_uv[0] + offset;
+ bufferSrcEnd = ( unsigned char * ) ( ( size_t ) y_uv[0] + length + offset);
+ row = width*bytesPerPixel;
+ alignedRow = stride-width;
+ int stride_bytes = stride / 8;
+ uint32_t xOff = offset % stride;
+ uint32_t yOff = offset / stride;
+
+ // going to convert from NV12 here and return
+ // Step 1: Y plane: iterate through each row and copy
+ for ( int i = 0 ; i < height ; i++) {
+ memcpy(bufferDst, bufferSrc, row);
+ bufferSrc += stride;
+ bufferDst += row;
+ if ( ( bufferSrc > bufferSrcEnd ) || ( bufferDst > bufferDstEnd ) ) {
+ break;
+ }
+ }
+
+ bufferSrc_UV = ( uint16_t * ) ((uint8_t*)y_uv[1] + (stride/2)*yOff + xOff);
+
+ if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ uint16_t *bufferDst_UV;
+
+ // Step 2: UV plane: convert NV12 to NV21 by swapping U & V
+ bufferDst_UV = (uint16_t *) (((uint8_t*)dst)+row*height);
+
+ for (int i = 0 ; i < height/2 ; i++, bufferSrc_UV += alignedRow/2) {
+ int n = width;
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #32 \n\t"
+ " blt 1f \n\t"
+ "0: @ 32 byte swap \n\t"
+ " sub %[n], %[n], #32 \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! \n\t"
+ " vswp q0, q1 \n\t"
+ " cmp %[n], #32 \n\t"
+ " vst2.8 {q0,q1},[%[dst]]! \n\t"
+ " bge 0b \n\t"
+ "1: @ Is there enough data? \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 3f \n\t"
+ "2: @ 16 byte swap \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " vswp d0, d1 \n\t"
+ " cmp %[n], #16 \n\t"
+ " vst2.8 {d0,d1},[%[dst]]! \n\t"
+ " bge 2b \n\t"
+ "3: @ Is there enough data? \n\t"
+ " cmp %[n], #8 \n\t"
+ " blt 5f \n\t"
+ "4: @ 8 byte swap \n\t"
+ " sub %[n], %[n], #8 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " vswp d0, d1 \n\t"
+ " cmp %[n], #8 \n\t"
+ " vst2.8 {d0[0],d1[0]},[%[dst]]! \n\t"
+ " bge 4b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst] "+r" (bufferDst_UV), [src] "+r" (bufferSrc_UV), [n] "+r" (n)
+ : [src_stride] "r" (stride_bytes)
+ : "cc", "memory", "q0", "q1"
+ );
+ }
+ } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ uint16_t *bufferDst_U;
+ uint16_t *bufferDst_V;
+
+ // Step 2: UV plane: convert NV12 to YV12 by de-interleaving U & V
+                // TODO(XXX): This version of CameraHal assumes that the NV12 format is set
+                //            at the camera adapter in order to support YV12. This needs to
+                //            be addressed for USBCamera.
+
+ bufferDst_V = (uint16_t *) (((uint8_t*)dst)+row*height);
+ bufferDst_U = (uint16_t *) (((uint8_t*)dst)+row*height+row*height/4);
+
+ for (int i = 0 ; i < height/2 ; i++, bufferSrc_UV += alignedRow/2) {
+ int n = width;
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #32 \n\t"
+ " blt 1f \n\t"
+ "0: @ 32 byte swap \n\t"
+ " sub %[n], %[n], #32 \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! \n\t"
+ " cmp %[n], #32 \n\t"
+ " vst1.8 {q1},[%[dst_v]]! \n\t"
+ " vst1.8 {q0},[%[dst_u]]! \n\t"
+ " bge 0b \n\t"
+ "1: @ Is there enough data? \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 3f \n\t"
+ "2: @ 16 byte swap \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " cmp %[n], #16 \n\t"
+ " vst1.8 {d1},[%[dst_v]]! \n\t"
+ " vst1.8 {d0},[%[dst_u]]! \n\t"
+ " bge 2b \n\t"
+ "3: @ Is there enough data? \n\t"
+ " cmp %[n], #8 \n\t"
+ " blt 5f \n\t"
+ "4: @ 8 byte swap \n\t"
+ " sub %[n], %[n], #8 \n\t"
+ " vld2.8 {d0, d1} , [%[src]]! \n\t"
+ " cmp %[n], #8 \n\t"
+ " vst1.8 {d1[0]},[%[dst_v]]! \n\t"
+ " vst1.8 {d0[0]},[%[dst_u]]! \n\t"
+ " bge 4b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst_u] "+r" (bufferDst_U), [dst_v] "+r" (bufferDst_V),
+ [src] "+r" (bufferSrc_UV), [n] "+r" (n)
+ : [src_stride] "r" (stride_bytes)
+ : "cc", "memory", "q0", "q1"
+ );
+ }
+ }
+ return ;
+
+ } else if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ bytesPerPixel = 2;
+ }
+ }
+
+ bufferDst = ( unsigned char * ) dst;
+ bufferSrc = ( unsigned char * ) y_uv[0];
+ row = width*bytesPerPixel;
+ alignedRow = ( row + ( stride -1 ) ) & ( ~ ( stride -1 ) );
+
+ //iterate through each row
+ for ( int i = 0 ; i < height ; i++, bufferSrc += alignedRow, bufferDst += row) {
+ memcpy(bufferDst, bufferSrc, row);
+ }
+}
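+
+/* The NEON loops above are hand-optimized UV shuffles. Conceptually, the
+ * NV12 -> NV21 path swaps every U/V byte pair; a rough scalar sketch (for
+ * reference only, not compiled in) of one chroma row would be:
+ *
+ *     for (unsigned j = 0; j < width / 2; j++) {
+ *         uint8_t u = srcUV[2 * j];        // NV12 stores U first,
+ *         uint8_t v = srcUV[2 * j + 1];    // then V
+ *         dstVU[2 * j]     = v;            // NV21 stores V first,
+ *         dstVU[2 * j + 1] = u;            // then U
+ *     }
+ *
+ * The NV12 -> YV12 path instead de-interleaves the same pairs into two
+ * separate planes, writing the V plane before the U plane.
+ */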
+
+void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType)
+{
+ camera_memory_t* picture = NULL;
+ void *dest = NULL, *src = NULL;
+
+ // scope for lock
+ {
+ Mutex::Autolock lock(mLock);
+
+ if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) {
+ goto exit;
+ }
+
+ picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+
+ if (NULL != picture) {
+ dest = picture->data;
+ if (NULL != dest) {
+ src = (void *) ((unsigned int) frame->mBuffer + frame->mOffset);
+ memcpy(dest, src, frame->mLength);
+ }
+ }
+ }
+
+ exit:
+ mFrameProvider->returnFrame(frame->mBuffer, (CameraFrame::FrameType) frame->mFrameType);
+
+ if(picture) {
+ if((mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) &&
+ mCameraHal->msgTypeEnabled(msgType)) {
+ mDataCb(msgType, picture, 0, NULL, mCallbackCookie);
+ }
+ picture->release(picture);
+ }
+}
+
+void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType)
+{
+ camera_memory_t* picture = NULL;
+ void* dest = NULL;
+
+ // scope for lock
+ {
+ Mutex::Autolock lock(mLock);
+
+ if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) {
+ goto exit;
+ }
+
+ if (!mPreviewMemory || !frame->mBuffer) {
+            CAMHAL_LOGDA("Error! One of the buffers is NULL");
+ goto exit;
+ }
+
+
+ dest = (void*) mPreviewBufs[mPreviewBufCount];
+
+ CAMHAL_LOGVB("%d:copy2Dto1D(%p, %p, %d, %d, %d, %d, %d,%s)",
+ __LINE__,
+ NULL, //buf,
+ frame->mBuffer,
+ frame->mWidth,
+ frame->mHeight,
+ frame->mAlignment,
+ 2,
+ frame->mLength,
+ mPreviewPixelFormat);
+
+ if ( NULL != dest ) {
+ // data sync frames don't need conversion
+ if (CameraFrame::FRAME_DATA_SYNC == frame->mFrameType) {
+ if ( (mPreviewMemory->size / MAX_BUFFERS) >= frame->mLength ) {
+ memcpy(dest, (void*) frame->mBuffer, frame->mLength);
+ } else {
+ memset(dest, 0, (mPreviewMemory->size / MAX_BUFFERS));
+ }
+ } else {
+ if ((NULL == (void*)frame->mYuv[0]) || (NULL == (void*)frame->mYuv[1])){
+ CAMHAL_LOGEA("Error! One of the YUV Pointer is NULL");
+ goto exit;
+ }
+ else{
+ copy2Dto1D(dest,
+ frame->mYuv,
+ frame->mWidth,
+ frame->mHeight,
+ frame->mAlignment,
+ frame->mOffset,
+ 2,
+ frame->mLength,
+ mPreviewPixelFormat);
+ }
+ }
+ }
+ }
+
+ exit:
+ mFrameProvider->returnFrame(frame->mBuffer, (CameraFrame::FrameType) frame->mFrameType);
+
+ if((mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) &&
+ mCameraHal->msgTypeEnabled(msgType) &&
+ (dest != NULL)) {
+ mDataCb(msgType, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
+ }
+
+ // increment for next buffer
+ mPreviewBufCount = (mPreviewBufCount + 1) % AppCallbackNotifier::MAX_BUFFERS;
+}
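+
+// Preview callbacks reuse a single camera_memory_t that startPreviewCallbacks()
+// carves into MAX_BUFFERS equal slices; mPreviewBufCount walks that ring, so the
+// slice index passed to mDataCb() above always refers to the frame that was just
+// converted.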
+
+status_t AppCallbackNotifier::dummyRaw()
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == mRequestMemory ) {
+ CAMHAL_LOGEA("Can't allocate memory for dummy raw callback!");
+ return NO_INIT;
+ }
+
+ if ( ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( NULL != mNotifyCb ) ){
+
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE) ) {
+ camera_memory_t *dummyRaw = mRequestMemory(-1, 1, 1, NULL);
+
+ if ( NULL == dummyRaw ) {
+ CAMHAL_LOGEA("Dummy raw buffer allocation failed!");
+ return NO_MEMORY;
+ }
+
+ mDataCb(CAMERA_MSG_RAW_IMAGE, dummyRaw, 0, NULL, mCallbackCookie);
+
+ dummyRaw->release(dummyRaw);
+ } else if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY) ) {
+ mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+void AppCallbackNotifier::notifyFrame()
+{
+ ///Receive and send the frame notifications to app
+ TIUTILS::Message msg;
+ CameraFrame *frame;
+ MemoryHeapBase *heap;
+ MemoryBase *buffer = NULL;
+ sp<MemoryBase> memBase;
+ void *buf = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ {
+ Mutex::Autolock lock(mLock);
+ if(!mFrameQ.isEmpty()) {
+ mFrameQ.get(&msg);
+ } else {
+ return;
+ }
+ }
+
+ bool ret = true;
+
+ frame = NULL;
+ switch(msg.command)
+ {
+ case AppCallbackNotifier::NOTIFIER_CMD_PROCESS_FRAME:
+
+ frame = (CameraFrame *) msg.arg1;
+ if(!frame)
+ {
+ break;
+ }
+
+ if ( (CameraFrame::RAW_FRAME == frame->mFrameType )&&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( NULL != mNotifyCb ) )
+ {
+
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE) )
+ {
+#ifdef COPY_IMAGE_BUFFER
+ copyAndSendPictureFrame(frame, CAMERA_MSG_RAW_IMAGE);
+#else
+ //TODO: Find a way to map a Tiler buffer to a MemoryHeapBase
+#endif
+ }
+ else {
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY) ) {
+ mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie);
+ }
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+
+ mRawAvailable = true;
+
+ }
+ else if ( (CameraFrame::IMAGE_FRAME == frame->mFrameType) &&
+ (NULL != mCameraHal) &&
+ (NULL != mDataCb) &&
+ (CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG & frame->mQuirks) )
+ {
+
+ int encode_quality = 100, tn_quality = 100;
+ int tn_width, tn_height;
+ unsigned int current_snapshot = 0;
+ Encoder_libjpeg::params *main_jpeg = NULL, *tn_jpeg = NULL;
+ void* exif_data = NULL;
+ camera_memory_t* raw_picture = mRequestMemory(-1, frame->mLength, 1, NULL);
+
+ if(raw_picture) {
+ buf = raw_picture->data;
+ }
+
+ CameraParameters parameters;
+ char *params = mCameraHal->getParameters();
+ const String8 strParams(params);
+ parameters.unflatten(strParams);
+
+ encode_quality = parameters.getInt(CameraParameters::KEY_JPEG_QUALITY);
+ if (encode_quality < 0 || encode_quality > 100) {
+ encode_quality = 100;
+ }
+
+ tn_quality = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ if (tn_quality < 0 || tn_quality > 100) {
+ tn_quality = 100;
+ }
+
+ if (CameraFrame::HAS_EXIF_DATA & frame->mQuirks) {
+ exif_data = frame->mCookie2;
+ }
+
+ main_jpeg = (Encoder_libjpeg::params*)
+ malloc(sizeof(Encoder_libjpeg::params));
+ if (main_jpeg) {
+ main_jpeg->src = (uint8_t*) frame->mBuffer;
+ main_jpeg->src_size = frame->mLength;
+ main_jpeg->dst = (uint8_t*) buf;
+ main_jpeg->dst_size = frame->mLength;
+ main_jpeg->quality = encode_quality;
+ main_jpeg->in_width = frame->mWidth;
+ main_jpeg->in_height = frame->mHeight;
+ main_jpeg->out_width = frame->mWidth;
+ main_jpeg->out_height = frame->mHeight;
+ main_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+
+ tn_width = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ tn_height = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+
+ if ((tn_width > 0) && (tn_height > 0)) {
+ tn_jpeg = (Encoder_libjpeg::params*)
+ malloc(sizeof(Encoder_libjpeg::params));
+ // if malloc fails just keep going and encode main jpeg
+ if (!tn_jpeg) {
+ tn_jpeg = NULL;
+ }
+ }
+
+ if (tn_jpeg) {
+ int width, height;
+ parameters.getPreviewSize(&width,&height);
+ current_snapshot = (mPreviewBufCount + MAX_BUFFERS - 1) % MAX_BUFFERS;
+ tn_jpeg->src = (uint8_t*) mPreviewBufs[current_snapshot];
+ tn_jpeg->src_size = mPreviewMemory->size / MAX_BUFFERS;
+ tn_jpeg->dst = (uint8_t*) malloc(tn_jpeg->src_size);
+ tn_jpeg->dst_size = tn_jpeg->src_size;
+ tn_jpeg->quality = tn_quality;
+ tn_jpeg->in_width = width;
+ tn_jpeg->in_height = height;
+ tn_jpeg->out_width = tn_width;
+ tn_jpeg->out_height = tn_height;
+                    tn_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+
+ sp<Encoder_libjpeg> encoder = new Encoder_libjpeg(main_jpeg,
+ tn_jpeg,
+ AppCallbackNotifierEncoderCallback,
+ (CameraFrame::FrameType)frame->mFrameType,
+ this,
+ raw_picture,
+ exif_data);
+ encoder->run();
+ gEncoderQueue.add(frame->mBuffer, encoder);
+ encoder.clear();
+ if (params != NULL)
+ {
+ mCameraHal->putParameters(params);
+ }
+ }
+ else if ( ( CameraFrame::IMAGE_FRAME == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) )
+ {
+
+                // CTS/MTS requirement: every 'takePicture()' call that
+                // registers a raw callback should also receive one. This is
+                // not always the case with CameraAdapters, though.
+ if (!mRawAvailable) {
+ dummyRaw();
+ } else {
+ mRawAvailable = false;
+ }
+
+#ifdef COPY_IMAGE_BUFFER
+ {
+ Mutex::Autolock lock(mBurstLock);
+#if 0 //TODO: enable burst mode later
+ if ( mBurst )
+ {
+                        mDataCb(CAMERA_MSG_BURST_IMAGE, JPEGPictureMemBase, mCallbackCookie);
+ }
+ else
+#endif
+ {
+ copyAndSendPictureFrame(frame, CAMERA_MSG_COMPRESSED_IMAGE);
+ }
+ }
+#else
+ //TODO: Find a way to map a Tiler buffer to a MemoryHeapBase
+#endif
+ }
+ else if ( ( CameraFrame::VIDEO_FRAME_SYNC == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME) ) )
+ {
+ mRecordingLock.lock();
+ if(mRecording)
+ {
+ if(mUseMetaDataBufferMode)
+ {
+                 camera_memory_t *videoMedatadaBufferMemory =
+                     (camera_memory_t *) mVideoMetadataBufferMemoryMap.valueFor((uint32_t) frame->mBuffer);
+
+                 // Validate the metadata memory before dereferencing its data pointer
+                 if( (NULL == videoMedatadaBufferMemory) || (NULL == videoMedatadaBufferMemory->data) || (NULL == frame->mBuffer) )
+                     {
+                     CAMHAL_LOGEA("Error! One of the video buffers is NULL");
+                     break;
+                     }
+
+                 video_metadata_t *videoMetadataBuffer = (video_metadata_t *) videoMedatadaBufferMemory->data;
+
+ if ( mUseVideoBuffers )
+ {
+ int vBuf = mVideoMap.valueFor((uint32_t) frame->mBuffer);
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ Rect bounds;
+ bounds.left = 0;
+ bounds.top = 0;
+ bounds.right = mVideoWidth;
+ bounds.bottom = mVideoHeight;
+
+ void *y_uv[2];
+ mapper.lock((buffer_handle_t)vBuf, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
+
+ structConvImage input = {frame->mWidth,
+ frame->mHeight,
+ 4096,
+ IC_FORMAT_YCbCr420_lp,
+ (mmByte *)frame->mYuv[0],
+ (mmByte *)frame->mYuv[1],
+ frame->mOffset};
+
+ structConvImage output = {mVideoWidth,
+ mVideoHeight,
+ 4096,
+ IC_FORMAT_YCbCr420_lp,
+ (mmByte *)y_uv[0],
+ (mmByte *)y_uv[1],
+ 0};
+
+ VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0);
+ mapper.unlock((buffer_handle_t)vBuf);
+ videoMetadataBuffer->metadataBufferType = 0;//(int) kMetadataBufferTypeCameraSource;
+ videoMetadataBuffer->handle = (void *)vBuf;
+ videoMetadataBuffer->offset = 0;
+ }
+ else
+ {
+ videoMetadataBuffer->metadataBufferType = 0;//(int) kMetadataBufferTypeCameraSource;
+ videoMetadataBuffer->handle = frame->mBuffer;
+ videoMetadataBuffer->offset = frame->mOffset;
+ }
+
+ CAMHAL_LOGVB("mDataCbTimestamp : frame->mBuffer=0x%x, videoMetadataBuffer=0x%x, videoMedatadaBufferMemory=0x%x",
+ frame->mBuffer, videoMetadataBuffer, videoMedatadaBufferMemory);
+
+ mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME,
+ videoMedatadaBufferMemory, 0, mCallbackCookie);
+ }
+ else
+ {
+ //TODO: Need to revisit this, should ideally be mapping the TILER buffer using mRequestMemory
+ camera_memory_t* fakebuf = mRequestMemory(-1, 4, 1, NULL);
+ if( (NULL == fakebuf) || ( NULL == fakebuf->data) || ( NULL == frame->mBuffer))
+ {
+ CAMHAL_LOGEA("Error! One of the video buffers is NULL");
+ break;
+ }
+
+ fakebuf->data = frame->mBuffer;
+ mDataCbTimestamp(frame->mTimestamp, CAMERA_MSG_VIDEO_FRAME, fakebuf, 0, mCallbackCookie);
+ fakebuf->release(fakebuf);
+ }
+ }
+ mRecordingLock.unlock();
+
+ }
+ else if(( CameraFrame::SNAPSHOT_FRAME == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( NULL != mNotifyCb)) {
+ //When enabled, measurement data is sent instead of video data
+ if ( !mMeasurementEnabled ) {
+ copyAndSendPreviewFrame(frame, CAMERA_MSG_POSTVIEW_FRAME);
+ } else {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+ }
+ else if ( ( CameraFrame::PREVIEW_FRAME_SYNC== frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) ) {
+ //When enabled, measurement data is sent instead of video data
+ if ( !mMeasurementEnabled ) {
+ copyAndSendPreviewFrame(frame, CAMERA_MSG_PREVIEW_FRAME);
+ } else {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+ }
+ else if ( ( CameraFrame::FRAME_DATA_SYNC == frame->mFrameType ) &&
+ ( NULL != mCameraHal ) &&
+ ( NULL != mDataCb) &&
+ ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) ) {
+ copyAndSendPreviewFrame(frame, CAMERA_MSG_PREVIEW_FRAME);
+ } else {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ ( CameraFrame::FrameType ) frame->mFrameType);
+ CAMHAL_LOGDB("Frame type 0x%x is still unsupported!", frame->mFrameType);
+ }
+
+ break;
+
+ default:
+
+ break;
+
+ };
+
+exit:
+
+ if ( NULL != frame )
+ {
+ delete frame;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::frameCallbackRelay(CameraFrame* caFrame)
+{
+ LOG_FUNCTION_NAME;
+ AppCallbackNotifier *appcbn = (AppCallbackNotifier*) (caFrame->mCookie);
+ appcbn->frameCallback(caFrame);
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::frameCallback(CameraFrame* caFrame)
+{
+ ///Post the event to the event queue of AppCallbackNotifier
+ TIUTILS::Message msg;
+ CameraFrame *frame;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != caFrame )
+ {
+
+ frame = new CameraFrame(*caFrame);
+ if ( NULL != frame )
+ {
+ msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_FRAME;
+ msg.arg1 = frame;
+ mFrameQ.put(&msg);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Not enough resources to allocate CameraFrame");
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::flushAndReturnFrames()
+{
+ TIUTILS::Message msg;
+ CameraFrame *frame;
+
+ Mutex::Autolock lock(mLock);
+ while (!mFrameQ.isEmpty()) {
+ mFrameQ.get(&msg);
+ frame = (CameraFrame*) msg.arg1;
+ if (frame) {
+ mFrameProvider->returnFrame(frame->mBuffer,
+ (CameraFrame::FrameType) frame->mFrameType);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::eventCallbackRelay(CameraHalEvent* chEvt)
+{
+ LOG_FUNCTION_NAME;
+ AppCallbackNotifier *appcbn = (AppCallbackNotifier*) (chEvt->mCookie);
+ appcbn->eventCallback(chEvt);
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::eventCallback(CameraHalEvent* chEvt)
+{
+
+ ///Post the event to the event queue of AppCallbackNotifier
+ TIUTILS::Message msg;
+ CameraHalEvent *event;
+
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != chEvt )
+ {
+
+ event = new CameraHalEvent(*chEvt);
+ if ( NULL != event )
+ {
+ msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT;
+ msg.arg1 = event;
+ {
+ Mutex::Autolock lock(mLock);
+ mEventQ.put(&msg);
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEA("Not enough resources to allocate CameraHalEvent");
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+void AppCallbackNotifier::flushEventQueue()
+{
+
+ {
+ Mutex::Autolock lock(mLock);
+ mEventQ.clear();
+ }
+}
+
+
+bool AppCallbackNotifier::processMessage()
+{
+ ///Retrieve the command from the command queue and process it
+ TIUTILS::Message msg;
+
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDA("+Msg get...");
+ mNotificationThread->msgQ().get(&msg);
+ CAMHAL_LOGDA("-Msg get...");
+ bool ret = true;
+
+ switch(msg.command)
+ {
+ case NotificationThread::NOTIFIER_EXIT:
+ {
+ CAMHAL_LOGEA("Received NOTIFIER_EXIT command from Camera HAL");
+ mNotifierState = AppCallbackNotifier::NOTIFIER_EXITED;
+ ret = false;
+ break;
+ }
+ default:
+ {
+            CAMHAL_LOGEA("Error: Unrecognized command received in processMessage()");
+ break;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+
+}
+
+AppCallbackNotifier::~AppCallbackNotifier()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Stop app callback notifier if not already stopped
+ stop();
+
+ ///Unregister with the frame provider
+ if ( NULL != mFrameProvider )
+ {
+ mFrameProvider->disableFrameNotification(CameraFrame::ALL_FRAMES);
+ }
+
+ //unregister with the event provider
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+ }
+
+ TIUTILS::Message msg = {0,0,0,0,0,0};
+ msg.command = NotificationThread::NOTIFIER_EXIT;
+
+ ///Post the message to display thread
+ mNotificationThread->msgQ().put(&msg);
+
+ //Exit and cleanup the thread
+ mNotificationThread->requestExit();
+ mNotificationThread->join();
+
+ //Delete the display thread
+ mNotificationThread.clear();
+
+
+ ///Free the event and frame providers
+ if ( NULL != mEventProvider )
+ {
+ ///Deleting the event provider
+ CAMHAL_LOGDA("Stopping Event Provider");
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ if ( NULL != mFrameProvider )
+ {
+ ///Deleting the frame provider
+ CAMHAL_LOGDA("Stopping Frame Provider");
+ delete mFrameProvider;
+ mFrameProvider = NULL;
+ }
+
+ releaseSharedVideoBuffers();
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+//Free all video heaps and buffers
+void AppCallbackNotifier::releaseSharedVideoBuffers()
+{
+ LOG_FUNCTION_NAME;
+
+ if(mUseMetaDataBufferMode)
+ {
+ camera_memory_t* videoMedatadaBufferMemory;
+ for (unsigned int i = 0; i < mVideoMetadataBufferMemoryMap.size(); i++)
+ {
+ videoMedatadaBufferMemory = (camera_memory_t*) mVideoMetadataBufferMemoryMap.valueAt(i);
+ if(NULL != videoMedatadaBufferMemory)
+ {
+ videoMedatadaBufferMemory->release(videoMedatadaBufferMemory);
+ CAMHAL_LOGDB("Released videoMedatadaBufferMemory=0x%x", videoMedatadaBufferMemory);
+ }
+ }
+
+ mVideoMetadataBufferMemoryMap.clear();
+ mVideoMetadataBufferReverseMap.clear();
+ if (mUseVideoBuffers)
+ {
+ mVideoMap.clear();
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier)
+{
+
+ LOG_FUNCTION_NAME;
+ ///@remarks There is no NULL check here. We will check
+ ///for NULL when we get start command from CameraHal
+ ///@Remarks Currently only one event provider (CameraAdapter) is supported
+ ///@todo Have an array of event providers for each event bitmask
+ mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay);
+ if ( NULL == mEventProvider )
+ {
+ CAMHAL_LOGEA("Error in creating EventProvider");
+ }
+ else
+ {
+ mEventProvider->enableEventNotification(eventMask);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::setFrameProvider(FrameNotifier *frameNotifier)
+{
+ LOG_FUNCTION_NAME;
+ ///@remarks There is no NULL check here. We will check
+ ///for NULL when we get the start command from CameraAdapter
+ mFrameProvider = new FrameProvider(frameNotifier, this, frameCallbackRelay);
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Error in creating FrameProvider");
+ }
+ else
+ {
+ //Register only for captured images and RAW for now
+ //TODO: Register for and handle all types of frames
+ mFrameProvider->enableFrameNotification(CameraFrame::IMAGE_FRAME);
+ mFrameProvider->enableFrameNotification(CameraFrame::RAW_FRAME);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters &params, void *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
+{
+ sp<MemoryHeapBase> heap;
+ sp<MemoryBase> buffer;
+ unsigned int *bufArr;
+ size_t size = 0;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to start video recording without FrameProvider");
+ return -EINVAL;
+ }
+
+ if ( mPreviewing )
+ {
+ CAMHAL_LOGDA("+Already previewing");
+ return NO_INIT;
+ }
+
+ int w,h;
+ ///Get preview size
+ params.getPreviewSize(&w, &h);
+
+ //Get the preview pixel format
+ mPreviewPixelFormat = params.getPreviewFormat();
+
+ if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ {
+ size = w*h*2;
+ mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV422I;
+ }
+ else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
+ {
+ size = (w*h*3)/2;
+ mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV420SP;
+ }
+ else if(strcmp(mPreviewPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ {
+ size = w*h*2;
+ mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_RGB565;
+ }
+
+ mPreviewMemory = mRequestMemory(-1, size, AppCallbackNotifier::MAX_BUFFERS, NULL);
+ if (!mPreviewMemory) {
+ return NO_MEMORY;
+ }
+
+ for (int i=0; i < AppCallbackNotifier::MAX_BUFFERS; i++) {
+ mPreviewBufs[i] = (unsigned char*) mPreviewMemory->data + (i*size);
+ }
+
+ if ( mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME ) ) {
+ mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+
+ mPreviewBufCount = 0;
+
+ mPreviewing = true;
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
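+
+// Sizing example (assuming a 640x480 YUV420SP preview): each callback slice is
+// 640 * 480 * 3 / 2 = 460800 bytes, and mRequestMemory() above allocates
+// MAX_BUFFERS such slices in one contiguous camera_memory_t.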
+
+void AppCallbackNotifier::setBurst(bool burst)
+{
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mBurstLock);
+
+ mBurst = burst;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void AppCallbackNotifier::useVideoBuffers(bool useVideoBuffers)
+{
+ LOG_FUNCTION_NAME;
+
+ mUseVideoBuffers = useVideoBuffers;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+bool AppCallbackNotifier::getUesVideoBuffers()
+{
+ return mUseVideoBuffers;
+}
+
+void AppCallbackNotifier::setVideoRes(int width, int height)
+{
+ LOG_FUNCTION_NAME;
+
+ mVideoWidth = width;
+ mVideoHeight = height;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t AppCallbackNotifier::stopPreviewCallbacks()
+{
+ sp<MemoryHeapBase> heap;
+ sp<MemoryBase> buffer;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to stop preview callbacks without FrameProvider");
+ return -EINVAL;
+ }
+
+ if ( !mPreviewing )
+ {
+ return NO_INIT;
+ }
+
+ mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+
+ {
+ Mutex::Autolock lock(mLock);
+ mPreviewMemory->release(mPreviewMemory);
+ }
+
+ mPreviewing = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+
+}
+
+status_t AppCallbackNotifier::useMetaDataBufferMode(bool enable)
+{
+ mUseMetaDataBufferMode = enable;
+
+ return NO_ERROR;
+}
+
+
+status_t AppCallbackNotifier::startRecording()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mRecordingLock);
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to start video recording without FrameProvider");
+ ret = -1;
+ }
+
+ if(mRecording)
+ {
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mFrameProvider->enableFrameNotification(CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+ mRecording = true;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+//Allocate metadata buffers for video recording
+status_t AppCallbackNotifier::initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, size_t length, size_t count, void *vidBufs)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ if(mUseMetaDataBufferMode)
+ {
+ uint32_t *bufArr = NULL;
+ camera_memory_t* videoMedatadaBufferMemory = NULL;
+
+ if(NULL == buffers)
+ {
+ CAMHAL_LOGEA("Error! Video buffers are NULL");
+ return BAD_VALUE;
+ }
+ bufArr = (uint32_t *) buffers;
+
+ for (uint32_t i = 0; i < count; i++)
+ {
+ videoMedatadaBufferMemory = mRequestMemory(-1, sizeof(video_metadata_t), 1, NULL);
+ if((NULL == videoMedatadaBufferMemory) || (NULL == videoMedatadaBufferMemory->data))
+ {
+ CAMHAL_LOGEA("Error! Could not allocate memory for Video Metadata Buffers");
+ return NO_MEMORY;
+ }
+
+ mVideoMetadataBufferMemoryMap.add(bufArr[i], (uint32_t)(videoMedatadaBufferMemory));
+ mVideoMetadataBufferReverseMap.add((uint32_t)(videoMedatadaBufferMemory->data), bufArr[i]);
+ CAMHAL_LOGDB("bufArr[%d]=0x%x, videoMedatadaBufferMemory=0x%x, videoMedatadaBufferMemory->data=0x%x",
+ i, bufArr[i], videoMedatadaBufferMemory, videoMedatadaBufferMemory->data);
+
+ if (vidBufs != NULL)
+ {
+ uint32_t *vBufArr = (uint32_t *) vidBufs;
+ mVideoMap.add(bufArr[i], vBufArr[i]);
+ CAMHAL_LOGVB("bufArr[%d]=0x%x, vBuffArr[%d]=0x%x", i, bufArr[i], i, vBufArr[i]);
+ }
+ }
+ }
+
+exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
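+
+// In metadata buffer mode every camera buffer gets a small camera_memory_t that
+// wraps a video_metadata_t. mVideoMetadataBufferMemoryMap maps camera buffer ->
+// metadata memory (used when delivering CAMERA_MSG_VIDEO_FRAME), while
+// mVideoMetadataBufferReverseMap maps the metadata data pointer back to the
+// camera buffer so that releaseRecordingFrame() can return the correct frame to
+// the frame provider.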
+
+status_t AppCallbackNotifier::stopRecording()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mRecordingLock);
+
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to stop video recording without FrameProvider");
+ ret = -1;
+ }
+
+ if(!mRecording)
+ {
+ return NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mFrameProvider->disableFrameNotification(CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+ ///Release the shared video buffers
+ releaseSharedVideoBuffers();
+
+ mRecording = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t AppCallbackNotifier::releaseRecordingFrame(const void* mem)
+{
+ status_t ret = NO_ERROR;
+ void *frame = NULL;
+
+ LOG_FUNCTION_NAME;
+ if ( NULL == mFrameProvider )
+ {
+ CAMHAL_LOGEA("Trying to stop video recording without FrameProvider");
+ ret = -1;
+ }
+
+ if ( NULL == mem )
+ {
+ CAMHAL_LOGEA("Video Frame released is invalid");
+ ret = -1;
+ }
+
+ if( NO_ERROR != ret )
+ {
+ return ret;
+ }
+
+ if(mUseMetaDataBufferMode)
+ {
+ video_metadata_t *videoMetadataBuffer = (video_metadata_t *) mem ;
+ frame = (void*) mVideoMetadataBufferReverseMap.valueFor((uint32_t) videoMetadataBuffer);
+ CAMHAL_LOGVB("Releasing frame with videoMetadataBuffer=0x%x, videoMetadataBuffer->handle=0x%x & frame handle=0x%x\n",
+ videoMetadataBuffer, videoMetadataBuffer->handle, frame);
+ }
+ else
+ {
+ frame = (void*)(*((uint32_t *)mem));
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mFrameProvider->returnFrame(frame, CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t AppCallbackNotifier::enableMsgType(int32_t msgType)
+{
+ if( msgType & (CAMERA_MSG_POSTVIEW_FRAME | CAMERA_MSG_PREVIEW_FRAME) ) {
+ mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+
+ return NO_ERROR;
+}
+
+status_t AppCallbackNotifier::disableMsgType(int32_t msgType)
+{
+ if(!mCameraHal->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_POSTVIEW_FRAME)) {
+ mFrameProvider->disableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+
+ return NO_ERROR;
+
+}
+
+status_t AppCallbackNotifier::start()
+{
+ LOG_FUNCTION_NAME;
+ if(mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED)
+ {
+ CAMHAL_LOGDA("AppCallbackNotifier already running");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+
+ ///Check whether initial conditions are met for us to start
+ ///A frame provider should be available, if not return error
+ if(!mFrameProvider)
+ {
+ ///AppCallbackNotifier not properly initialized
+ CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Frame provider is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ ///At least one event notifier should be available, if not return error
+ ///@todo Modify here when there is an array of event providers
+ if(!mEventProvider)
+ {
+ CAMHAL_LOGEA("AppCallbackNotifier not properly initialized - Event provider is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ ///AppCallbackNotifier not properly initialized
+ return NO_INIT;
+ }
+
+ mNotifierState = AppCallbackNotifier::NOTIFIER_STARTED;
+ CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STARTED \n");
+
+ gEncoderQueue.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+
+}
+
+status_t AppCallbackNotifier::stop()
+{
+ LOG_FUNCTION_NAME;
+
+ if(mNotifierState!=AppCallbackNotifier::NOTIFIER_STARTED)
+ {
+ CAMHAL_LOGDA("AppCallbackNotifier already in stopped state");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+ {
+ Mutex::Autolock lock(mLock);
+
+ mNotifierState = AppCallbackNotifier::NOTIFIER_STOPPED;
+ CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STOPPED \n");
+ }
+
+ while(!gEncoderQueue.isEmpty()) {
+ sp<Encoder_libjpeg> encoder = gEncoderQueue.valueAt(0);
+ if(encoder.get()) {
+ encoder->cancel();
+ encoder->join();
+ encoder.clear();
+ }
+ gEncoderQueue.removeItemsAt(0);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+
+/*--------------------NotificationHandler Class ENDS here-----------------------------*/
+
+
+
+};
diff --git a/BaseCameraAdapter.cpp b/BaseCameraAdapter.cpp
new file mode 100644
index 0000000..925a09c
--- a/dev/null
+++ b/BaseCameraAdapter.cpp
@@ -0,0 +1,2312 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#define LOG_TAG "CameraHAL"
+
+#include "BaseCameraAdapter.h"
+
+namespace android {
+
+/*--------------------Camera Adapter Class STARTS here-----------------------------*/
+
+BaseCameraAdapter::BaseCameraAdapter()
+{
+ mReleaseImageBuffersCallback = NULL;
+ mEndImageCaptureCallback = NULL;
+ mErrorNotifier = NULL;
+ mEndCaptureData = NULL;
+ mReleaseData = NULL;
+ mRecording = false;
+
+ mPreviewBuffers = NULL;
+ mPreviewBufferCount = 0;
+ mPreviewBuffersLength = 0;
+
+ mVideoBuffers = NULL;
+ mVideoBuffersCount = 0;
+ mVideoBuffersLength = 0;
+
+ mCaptureBuffers = NULL;
+ mCaptureBuffersCount = 0;
+ mCaptureBuffersLength = 0;
+
+ mPreviewDataBuffers = NULL;
+ mPreviewDataBuffersCount = 0;
+ mPreviewDataBuffersLength = 0;
+
+ mAdapterState = INTIALIZED_STATE;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ mStartFocus.tv_sec = 0;
+ mStartFocus.tv_usec = 0;
+ mStartCapture.tv_sec = 0;
+ mStartCapture.tv_usec = 0;
+#endif
+
+}
+
+BaseCameraAdapter::~BaseCameraAdapter()
+{
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mSubscriberLock);
+
+ mFrameSubscribers.clear();
+ mImageSubscribers.clear();
+ mRawSubscribers.clear();
+ mVideoSubscribers.clear();
+ mFocusSubscribers.clear();
+ mShutterSubscribers.clear();
+ mZoomSubscribers.clear();
+ mFaceSubscribers.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t BaseCameraAdapter::registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mReleaseImageBuffersCallback = callback;
+ mReleaseData = user_data;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::registerEndCaptureCallback(end_image_capture_callback callback, void *user_data)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mEndImageCaptureCallback= callback;
+ mEndCaptureData = user_data;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier )
+ {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mErrorNotifier = errorNotifier;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void BaseCameraAdapter::enableMsgType(int32_t msgs, frame_callback callback, event_callback eventCb, void* cookie)
+{
+ Mutex::Autolock lock(mSubscriberLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( CameraFrame::PREVIEW_FRAME_SYNC == msgs )
+ {
+ mFrameSubscribers.add((int) cookie, callback);
+ }
+ else if ( CameraFrame::FRAME_DATA_SYNC == msgs )
+ {
+ mFrameDataSubscribers.add((int) cookie, callback);
+ }
+ else if ( CameraFrame::IMAGE_FRAME == msgs)
+ {
+ mImageSubscribers.add((int) cookie, callback);
+ }
+ else if ( CameraFrame::RAW_FRAME == msgs)
+ {
+ mRawSubscribers.add((int) cookie, callback);
+ }
+ else if ( CameraFrame::VIDEO_FRAME_SYNC == msgs)
+ {
+ mVideoSubscribers.add((int) cookie, callback);
+ }
+ else if ( CameraHalEvent::ALL_EVENTS == msgs)
+ {
+ mFocusSubscribers.add((int) cookie, eventCb);
+ mShutterSubscribers.add((int) cookie, eventCb);
+ mZoomSubscribers.add((int) cookie, eventCb);
+ mFaceSubscribers.add((int) cookie, eventCb);
+ }
+ else
+ {
+        CAMHAL_LOGEA("Message type subscription not supported yet!");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void BaseCameraAdapter::disableMsgType(int32_t msgs, void* cookie)
+{
+ Mutex::Autolock lock(mSubscriberLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( CameraFrame::PREVIEW_FRAME_SYNC == msgs )
+ {
+ mFrameSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraFrame::FRAME_DATA_SYNC == msgs )
+ {
+ mFrameDataSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraFrame::IMAGE_FRAME == msgs)
+ {
+ mImageSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraFrame::RAW_FRAME == msgs)
+ {
+ mRawSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraFrame::VIDEO_FRAME_SYNC == msgs)
+ {
+ mVideoSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraFrame::ALL_FRAMES == msgs )
+ {
+ mFrameSubscribers.removeItem((int) cookie);
+ mFrameDataSubscribers.removeItem((int) cookie);
+ mImageSubscribers.removeItem((int) cookie);
+ mRawSubscribers.removeItem((int) cookie);
+ mVideoSubscribers.removeItem((int) cookie);
+ }
+ else if ( CameraHalEvent::ALL_EVENTS == msgs)
+ {
+ //Subscribe only for focus
+ //TODO: Process case by case
+ mFocusSubscribers.removeItem((int) cookie);
+ mShutterSubscribers.removeItem((int) cookie);
+ mZoomSubscribers.removeItem((int) cookie);
+ mFaceSubscribers.removeItem((int) cookie);
+ }
+ else
+ {
+        CAMHAL_LOGEB("Message type 0x%x subscription not supported yet!", msgs);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void BaseCameraAdapter::addFramePointers(void *frameBuf, void *buf)
+{
+ unsigned int *pBuf = (unsigned int *)buf;
+ Mutex::Autolock lock(mSubscriberLock);
+
+ if ((frameBuf != NULL) && ( pBuf != NULL) )
+ {
+ CameraFrame *frame = new CameraFrame;
+ frame->mBuffer = frameBuf;
+ frame->mYuv[0] = pBuf[0];
+ frame->mYuv[1] = pBuf[1];
+ mFrameQueue.add(frameBuf, frame);
+
+ CAMHAL_LOGVB("Adding Frame=0x%x Y=0x%x UV=0x%x", frame->mBuffer, frame->mYuv[0], frame->mYuv[1]);
+ }
+}
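+
+// mFrameQueue caches a CameraFrame per buffer together with its Y/UV plane
+// pointers; removeFramePointers() below deletes these cached entries when the
+// buffers are released.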
+
+void BaseCameraAdapter::removeFramePointers()
+{
+ Mutex::Autolock lock(mSubscriberLock);
+
+ int size = mFrameQueue.size();
+ CAMHAL_LOGVB("Removing %d Frames = ", size);
+ for (int i = 0; i < size; i++)
+ {
+ CameraFrame *frame = (CameraFrame *)mFrameQueue.valueAt(i);
+ CAMHAL_LOGVB("Free Frame=0x%x Y=0x%x UV=0x%x", frame->mBuffer, frame->mYuv[0], frame->mYuv[1]);
+ delete frame;
+ }
+ mFrameQueue.clear();
+}
+
+void BaseCameraAdapter::returnFrame(void* frameBuf, CameraFrame::FrameType frameType)
+{
+ status_t res = NO_ERROR;
+ size_t subscriberCount = 0;
+ int refCount = -1;
+
+ Mutex::Autolock lock(mReturnFrameLock);
+
+ if ( NULL == frameBuf )
+ {
+ CAMHAL_LOGEA("Invalid frameBuf");
+ return;
+ }
+
+ if ( NO_ERROR == res)
+ {
+
+ refCount = getFrameRefCount(frameBuf, frameType);
+
+ if(frameType == CameraFrame::PREVIEW_FRAME_SYNC)
+ {
+ mFramesWithDisplay--;
+ }
+ else if(frameType == CameraFrame::VIDEO_FRAME_SYNC)
+ {
+ mFramesWithEncoder--;
+ }
+
+ if ( 0 < refCount )
+ {
+
+ refCount--;
+ setFrameRefCount(frameBuf, frameType, refCount);
+
+
+ if ( mRecording && (CameraFrame::VIDEO_FRAME_SYNC == frameType) ) {
+ refCount += getFrameRefCount(frameBuf, CameraFrame::PREVIEW_FRAME_SYNC);
+ } else if ( mRecording && (CameraFrame::PREVIEW_FRAME_SYNC == frameType) ) {
+ refCount += getFrameRefCount(frameBuf, CameraFrame::VIDEO_FRAME_SYNC);
+ } else if ( mRecording && (CameraFrame::SNAPSHOT_FRAME == frameType) ) {
+ refCount += getFrameRefCount(frameBuf, CameraFrame::VIDEO_FRAME_SYNC);
+ }
+
+
+ }
+ else
+ {
+ CAMHAL_LOGEA("Frame returned when ref count is already zero!!");
+ return;
+ }
+ }
+
+ CAMHAL_LOGVB("REFCOUNT 0x%x %d", frameBuf, refCount);
+
+ if ( NO_ERROR == res )
+ {
+ //check if someone is holding this buffer
+ if ( 0 == refCount )
+ {
+#if defined(DEBUG_LOG) && 0
+ //TODO figure out if this is a problem
+ if(mBuffersWithDucati.indexOfKey((int)frameBuf)>=0)
+ {
+ LOGE("Buffer already with Ducati!! 0x%x", frameBuf);
+ for(int i=0;i<mBuffersWithDucati.size();i++) LOGE("0x%x", mBuffersWithDucati.keyAt(i));
+ }
+ mBuffersWithDucati.add((int)frameBuf,1);
+#endif
+ res = fillThisBuffer(frameBuf, frameType);
+ }
+ }
+
+}
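+
+// Reference counting note: each frame type keeps its own per-buffer count.
+// returnFrame() drops one reference for the returning consumer (display,
+// encoder, etc.); only when the combined count reaches zero is the buffer
+// handed back to the adapter through fillThisBuffer(), typically so it can be
+// queued for capture again.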
+
+status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, int value2, int value3)
+{
+ status_t ret = NO_ERROR;
+ struct timeval *refTimestamp;
+ BuffersDescriptor *desc = NULL;
+ CameraFrame *frame = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ switch ( operation ) {
+ case CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW:
+ CAMHAL_LOGDA("Use buffers for preview");
+ desc = ( BuffersDescriptor * ) value1;
+
+ if ( NULL == desc )
+ {
+ CAMHAL_LOGEA("Invalid preview buffers!");
+ return -EINVAL;
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ Mutex::Autolock lock(mPreviewBufferLock);
+ mPreviewBuffers = (int *) desc->mBuffers;
+ mPreviewBuffersLength = desc->mLength;
+ mPreviewBuffersAvailable.clear();
+ for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
+ {
+ mPreviewBuffersAvailable.add(mPreviewBuffers[i], 0);
+ }
+            // initial ref count for undequeued buffers is 1 since the buffer provider
+ // is still holding on to it
+ for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
+ {
+ mPreviewBuffersAvailable.add(mPreviewBuffers[i], 1);
+ }
+ }
+
+ if ( NULL != desc )
+ {
+ ret = useBuffers(CameraAdapter::CAMERA_PREVIEW,
+ desc->mBuffers,
+ desc->mCount,
+ desc->mLength,
+ desc->mMaxQueueable);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA:
+ CAMHAL_LOGDA("Use buffers for preview data");
+ desc = ( BuffersDescriptor * ) value1;
+
+ if ( NULL == desc )
+ {
+ CAMHAL_LOGEA("Invalid preview data buffers!");
+ return -EINVAL;
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ Mutex::Autolock lock(mPreviewDataBufferLock);
+ mPreviewDataBuffers = (int *) desc->mBuffers;
+ mPreviewDataBuffersLength = desc->mLength;
+ mPreviewDataBuffersAvailable.clear();
+ for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
+ {
+ mPreviewDataBuffersAvailable.add(mPreviewDataBuffers[i], 0);
+ }
+            // initial ref count for undequeued buffers is 1 since the buffer provider
+ // is still holding on to it
+ for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
+ {
+ mPreviewDataBuffersAvailable.add(mPreviewDataBuffers[i], 1);
+ }
+ }
+
+ if ( NULL != desc )
+ {
+ ret = useBuffers(CameraAdapter::CAMERA_MEASUREMENT,
+ desc->mBuffers,
+ desc->mCount,
+ desc->mLength,
+ desc->mMaxQueueable);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDA("Use buffers for image capture");
+ desc = ( BuffersDescriptor * ) value1;
+
+ if ( NULL == desc )
+ {
+ CAMHAL_LOGEA("Invalid capture buffers!");
+ return -EINVAL;
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ Mutex::Autolock lock(mCaptureBufferLock);
+ mCaptureBuffers = (int *) desc->mBuffers;
+ mCaptureBuffersLength = desc->mLength;
+ mCaptureBuffersAvailable.clear();
+ for ( uint32_t i = 0 ; i < desc->mMaxQueueable ; i++ )
+ {
+ mCaptureBuffersAvailable.add(mCaptureBuffers[i], 0);
+ }
+            // initial ref count for undequeued buffers is 1 since the buffer provider
+ // is still holding on to it
+ for ( uint32_t i = desc->mMaxQueueable ; i < desc->mCount ; i++ )
+ {
+ mCaptureBuffersAvailable.add(mCaptureBuffers[i], 1);
+ }
+ }
+
+ if ( NULL != desc )
+ {
+ ret = useBuffers(CameraAdapter::CAMERA_IMAGE_CAPTURE,
+ desc->mBuffers,
+ desc->mCount,
+ desc->mLength,
+ desc->mMaxQueueable);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_START_SMOOTH_ZOOM:
+ {
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = startSmoothZoom(value1);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM:
+ {
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = stopSmoothZoom();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_START_PREVIEW:
+ {
+
+ CAMHAL_LOGDA("Start Preview");
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = startPreview();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_STOP_PREVIEW:
+ {
+
+ CAMHAL_LOGDA("Stop Preview");
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = stopPreview();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_START_VIDEO:
+ {
+
+ CAMHAL_LOGDA("Start video recording");
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = startVideoCapture();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_STOP_VIDEO:
+ {
+
+ CAMHAL_LOGDA("Stop video recording");
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = stopVideoCapture();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS:
+ {
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = flushBuffers();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_START_IMAGE_CAPTURE:
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ refTimestamp = ( struct timeval * ) value1;
+ if ( NULL != refTimestamp )
+ {
+ memcpy( &mStartCapture, refTimestamp, sizeof( struct timeval ));
+ }
+
+#endif
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = takePicture();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE:
+ {
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = stopImageCapture();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_START_BRACKET_CAPTURE:
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ refTimestamp = ( struct timeval * ) value2;
+ if ( NULL != refTimestamp )
+ {
+ memcpy( &mStartCapture, refTimestamp, sizeof( struct timeval ));
+ }
+
+#endif
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = startBracketing(value1);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE:
+ {
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = stopBracketing();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ }
+
+ case CameraAdapter::CAMERA_PERFORM_AUTOFOCUS:
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ refTimestamp = ( struct timeval * ) value1;
+ if ( NULL != refTimestamp )
+ {
+ memcpy( &mStartFocus, refTimestamp, sizeof( struct timeval ));
+ }
+
+#endif
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = autoFocus();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_CANCEL_AUTOFOCUS:
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = cancelAutoFocus();
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW:
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ frame = ( CameraFrame * ) value1;
+
+ if ( NULL != frame )
+ {
+ ret = getFrameSize(frame->mWidth, frame->mHeight);
+ }
+ else
+ {
+ ret = -EINVAL;
+ }
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ frame = ( CameraFrame * ) value1;
+
+ if ( NULL != frame )
+ {
+ ret = getPictureBufferSize(frame->mLength, value2);
+ }
+ else
+ {
+ ret = -EINVAL;
+ }
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
+
+ if ( ret == NO_ERROR )
+ {
+ ret = setState(operation);
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ frame = ( CameraFrame * ) value1;
+
+ if ( NULL != frame )
+ {
+ ret = getFrameDataSize(frame->mLength, value2);
+ }
+ else
+ {
+ ret = -EINVAL;
+ }
+ }
+
+ if ( ret == NO_ERROR )
+ {
+ ret = commitState();
+ }
+ else
+ {
+ ret |= rollbackState();
+ }
+
+ break;
+
+ case CameraAdapter::CAMERA_START_FD:
+
+ ret = startFaceDetection();
+
+ break;
+
+ case CameraAdapter::CAMERA_STOP_FD:
+
+ ret = stopFaceDetection();
+
+ break;
+
+ case CameraAdapter::CAMERA_SWITCH_TO_EXECUTING:
+ ret = switchToExecuting();
+ break;
+
+ default:
+ CAMHAL_LOGEB("Command 0x%x unsupported!", operation);
+ break;
+ };
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
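+// The notify*Subscribers() helpers below share one pattern: allocate a
+// CameraHalEventData, fill in the event type and payload, then invoke every
+// registered event_callback with that subscriber's cookie. They return
+// NO_INIT when no subscribers are registered and -ENOMEM when the event data
+// cannot be allocated.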
+status_t BaseCameraAdapter::notifyFocusSubscribers(bool status)
+{
+ event_callback eventCb;
+ CameraHalEvent focusEvent;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mFocusSubscribers.size() == 0 ) {
+ CAMHAL_LOGDA("No Focus Subscribers!");
+ return NO_INIT;
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //dump the AF latency
+ CameraHal::PPM("Focus finished in: ", &mStartFocus);
+
+#endif
+
+ focusEvent.mEventData = new CameraHalEvent::CameraHalEventData();
+ if ( NULL == focusEvent.mEventData.get() ) {
+ return -ENOMEM;
+ }
+
+ focusEvent.mEventType = CameraHalEvent::EVENT_FOCUS_LOCKED;
+ focusEvent.mEventData->focusEvent.focusLocked = status;
+ focusEvent.mEventData->focusEvent.focusError = !status;
+
+ for (unsigned int i = 0 ; i < mFocusSubscribers.size(); i++ )
+ {
+ focusEvent.mCookie = (void *) mFocusSubscribers.keyAt(i);
+ eventCb = (event_callback) mFocusSubscribers.valueAt(i);
+ eventCb ( &focusEvent );
+ }
+
+ focusEvent.mEventData.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::notifyShutterSubscribers()
+{
+ CameraHalEvent shutterEvent;
+ event_callback eventCb;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mShutterSubscribers.size() == 0 )
+ {
+ CAMHAL_LOGEA("No shutter Subscribers!");
+ return NO_INIT;
+ }
+
+ shutterEvent.mEventData = new CameraHalEvent::CameraHalEventData();
+ if ( NULL == shutterEvent.mEventData.get() ) {
+ return -ENOMEM;
+ }
+
+ shutterEvent.mEventType = CameraHalEvent::EVENT_SHUTTER;
+ shutterEvent.mEventData->shutterEvent.shutterClosed = true;
+
+ for (unsigned int i = 0 ; i < mShutterSubscribers.size() ; i++ ) {
+ shutterEvent.mCookie = ( void * ) mShutterSubscribers.keyAt(i);
+ eventCb = ( event_callback ) mShutterSubscribers.valueAt(i);
+
+ CAMHAL_LOGEA("Sending shutter callback");
+
+ eventCb ( &shutterEvent );
+ }
+
+ shutterEvent.mEventData.clear();
+
+    LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::notifyZoomSubscribers(int zoomIdx, bool targetReached)
+{
+ event_callback eventCb;
+ CameraHalEvent zoomEvent;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mZoomSubscribers.size() == 0 ) {
+ CAMHAL_LOGDA("No zoom Subscribers!");
+ return NO_INIT;
+ }
+
+ zoomEvent.mEventData = new CameraHalEvent::CameraHalEventData();
+ if ( NULL == zoomEvent.mEventData.get() ) {
+ return -ENOMEM;
+ }
+
+ zoomEvent.mEventType = CameraHalEvent::EVENT_ZOOM_INDEX_REACHED;
+ zoomEvent.mEventData->zoomEvent.currentZoomIndex = zoomIdx;
+ zoomEvent.mEventData->zoomEvent.targetZoomIndexReached = targetReached;
+
+ for (unsigned int i = 0 ; i < mZoomSubscribers.size(); i++ ) {
+ zoomEvent.mCookie = (void *) mZoomSubscribers.keyAt(i);
+ eventCb = (event_callback) mZoomSubscribers.valueAt(i);
+
+ eventCb ( &zoomEvent );
+ }
+
+ zoomEvent.mEventData.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::notifyFaceSubscribers(sp<CameraFDResult> &faces)
+{
+ event_callback eventCb;
+ CameraHalEvent faceEvent;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( mFaceSubscribers.size() == 0 ) {
+ CAMHAL_LOGDA("No face detection subscribers!");
+ return NO_INIT;
+ }
+
+ faceEvent.mEventData = new CameraHalEvent::CameraHalEventData();
+ if ( NULL == faceEvent.mEventData.get() ) {
+ return -ENOMEM;
+ }
+
+ faceEvent.mEventType = CameraHalEvent::EVENT_FACE;
+ faceEvent.mEventData->faceEvent = faces;
+
+ for (unsigned int i = 0 ; i < mFaceSubscribers.size(); i++ ) {
+ faceEvent.mCookie = (void *) mFaceSubscribers.keyAt(i);
+ eventCb = (event_callback) mFaceSubscribers.valueAt(i);
+
+ eventCb ( &faceEvent );
+ }
+
+ faceEvent.mEventData.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
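+// sendFrameToSubscribers() walks frame->mFrameMask bit by bit, routes each
+// frame type to its subscriber list via __sendFrameToSubscribers() and clears
+// the bit once that frame type has been delivered.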
+status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame)
+{
+ status_t ret = NO_ERROR;
+ unsigned int mask;
+
+ if ( NULL == frame )
+ {
+ CAMHAL_LOGEA("Invalid CameraFrame");
+ return -EINVAL;
+ }
+
+ for( mask = 1; mask < CameraFrame::ALL_FRAMES; mask <<= 1){
+ if( mask & frame->mFrameMask ){
+ switch( mask ){
+
+ case CameraFrame::IMAGE_FRAME:
+ {
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ CameraHal::PPM("Shot to Jpeg: ", &mStartCapture);
+#endif
+ ret = __sendFrameToSubscribers(frame, &mImageSubscribers, CameraFrame::IMAGE_FRAME);
+ }
+ break;
+ case CameraFrame::RAW_FRAME:
+ {
+ ret = __sendFrameToSubscribers(frame, &mRawSubscribers, CameraFrame::RAW_FRAME);
+ }
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ {
+ ret = __sendFrameToSubscribers(frame, &mFrameSubscribers, CameraFrame::PREVIEW_FRAME_SYNC);
+ }
+ break;
+ case CameraFrame::SNAPSHOT_FRAME:
+ {
+ ret = __sendFrameToSubscribers(frame, &mFrameSubscribers, CameraFrame::SNAPSHOT_FRAME);
+ }
+ break;
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ {
+ ret = __sendFrameToSubscribers(frame, &mVideoSubscribers, CameraFrame::VIDEO_FRAME_SYNC);
+ }
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ {
+ ret = __sendFrameToSubscribers(frame, &mFrameDataSubscribers, CameraFrame::FRAME_DATA_SYNC);
+ }
+ break;
+ default:
+ CAMHAL_LOGEB("FRAMETYPE NOT SUPPORTED 0x%x", mask);
+ break;
+ }//SWITCH
+ frame->mFrameMask &= ~mask;
+
+ if (ret != NO_ERROR) {
+ goto EXIT;
+ }
+ }//IF
+ }//FOR
+
+ EXIT:
+ return ret;
+}
+
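+// The per-buffer reference count for the given frame type determines how many
+// subscriber callbacks receive this frame; a count of zero, or a count larger
+// than the subscriber list, is rejected as inconsistent. Preview, video and
+// snapshot frames also pick up their YUV plane pointers from the frame queue
+// entry for this buffer.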
+status_t BaseCameraAdapter::__sendFrameToSubscribers(CameraFrame* frame,
+ KeyedVector<int, frame_callback> *subscribers,
+ CameraFrame::FrameType frameType)
+{
+ size_t refCount = 0;
+ status_t ret = NO_ERROR;
+ frame_callback callback = NULL;
+
+ frame->mFrameType = frameType;
+
+ if ( (frameType == CameraFrame::PREVIEW_FRAME_SYNC) ||
+ (frameType == CameraFrame::VIDEO_FRAME_SYNC) ||
+ (frameType == CameraFrame::SNAPSHOT_FRAME) ){
+ if (mFrameQueue.size() > 0){
+ CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(frame->mBuffer);
+ frame->mYuv[0] = lframe->mYuv[0];
+ frame->mYuv[1] = lframe->mYuv[1];
+ }
+ else{
+ CAMHAL_LOGEA("Empty Frame Queue");
+ return -EINVAL;
+ }
+ }
+
+ if (NULL != subscribers) {
+ refCount = getFrameRefCount(frame->mBuffer, frameType);
+
+ if (refCount == 0) {
+ CAMHAL_LOGDA("Invalid ref count of 0");
+ return -EINVAL;
+ }
+
+ if (refCount > subscribers->size()) {
+ CAMHAL_LOGEB("Invalid ref count for frame type: 0x%x", frameType);
+ return -EINVAL;
+ }
+
+ CAMHAL_LOGVB("Type of Frame: 0x%x address: 0x%x refCount start %d",
+ frame->mFrameType,
+ ( uint32_t ) frame->mBuffer,
+ refCount);
+
+ for ( unsigned int i = 0 ; i < refCount; i++ ) {
+ frame->mCookie = ( void * ) subscribers->keyAt(i);
+ callback = (frame_callback) subscribers->valueAt(i);
+
+ if (!callback) {
+ CAMHAL_LOGEB("callback not set for frame type: 0x%x", frameType);
+ return -EINVAL;
+ }
+
+ callback(frame);
+ }
+ } else {
+        CAMHAL_LOGEA("Subscriber list is NULL!");
+ return -EINVAL;
+ }
+
+ return ret;
+}
+
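+// setInitFrameRefCount() seeds the buffer's reference count, for every frame
+// type present in the mask, to the current number of subscribers of that type
+// before the frame is dispatched.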
+int BaseCameraAdapter::setInitFrameRefCount(void* buf, unsigned int mask)
+{
+ int ret = NO_ERROR;
+ unsigned int lmask;
+
+ LOG_FUNCTION_NAME;
+
+ if (buf == NULL)
+ {
+ return -EINVAL;
+ }
+
+ for( lmask = 1; lmask < CameraFrame::ALL_FRAMES; lmask <<= 1){
+ if( lmask & mask ){
+ switch( lmask ){
+
+ case CameraFrame::IMAGE_FRAME:
+ {
+ setFrameRefCount(buf, CameraFrame::IMAGE_FRAME, (int) mImageSubscribers.size());
+ }
+ break;
+ case CameraFrame::RAW_FRAME:
+ {
+ setFrameRefCount(buf, CameraFrame::RAW_FRAME, mRawSubscribers.size());
+ }
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ {
+ setFrameRefCount(buf, CameraFrame::PREVIEW_FRAME_SYNC, mFrameSubscribers.size());
+ }
+ break;
+ case CameraFrame::SNAPSHOT_FRAME:
+ {
+ setFrameRefCount(buf, CameraFrame::SNAPSHOT_FRAME, mFrameSubscribers.size());
+ }
+ break;
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ {
+ setFrameRefCount(buf,CameraFrame::VIDEO_FRAME_SYNC, mVideoSubscribers.size());
+ }
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ {
+ setFrameRefCount(buf, CameraFrame::FRAME_DATA_SYNC, mFrameDataSubscribers.size());
+ }
+ break;
+ default:
+ CAMHAL_LOGEB("FRAMETYPE NOT SUPPORTED 0x%x", lmask);
+ break;
+ }//SWITCH
+ mask &= ~lmask;
+ }//IF
+ }//FOR
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int BaseCameraAdapter::getFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType)
+{
+ int res = -1;
+
+ LOG_FUNCTION_NAME;
+
+ switch ( frameType )
+ {
+ case CameraFrame::IMAGE_FRAME:
+ case CameraFrame::RAW_FRAME:
+ {
+ Mutex::Autolock lock(mCaptureBufferLock);
+ res = mCaptureBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ }
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ case CameraFrame::SNAPSHOT_FRAME:
+ {
+ Mutex::Autolock lock(mPreviewBufferLock);
+ res = mPreviewBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ }
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ {
+ Mutex::Autolock lock(mPreviewDataBufferLock);
+ res = mPreviewDataBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ }
+ break;
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ {
+ Mutex::Autolock lock(mVideoBufferLock);
+ res = mVideoBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
+ }
+ break;
+ default:
+ break;
+ };
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return res;
+}
+
+void BaseCameraAdapter::setFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType, int refCount)
+{
+
+ LOG_FUNCTION_NAME;
+
+ switch ( frameType )
+ {
+ case CameraFrame::IMAGE_FRAME:
+ case CameraFrame::RAW_FRAME:
+ {
+ Mutex::Autolock lock(mCaptureBufferLock);
+ mCaptureBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ }
+ break;
+ case CameraFrame::PREVIEW_FRAME_SYNC:
+ case CameraFrame::SNAPSHOT_FRAME:
+ {
+ Mutex::Autolock lock(mPreviewBufferLock);
+ mPreviewBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ }
+ break;
+ case CameraFrame::FRAME_DATA_SYNC:
+ {
+ Mutex::Autolock lock(mPreviewDataBufferLock);
+ mPreviewDataBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ }
+ break;
+ case CameraFrame::VIDEO_FRAME_SYNC:
+ {
+ Mutex::Autolock lock(mVideoBufferLock);
+ mVideoBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
+ }
+ break;
+ default:
+ break;
+ };
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
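+// Video capture reuses the preview buffers: startVideoCapture() registers
+// every available preview buffer with a video ref count of 0, and
+// stopVideoCapture() returns any frames still referenced before clearing the
+// list.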
+status_t BaseCameraAdapter::startVideoCapture()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mVideoBufferLock);
+
+ //If the capture is already ongoing, return from here.
+ if ( mRecording )
+ {
+ ret = NO_INIT;
+ }
+
+
+ if ( NO_ERROR == ret )
+ {
+
+ for ( unsigned int i = 0 ; i < mPreviewBuffersAvailable.size() ; i++ )
+ {
+ mVideoBuffersAvailable.add(mPreviewBuffersAvailable.keyAt(i), 0);
+ }
+
+ mRecording = true;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopVideoCapture()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( !mRecording )
+ {
+ ret = NO_INIT;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ for ( unsigned int i = 0 ; i < mVideoBuffersAvailable.size() ; i++ )
+ {
+ void *frameBuf = ( void * ) mVideoBuffersAvailable.keyAt(i);
+ if( getFrameRefCount(frameBuf, CameraFrame::VIDEO_FRAME_SYNC) > 0)
+ {
+ returnFrame(frameBuf, CameraFrame::VIDEO_FRAME_SYNC);
+ }
+ }
+
+ mVideoBuffersAvailable.clear();
+
+ mRecording = false;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+//-----------------Stub implementation of the interface ------------------------------
+
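+// The default implementations below simply return NO_ERROR, so a concrete
+// adapter only needs to override the operations it actually supports.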
+status_t BaseCameraAdapter::takePicture()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopImageCapture()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startBracketing(int range)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopBracketing()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::autoFocus()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ notifyFocusSubscribers(false);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::cancelAutoFocus()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startSmoothZoom(int targetIdx)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopSmoothZoom()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startPreview()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopPreview()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getFrameSize(size_t &width, size_t &height)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::startFaceDetection()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::stopFaceDetection()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::switchToExecuting()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
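+// setState() validates the requested operation against the current adapter
+// state and records the target state in mNextState. Roughly, the transitions
+// implemented below are:
+//   INTIALIZED -> LOADED_PREVIEW -> PREVIEW -> {AF, ZOOM, VIDEO, LOADED_CAPTURE}
+//   LOADED_CAPTURE -> CAPTURE / BRACKETING -> PREVIEW
+//   VIDEO -> {VIDEO_AF, VIDEO_ZOOM, VIDEO_LOADED_CAPTURE -> VIDEO_CAPTURE} -> VIDEO
+// mLock is taken here and released only by commitState() or rollbackState().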
+status_t BaseCameraAdapter::setState(CameraCommands operation)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mLock.lock();
+
+ switch ( mAdapterState )
+ {
+
+ case INTIALIZED_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_USE_BUFFERS_PREVIEW:
+ CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->LOADED_PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = LOADED_PREVIEW_STATE;
+ break;
+
+ //These events don't change the current state
+ case CAMERA_QUERY_RESOLUTION_PREVIEW:
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
+ CAMHAL_LOGDB("Adapter state switch INTIALIZED_STATE->INTIALIZED_STATE event = 0x%x",
+ operation);
+ mNextState = INTIALIZED_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch INTIALIZED_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case LOADED_PREVIEW_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_START_PREVIEW:
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ //These events don't change the current state
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA:
+ case CAMERA_USE_BUFFERS_PREVIEW_DATA:
+ CAMHAL_LOGDB("Adapter state switch LOADED_PREVIEW_STATE->LOADED_PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = LOADED_PREVIEW_STATE;
+ break;
+
+ default:
+                    CAMHAL_LOGEB("Adapter state switch LOADED_PREVIEW_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case PREVIEW_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_PREVIEW:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->INTIALIZED_STATE event = 0x%x",
+ operation);
+ mNextState = INTIALIZED_STATE;
+ break;
+
+ case CAMERA_PERFORM_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->AF_STATE event = 0x%x",
+ operation);
+ mNextState = AF_STATE;
+ break;
+
+ case CAMERA_START_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = ZOOM_STATE;
+ break;
+
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->LOADED_CAPTURE_STATE event = 0x%x",
+ operation);
+ mNextState = LOADED_CAPTURE_STATE;
+ break;
+
+ case CAMERA_START_VIDEO:
+ CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->VIDEO_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_STATE;
+ break;
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ case CAMERA_STOP_SMOOTH_ZOOM:
+                    CAMHAL_LOGDB("Adapter state switch PREVIEW_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ default:
+                    CAMHAL_LOGEB("Adapter state switch PREVIEW_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case LOADED_CAPTURE_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->CAPTURE_STATE event = 0x%x",
+ operation);
+ mNextState = CAPTURE_STATE;
+ break;
+
+ case CAMERA_START_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch LOADED_CAPTURE_STATE->BRACKETING_STATE event = 0x%x",
+ operation);
+ mNextState = BRACKETING_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch LOADED_CAPTURE_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case CAPTURE_STATE:
+
+ switch ( operation )
+ {
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ case CAMERA_STOP_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch CAPTURE_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch CAPTURE_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case BRACKETING_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_IMAGE_CAPTURE:
+ case CAMERA_STOP_BRACKET_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_START_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_STATE->CAPTURE_STATE event = 0x%x",
+ operation);
+ mNextState = CAPTURE_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch BRACKETING_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case AF_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_START_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch AF_STATE->AF_ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = AF_ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch AF_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+                        CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = ZOOM_STATE;
+ break;
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_PERFORM_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->AF_ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = AF_ZOOM_STATE;
+ break;
+
+ case CAMERA_START_VIDEO:
+ CAMHAL_LOGDB("Adapter state switch ZOOM_STATE->VIDEO_ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch ZOOM_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_VIDEO:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->PREVIEW_STATE event = 0x%x",
+ operation);
+ mNextState = PREVIEW_STATE;
+ break;
+
+ case CAMERA_PERFORM_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_AF_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_AF_STATE;
+ break;
+
+ case CAMERA_START_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_ZOOM_STATE;
+ break;
+
+ case CAMERA_USE_BUFFERS_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_LOADED_CAPTURE_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_LOADED_CAPTURE_STATE;
+ break;
+
+ case CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_STATE->VIDEO_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch VIDEO_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_AF_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_AF_STATE->VIDEO_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch VIDEO_AF_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_LOADED_CAPTURE_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_START_IMAGE_CAPTURE:
+                    CAMHAL_LOGDB("Adapter state switch VIDEO_LOADED_CAPTURE_STATE->VIDEO_CAPTURE_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_CAPTURE_STATE;
+ break;
+
+ default:
+                    CAMHAL_LOGEB("Adapter state switch VIDEO_LOADED_CAPTURE_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_CAPTURE_STATE:
+
+ switch ( operation )
+ {
+ case CAMERA_STOP_IMAGE_CAPTURE:
+                    CAMHAL_LOGDB("Adapter state switch VIDEO_CAPTURE_STATE->VIDEO_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_STATE;
+ break;
+
+ default:
+                    CAMHAL_LOGEB("Adapter state switch VIDEO_CAPTURE_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case AF_ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->AF_STATE event = 0x%x",
+ operation);
+ mNextState = AF_STATE;
+ break;
+
+ case CAMERA_CANCEL_AUTOFOCUS:
+ CAMHAL_LOGDB("Adapter state switch AF_ZOOM_STATE->ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch AF_ZOOM_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case VIDEO_ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->VIDEO_STATE event = 0x%x",
+ operation);
+ mNextState = VIDEO_STATE;
+ break;
+
+ case CAMERA_STOP_VIDEO:
+ CAMHAL_LOGDB("Adapter state switch VIDEO_ZOOM_STATE->ZOOM_STATE event = 0x%x",
+ operation);
+ mNextState = ZOOM_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch VIDEO_ZOOM_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ case BRACKETING_ZOOM_STATE:
+
+ switch ( operation )
+ {
+
+ case CAMERA_STOP_SMOOTH_ZOOM:
+ CAMHAL_LOGDB("Adapter state switch BRACKETING_ZOOM_STATE->BRACKETING_STATE event = 0x%x",
+ operation);
+ mNextState = BRACKETING_STATE;
+ break;
+
+ default:
+ CAMHAL_LOGEB("Adapter state switch BRACKETING_ZOOM_STATE Invalid Op! event = 0x%x",
+ operation);
+ ret = INVALID_OPERATION;
+ break;
+
+ }
+
+ break;
+
+ default:
+ CAMHAL_LOGEA("Invalid Adapter state!");
+ ret = INVALID_OPERATION;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
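+// rollbackToInitializedState() repeatedly undoes the current state through
+// rollbackToPreviousState() (stop preview/video/capture, cancel autofocus,
+// stop smooth zoom) until the adapter is back in INTIALIZED_STATE or an error
+// occurs.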
+status_t BaseCameraAdapter::rollbackToInitializedState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ while ((getState() != INTIALIZED_STATE) && (ret == NO_ERROR)) {
+ ret = rollbackToPreviousState();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::rollbackToPreviousState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ CameraAdapter::AdapterState currentState = getState();
+
+ switch (currentState) {
+ case INTIALIZED_STATE:
+ return NO_ERROR;
+
+ case PREVIEW_STATE:
+ ret = sendCommand(CAMERA_STOP_PREVIEW);
+ break;
+
+ case CAPTURE_STATE:
+ ret = sendCommand(CAMERA_STOP_IMAGE_CAPTURE);
+ break;
+
+ case BRACKETING_STATE:
+ ret = sendCommand(CAMERA_STOP_BRACKET_CAPTURE);
+ break;
+
+ case AF_STATE:
+ ret = sendCommand(CAMERA_CANCEL_AUTOFOCUS);
+ break;
+
+ case ZOOM_STATE:
+ ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM);
+ break;
+
+ case VIDEO_STATE:
+ ret = sendCommand(CAMERA_STOP_VIDEO);
+ break;
+
+ case VIDEO_AF_STATE:
+ ret = sendCommand(CAMERA_CANCEL_AUTOFOCUS);
+ break;
+
+ case VIDEO_CAPTURE_STATE:
+ ret = sendCommand(CAMERA_STOP_IMAGE_CAPTURE);
+ break;
+
+ case AF_ZOOM_STATE:
+ ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM);
+ break;
+
+ case VIDEO_ZOOM_STATE:
+ ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM);
+ break;
+
+ case BRACKETING_ZOOM_STATE:
+ ret = sendCommand(CAMERA_STOP_SMOOTH_ZOOM);
+ break;
+
+ default:
+ CAMHAL_LOGEA("Invalid Adapter state!");
+ ret = INVALID_OPERATION;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+//State transition finished successfully.
+//Commit the state and unlock the adapter state.
+status_t BaseCameraAdapter::commitState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mAdapterState = mNextState;
+
+ mLock.unlock();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::rollbackState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ mNextState = mAdapterState;
+
+ mLock.unlock();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+// getNextState() and getState()
+// publicly exposed functions to retrieve the adapter states;
+// note that these functions take the adapter lock
+CameraAdapter::AdapterState BaseCameraAdapter::getState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mAdapterState;
+}
+
+CameraAdapter::AdapterState BaseCameraAdapter::getNextState()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mNextState;
+}
+
+// getNextState() and getState()
+// internal protected functions to retrieve the adapter states
+// note that these functions are NOT locked, so that internal
+// functions can query the state in the middle of a state
+// transition
+status_t BaseCameraAdapter::getState(AdapterState &state)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ state = mAdapterState;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t BaseCameraAdapter::getNextState(AdapterState &state)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ state = mNextState;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void BaseCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
+{
+ LOG_FUNCTION_NAME;
+ LOG_FUNCTION_NAME_EXIT;
+}
+//-----------------------------------------------------------------------------
+
+
+
+};
+
+/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/CameraHal.cpp b/CameraHal.cpp
new file mode 100644
index 0000000..0a1d0fa
--- a/dev/null
+++ b/CameraHal.cpp
@@ -0,0 +1,3516 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraHal.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "ANativeWindowDisplayAdapter.h"
+#include "TICameraParameters.h"
+#include "CameraProperties.h"
+#include <cutils/properties.h>
+
+#include <poll.h>
+#include <math.h>
+
+namespace android {
+
+extern "C" CameraAdapter* CameraAdapter_Factory(size_t);
+
+/*****************************************************************************/
+
+////Constant definitions and declarations
+////@todo Have a CameraProperties class to store these parameters as constants for every camera
+//// Currently, they are hard-coded
+
+const int CameraHal::NO_BUFFERS_PREVIEW = MAX_CAMERA_BUFFERS;
+const int CameraHal::NO_BUFFERS_IMAGE_CAPTURE = 2;
+
+const uint32_t MessageNotifier::EVENT_BIT_FIELD_POSITION = 0;
+const uint32_t MessageNotifier::FRAME_BIT_FIELD_POSITION = 0;
+
+/******************************************************************************/
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+struct timeval CameraHal::mStartPreview;
+struct timeval CameraHal::mStartFocus;
+struct timeval CameraHal::mStartCapture;
+
+#endif
+
+static void orientation_cb(uint32_t orientation, uint32_t tilt, void* cookie) {
+ CameraHal *camera = NULL;
+
+ if (cookie) {
+ camera = (CameraHal*) cookie;
+ camera->onOrientationEvent(orientation, tilt);
+ }
+
+}
+/*-------------Camera Hal Interface Method definitions STARTS here--------------------*/
+
+/**
+ Callback function to receive orientation events from SensorListener
+ */
+void CameraHal::onOrientationEvent(uint32_t orientation, uint32_t tilt) {
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mCameraAdapter ) {
+ mCameraAdapter->onOrientationEvent(orientation, tilt);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Set the notification and data callbacks
+
+ @param[in] notify_cb Notify callback for notifying the app about events and errors
+ @param[in] data_cb Buffer callback for sending the preview/raw frames to the app
+ @param[in] data_cb_timestamp Buffer callback for sending the video frames w/ timestamp
+ @param[in] user Callback cookie
+ @return none
+
+ */
+void CameraHal::setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mAppCallbackNotifier.get() )
+ {
+ mAppCallbackNotifier->setCallbacks(this,
+ notify_cb,
+ data_cb,
+ data_cb_timestamp,
+ get_memory,
+ user);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Enable a message, or set of messages.
+
+ @param[in] msgtype Bitmask of the messages to enable (defined in include/ui/Camera.h)
+ @return none
+
+ */
+void CameraHal::enableMsgType(int32_t msgType)
+{
+ LOG_FUNCTION_NAME;
+
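+    // Strip the shutter message when shutter callbacks are disabled so it is
+    // never forwarded to the app callback notifier.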
+ if ( ( msgType & CAMERA_MSG_SHUTTER ) && ( !mShutterEnabled ) )
+ {
+ msgType &= ~CAMERA_MSG_SHUTTER;
+ }
+
+ // ignoring enable focus message from camera service
+ // we will enable internally in autoFocus call
+ if (msgType & CAMERA_MSG_FOCUS) {
+ msgType &= ~CAMERA_MSG_FOCUS;
+ }
+
+ {
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled |= msgType;
+ }
+
+ if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ if(mDisplayPaused)
+ {
+ CAMHAL_LOGDA("Preview currently paused...will enable preview callback when restarted");
+ msgType &= ~CAMERA_MSG_PREVIEW_FRAME;
+ }else
+ {
+ CAMHAL_LOGDA("Enabling Preview Callback");
+ }
+ }
+ else
+ {
+ CAMHAL_LOGDB("Preview callback not enabled %x", msgType);
+ }
+
+
+ ///Configure app callback notifier with the message callback required
+ mAppCallbackNotifier->enableMsgType (msgType);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Disable a message, or set of messages.
+
+ @param[in] msgtype Bitmask of the messages to disable (defined in include/ui/Camera.h)
+ @return none
+
+ */
+void CameraHal::disableMsgType(int32_t msgType)
+{
+ LOG_FUNCTION_NAME;
+
+ {
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled &= ~msgType;
+ }
+
+ if( msgType & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ CAMHAL_LOGDA("Disabling Preview Callback");
+ }
+
+ ///Configure app callback notifier
+ mAppCallbackNotifier->disableMsgType (msgType);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Query whether a message, or a set of messages, is enabled.
+
+   Note that this operates as an AND: if any of the messages queried are off, this will
+ return false.
+
+ @param[in] msgtype Bitmask of the messages to query (defined in include/ui/Camera.h)
+ @return true If all message types are enabled
+           false If any message type is disabled
+
+ */
+int CameraHal::msgTypeEnabled(int32_t msgType)
+{
+ LOG_FUNCTION_NAME;
+ Mutex::Autolock lock(mLock);
+ LOG_FUNCTION_NAME_EXIT;
+ return (mMsgEnabled & msgType);
+}
+
+/**
+ @brief Set the camera parameters.
+
+ @param[in] params Camera parameters to configure the camera
+ @return NO_ERROR
+ @todo Define error codes
+
+ */
+int CameraHal::setParameters(const char* parameters)
+{
+
+ LOG_FUNCTION_NAME;
+
+ CameraParameters params;
+
+ String8 str_params(parameters);
+ params.unflatten(str_params);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return setParameters(params);
+}
+
+/**
+ @brief Set the camera parameters.
+
+ @param[in] params Camera parameters to configure the camera
+ @return NO_ERROR
+ @todo Define error codes
+
+ */
+int CameraHal::setParameters(const CameraParameters& params)
+{
+
+ LOG_FUNCTION_NAME;
+
+ int w, h;
+ int w_orig, h_orig;
+ int framerate,minframerate;
+ int maxFPS, minFPS;
+ int error;
+ int base;
+ const char *valstr = NULL;
+ const char *prevFormat;
+ char *af_coord;
+ TIUTILS::Message msg;
+ status_t ret = NO_ERROR;
+ // Needed for KEY_RECORDING_HINT
+ bool restartPreviewRequired = false;
+ bool updateRequired = false;
+ CameraParameters oldParams(mParameters.flatten());
+ bool videoMode = false;
+ char range[MAX_PROP_VALUE_LENGTH];
+
+ {
+ Mutex::Autolock lock(mLock);
+
+ ///Ensure that preview is not enabled when the below parameters are changed.
+ if(!previewEnabled())
+ {
+
+ CAMHAL_LOGDB("PreviewFormat %s", params.getPreviewFormat());
+
+ if ((valstr = params.getPreviewFormat()) != NULL) {
+ if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS))) {
+ mParameters.setPreviewFormat(valstr);
+ } else {
+                CAMHAL_LOGEB("Invalid preview format. Supported: %s", mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ return -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_VNF)) != NULL) {
+ if ( (params.getInt(TICameraParameters::KEY_VNF)==0) || (params.getInt(TICameraParameters::KEY_VNF)==1) ) {
+ CAMHAL_LOGDB("VNF set %s", params.get(TICameraParameters::KEY_VNF));
+ mParameters.set(TICameraParameters::KEY_VNF, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid VNF: %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) {
+ // make sure we support vstab...if we don't and application is trying to set
+ // vstab then return an error
+ if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
+ CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGDB("VSTAB %s",
+ params.get(CameraParameters::KEY_VIDEO_STABILIZATION));
+ mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
+ params.get(CameraParameters::KEY_VIDEO_STABILIZATION));
+ } else if (strcmp(valstr, CameraParameters::TRUE) == 0) {
+ CAMHAL_LOGEB("ERROR: Invalid VSTAB: %s", valstr);
+ ret = -EINVAL;
+ } else {
+ mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
+ CameraParameters::FALSE);
+ }
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_CAP_MODE)) != NULL)
+ {
+ CAMHAL_LOGDB("Capture mode set %s", params.get(TICameraParameters::KEY_CAP_MODE));
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, valstr);
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_IPP)) != NULL) {
+ if (isParameterValid(valstr,mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES))) {
+ CAMHAL_LOGDB("IPP mode set %s", params.get(TICameraParameters::KEY_IPP));
+ mParameters.set(TICameraParameters::KEY_IPP, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid IPP mode: %s. Supported: %s", valstr,
+ mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES));
+ ret = -EINVAL;
+ }
+ }
+
+ if((valstr = params.get(TICameraParameters::KEY_S3D2D_PREVIEW)) != NULL)
+ {
+ CAMHAL_LOGDB("Stereo 3D->2D Preview mode is %s", params.get(TICameraParameters::KEY_S3D2D_PREVIEW));
+ mParameters.set(TICameraParameters::KEY_S3D2D_PREVIEW, valstr);
+ }
+
+ if((valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE)) != NULL)
+ {
+ CAMHAL_LOGDB("AutoConvergence mode is %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE));
+ mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE, valstr);
+ }
+
+ }
+
+ params.getPreviewSize(&w, &h);
+ if (w == -1 && h == -1) {
+ CAMHAL_LOGEA("Unable to get preview size");
+ return -EINVAL;
+ }
+
+ int oldWidth, oldHeight;
+ mParameters.getPreviewSize(&oldWidth, &oldHeight);
+
+ int orientation =0;
+ if((valstr = params.get(TICameraParameters::KEY_SENSOR_ORIENTATION)) != NULL)
+ {
+ CAMHAL_LOGDB("Sensor Orientation is set to %s", params.get(TICameraParameters::KEY_SENSOR_ORIENTATION));
+ mParameters.set(TICameraParameters::KEY_SENSOR_ORIENTATION, valstr);
+ orientation = params.getInt(TICameraParameters::KEY_SENSOR_ORIENTATION);
+ }
+
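+    // For 90/270 sensor orientations the supported-size check is done with
+    // width and height swapped, presumably because the sensor output is
+    // rotated relative to the preview surface.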
+ if(orientation ==90 || orientation ==270)
+ {
+ if ( !isResolutionValid(h,w, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
+ {
+ CAMHAL_LOGEB("Invalid preview resolution %d x %d. Supported: %s", w, h,
+ mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+ return -EINVAL;
+ }
+ else
+ {
+ mParameters.setPreviewSize(w, h);
+ mVideoWidth = w;
+ mVideoHeight = h;
+ }
+ }
+ else
+ {
+ if ( !isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES)))
+ {
+ CAMHAL_LOGEB("Invalid preview resolution2 %d x %d. Supported: %s", w, h,
+ mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+ return -EINVAL;
+ }
+ else
+ {
+ mParameters.setPreviewSize(w, h);
+ }
+ }
+
+ if ( ( oldWidth != w ) || ( oldHeight != h ) )
+ {
+ restartPreviewRequired |= true;
+ }
+
+ CAMHAL_LOGDB("PreviewResolution by App %d x %d", w, h);
+
+ // Handle RECORDING_HINT to Set/Reset Video Mode Parameters
+ valstr = params.get(CameraParameters::KEY_RECORDING_HINT);
+ if(valstr != NULL)
+ {
+ if(strcmp(valstr, CameraParameters::TRUE) == 0)
+ {
+ CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
+ mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr);
+ videoMode = true;
+ int w, h;
+
+ params.getPreviewSize(&w, &h);
+ CAMHAL_LOGVB("%s Preview Width=%d Height=%d\n", __FUNCTION__, w, h);
+ //HACK FOR MMS
+ mVideoWidth = w;
+ mVideoHeight = h;
+ CAMHAL_LOGVB("%s Video Width=%d Height=%d\n", __FUNCTION__, mVideoWidth, mVideoHeight);
+
+ setPreferredPreviewRes(w, h);
+ mParameters.getPreviewSize(&w, &h);
+ CAMHAL_LOGVB("%s Preview Width=%d Height=%d\n", __FUNCTION__, w, h);
+ //Avoid restarting preview for MMS HACK
+ if ((w != mVideoWidth) && (h != mVideoHeight))
+ {
+ restartPreviewRequired = false;
+ }
+
+ restartPreviewRequired |= setVideoModeParameters(params);
+ }
+ else if(strcmp(valstr, CameraParameters::FALSE) == 0)
+ {
+ CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
+ mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr);
+ restartPreviewRequired |= resetVideoModeParameters();
+ params.getPreviewSize(&mVideoWidth, &mVideoHeight);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Invalid RECORDING_HINT");
+ return -EINVAL;
+ }
+ }
+ else
+ {
+        // This check is required in the following case:
+        // if a VideoRecording activity sets KEY_RECORDING_HINT to TRUE and
+        // an ImageCapture activity does not set KEY_RECORDING_HINT to FALSE (i.e. leaves it NULL),
+        // then video mode parameters may remain in effect in the ImageCapture activity as well.
+ CAMHAL_LOGDA("Recording Hint is set to NULL");
+ mParameters.set(CameraParameters::KEY_RECORDING_HINT, "");
+ restartPreviewRequired |= resetVideoModeParameters();
+ params.getPreviewSize(&mVideoWidth, &mVideoHeight);
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES))) {
+ CAMHAL_LOGDB("Focus mode set %s", params.get(CameraParameters::KEY_FOCUS_MODE));
+
+ // we need to take a decision on the capture mode based on whether CAF picture or
+ // video is chosen so the behavior of each is consistent to the application
+ if(strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0){
+ restartPreviewRequired |= resetVideoModeParameters();
+ } else if (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0){
+ restartPreviewRequired |= setVideoModeParameters(params);
+ }
+
+ mParameters.set(CameraParameters::KEY_FOCUS_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid FOCUS mode = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ ///Below parameters can be changed when the preview is running
+ if ( (valstr = params.getPictureFormat()) != NULL ) {
+ if (isParameterValid(params.getPictureFormat(),mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS))) {
+ mParameters.setPictureFormat(valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid picture format: %s",valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ params.getPictureSize(&w, &h);
+ if ( isResolutionValid(w, h, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES))) {
+ mParameters.setPictureSize(w, h);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid picture resolution %dx%d", w, h);
+ ret = -EINVAL;
+ }
+
+ CAMHAL_LOGDB("Picture Size by App %d x %d", w, h);
+
+ if ((valstr = params.get(TICameraParameters::KEY_BURST)) != NULL) {
+ if (params.getInt(TICameraParameters::KEY_BURST) >=0) {
+ CAMHAL_LOGDB("Burst set %s", params.get(TICameraParameters::KEY_BURST));
+ mParameters.set(TICameraParameters::KEY_BURST, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Burst value: %s",valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ framerate = params.getPreviewFrameRate();
+ valstr = params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE);
+ CAMHAL_LOGDB("FRAMERATE %d", framerate);
+
+ CAMHAL_LOGDB("Passed FRR: %s, Supported FRR %s", valstr
+ , mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+ CAMHAL_LOGDB("Passed FR: %d, Supported FR %s", framerate
+ , mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+
+
+ if (valstr == NULL)
+ valstr = "";
+ //Perform parameter validation
+ if(!isParameterValid(valstr
+ , mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED))
+ || !isParameterValid(framerate,
+ mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES)))
+ {
+ CAMHAL_LOGEA("Invalid frame rate range or frame rate");
+ return -EINVAL;
+ }
+
+ // Variable framerate ranges have higher priority over
+ // deprecated constant FPS. "KEY_PREVIEW_FPS_RANGE" should
+ // be cleared by the client in order for constant FPS to get
+ // applied.
+ if ( strcmp(valstr, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE)) != 0)
+ {
+ // APP wants to set FPS range
+ //Set framerate = MAXFPS
+ CAMHAL_LOGDA("APP IS CHANGING FRAME RATE RANGE");
+ params.getPreviewFpsRange(&minFPS, &maxFPS);
+
+ if ( ( 0 > minFPS ) || ( 0 > maxFPS ) )
+ {
+ CAMHAL_LOGEA("ERROR: FPS Range is negative!");
+ return -EINVAL;
+ }
+
+ framerate = maxFPS /CameraHal::VFR_SCALE;
+
+ }
+ else
+ {
+ if ( framerate != atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)) )
+ {
+
+ selectFPSRange(framerate, &minFPS, &maxFPS);
+ CAMHAL_LOGDB("Select FPS Range %d %d", minFPS, maxFPS);
+ }
+ else
+ {
+ if (videoMode) {
+ valstr = mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_VIDEO);
+ CameraParameters temp;
+ temp.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+ temp.getPreviewFpsRange(&minFPS, &maxFPS);
+ }
+ else {
+ valstr = mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_IMAGE);
+ CameraParameters temp;
+ temp.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+ temp.getPreviewFpsRange(&minFPS, &maxFPS);
+ }
+
+ framerate = maxFPS / CameraHal::VFR_SCALE;
+ }
+
+ }
+
+ CAMHAL_LOGDB("FPS Range = %s", valstr);
+ CAMHAL_LOGDB("DEFAULT FPS Range = %s", mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
+
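+    // The FPS range supplied by the application is in Android's fps x 1000
+    // units; dividing by VFR_SCALE (assumed to be 1000 here) converts it to
+    // whole frames per second before validation.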
+ minFPS /= CameraHal::VFR_SCALE;
+ maxFPS /= CameraHal::VFR_SCALE;
+
+ if ( ( 0 == minFPS ) || ( 0 == maxFPS ) )
+ {
+ CAMHAL_LOGEA("ERROR: FPS Range is invalid!");
+ ret = -EINVAL;
+ }
+
+ if ( maxFPS < minFPS )
+ {
+ CAMHAL_LOGEA("ERROR: Max FPS is smaller than Min FPS!");
+ ret = -EINVAL;
+ }
+ CAMHAL_LOGDB("SET FRAMERATE %d", framerate);
+ mParameters.setPreviewFrameRate(framerate);
+ valstr = params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE);
+    if (!valstr) valstr = "";
+    CAMHAL_LOGDB("Requested preview FPS range: %s", valstr);
+    mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+
+ CAMHAL_LOGDB("FPS Range [%d, %d]", minFPS, maxFPS);
+ mParameters.set(TICameraParameters::KEY_MINFRAMERATE, minFPS);
+ mParameters.set(TICameraParameters::KEY_MAXFRAMERATE, maxFPS);
+
+ if( ( valstr = params.get(TICameraParameters::KEY_GBCE) ) != NULL )
+ {
+ CAMHAL_LOGDB("GBCE Value = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GBCE, valstr);
+ }
+
+ if( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL )
+ {
+ CAMHAL_LOGDB("GLBCE Value = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_GLBCE, valstr);
+ }
+
+ ///Update the current parameter set
+ if( (valstr = params.get(TICameraParameters::KEY_AUTOCONVERGENCE)) != NULL)
+ {
+ CAMHAL_LOGDB("AutoConvergence Mode is set = %s", params.get(TICameraParameters::KEY_AUTOCONVERGENCE));
+ mParameters.set(TICameraParameters::KEY_AUTOCONVERGENCE, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES)) !=NULL )
+ {
+ CAMHAL_LOGDB("ManualConvergence Value = %s", params.get(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES));
+ mParameters.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, valstr);
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_EXPOSURE_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES))) {
+ CAMHAL_LOGDB("Exposure set = %s", valstr);
+ mParameters.set(TICameraParameters::KEY_EXPOSURE_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Exposure = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_WHITE_BALANCE)) != NULL) {
+ if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE))) {
+ CAMHAL_LOGDB("White balance set %s", valstr);
+ mParameters.set(CameraParameters::KEY_WHITE_BALANCE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid white balance = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_CONTRAST)) != NULL) {
+ if (params.getInt(TICameraParameters::KEY_CONTRAST) >= 0 ) {
+ CAMHAL_LOGDB("Contrast set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_CONTRAST, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Contrast = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr =params.get(TICameraParameters::KEY_SHARPNESS)) != NULL) {
+ if (params.getInt(TICameraParameters::KEY_SHARPNESS) >= 0 ) {
+ CAMHAL_LOGDB("Sharpness set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_SHARPNESS, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Sharpness = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_SATURATION)) != NULL) {
+ if (params.getInt(TICameraParameters::KEY_SATURATION) >= 0 ) {
+ CAMHAL_LOGDB("Saturation set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_SATURATION, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Saturation = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_BRIGHTNESS)) != NULL) {
+ if (params.getInt(TICameraParameters::KEY_BRIGHTNESS) >= 0 ) {
+ CAMHAL_LOGDB("Brightness set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_BRIGHTNESS, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Brightness = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_ANTIBANDING)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING))) {
+ CAMHAL_LOGDB("Antibanding set %s", valstr);
+ mParameters.set(CameraParameters::KEY_ANTIBANDING, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Antibanding = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(TICameraParameters::KEY_ISO)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES))) {
+ CAMHAL_LOGDB("ISO set %s", valstr);
+ mParameters.set(TICameraParameters::KEY_ISO, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid ISO = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_FOCUS_AREAS)) != NULL )
+ {
+ CAMHAL_LOGEB("Focus areas position set %s", params.get(CameraParameters::KEY_FOCUS_AREAS));
+ mParameters.set(CameraParameters::KEY_FOCUS_AREAS, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
+ {
+ CAMHAL_LOGDB("Measurements set to %s", params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE));
+ mParameters.set(TICameraParameters::KEY_MEASUREMENT_ENABLE, valstr);
+
+ if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_ENABLE) == 0)
+ {
+ mMeasurementEnabled = true;
+ }
+ else if (strcmp(valstr, (const char *) TICameraParameters::MEASUREMENT_DISABLE) == 0)
+ {
+ mMeasurementEnabled = false;
+ }
+ else
+ {
+ mMeasurementEnabled = false;
+ }
+
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL)
+ {
+ CAMHAL_LOGDB("Exposure compensation set %s", params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION));
+ mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr);
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_SCENE_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES))) {
+ CAMHAL_LOGDB("Scene mode set %s", valstr);
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(CameraParameters::KEY_SCENE_MODE),
+ updateRequired);
+ mParameters.set(CameraParameters::KEY_SCENE_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Scene mode = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_FLASH_MODE)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES))) {
+ CAMHAL_LOGDB("Flash mode set %s", valstr);
+ mParameters.set(CameraParameters::KEY_FLASH_MODE, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Flash mode = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_EFFECT)) != NULL) {
+ if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS))) {
+ CAMHAL_LOGDB("Effect set %s", valstr);
+ mParameters.set(CameraParameters::KEY_EFFECT, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Effect = %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_ROTATION)) != NULL)
+ && (params.getInt(CameraParameters::KEY_ROTATION) >=0))
+ {
+ CAMHAL_LOGDB("Rotation set %s", params.get(CameraParameters::KEY_ROTATION));
+ mParameters.set(CameraParameters::KEY_ROTATION, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_QUALITY)) != NULL)
+ && (params.getInt(CameraParameters::KEY_JPEG_QUALITY) >=0))
+ {
+ CAMHAL_LOGDB("Jpeg quality set %s", params.get(CameraParameters::KEY_JPEG_QUALITY));
+ mParameters.set(CameraParameters::KEY_JPEG_QUALITY, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH)) != NULL)
+ && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH) >=0))
+ {
+ CAMHAL_LOGDB("Thumbnail width set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH));
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT)) != NULL)
+ && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT) >=0))
+ {
+ CAMHAL_LOGDB("Thumbnail width set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT));
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, valstr);
+ }
+
+ if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY)) != NULL )
+ && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) >=0))
+ {
+ CAMHAL_LOGDB("Thumbnail quality set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY));
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, valstr);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_LATITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS latitude set %s", params.get(CameraParameters::KEY_GPS_LATITUDE));
+ mParameters.set(CameraParameters::KEY_GPS_LATITUDE, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_LATITUDE);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS longitude set %s", params.get(CameraParameters::KEY_GPS_LONGITUDE));
+ mParameters.set(CameraParameters::KEY_GPS_LONGITUDE, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_LONGITUDE);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS altitude set %s", params.get(CameraParameters::KEY_GPS_ALTITUDE));
+ mParameters.set(CameraParameters::KEY_GPS_ALTITUDE, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_ALTITUDE);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS timestamp set %s", params.get(CameraParameters::KEY_GPS_TIMESTAMP));
+ mParameters.set(CameraParameters::KEY_GPS_TIMESTAMP, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_TIMESTAMP);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GPS_DATESTAMP)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS datestamp set %s", params.get(TICameraParameters::KEY_GPS_DATESTAMP));
+ mParameters.set(TICameraParameters::KEY_GPS_DATESTAMP, valstr);
+ }else{
+ mParameters.remove(TICameraParameters::KEY_GPS_DATESTAMP);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS processing method set %s", params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD));
+ mParameters.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr);
+ }else{
+ mParameters.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GPS_MAPDATUM )) != NULL )
+ {
+ CAMHAL_LOGDB("GPS MAPDATUM set %s", params.get(TICameraParameters::KEY_GPS_MAPDATUM));
+ mParameters.set(TICameraParameters::KEY_GPS_MAPDATUM, valstr);
+ }else{
+ mParameters.remove(TICameraParameters::KEY_GPS_MAPDATUM);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_GPS_VERSION)) != NULL )
+ {
+ CAMHAL_LOGDB("GPS MAPDATUM set %s", params.get(TICameraParameters::KEY_GPS_VERSION));
+ mParameters.set(TICameraParameters::KEY_GPS_VERSION, valstr);
+ }else{
+ mParameters.remove(TICameraParameters::KEY_GPS_VERSION);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_EXIF_MODEL)) != NULL )
+ {
+ CAMHAL_LOGDB("EXIF Model set %s", params.get(TICameraParameters::KEY_EXIF_MODEL));
+ mParameters.set(TICameraParameters::KEY_EXIF_MODEL, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_EXIF_MAKE)) != NULL )
+ {
+ CAMHAL_LOGDB("EXIF Make set %s", params.get(TICameraParameters::KEY_EXIF_MAKE));
+ mParameters.set(TICameraParameters::KEY_EXIF_MAKE, valstr);
+ }
+
+ if( (valstr = params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE)) != NULL )
+ {
+ CAMHAL_LOGDB("Exposure Bracketing set %s", params.get(TICameraParameters::KEY_EXP_BRACKETING_RANGE));
+ mParameters.set(TICameraParameters::KEY_EXP_BRACKETING_RANGE, valstr);
+ }
+ else
+ {
+ mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
+ }
+
+ if ((valstr = params.get(CameraParameters::KEY_ZOOM)) != NULL ) {
+ if ((params.getInt(CameraParameters::KEY_ZOOM) >= 0 ) &&
+ (params.getInt(CameraParameters::KEY_ZOOM) <= mMaxZoomSupported )) {
+ CAMHAL_LOGDB("Zoom set %s", valstr);
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(CameraParameters::KEY_ZOOM),
+ updateRequired);
+ mParameters.set(CameraParameters::KEY_ZOOM, valstr);
+ } else {
+ CAMHAL_LOGEB("ERROR: Invalid Zoom: %s", valstr);
+ ret = -EINVAL;
+ }
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL )
+ {
+ CAMHAL_LOGDB("Auto Exposure Lock set %s", params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK));
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK),
+ updateRequired);
+ mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr);
+ }
+
+ if( (valstr = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL )
+ {
+ CAMHAL_LOGDB("Auto WhiteBalance Lock set %s", params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK));
+ doesSetParameterNeedUpdate(valstr,
+ mParameters.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK),
+ updateRequired);
+ mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr);
+ }
+ if( (valstr = params.get(CameraParameters::KEY_METERING_AREAS)) != NULL )
+ {
+ CAMHAL_LOGEB("Metering areas position set %s", params.get(CameraParameters::KEY_METERING_AREAS));
+ mParameters.set(CameraParameters::KEY_METERING_AREAS, valstr);
+ }
+
+LOGD("setParameters, 1 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraParameters::KEY_PICTURE_SIZE));
+ CameraParameters adapterParams = mParameters;
+
+ // Only send parameters to adapter if preview is already
+ // enabled or doesSetParameterNeedUpdate says so. Initial setParameters to camera adapter,
+ // will be called in startPreview()
+ // TODO(XXX): Need to identify other parameters that need update from camera adapter
+LOGD("setParameters mCameraAdapter=%#x mPreviewEnabled=%d updateRequired=%d", mCameraAdapter, mPreviewEnabled, updateRequired);
+ if ( (NULL != mCameraAdapter) && (mPreviewEnabled || updateRequired) ) {
+ ret |= mCameraAdapter->setParameters(adapterParams);
+ }
+LOGD("setParameters, 2 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraParameters::KEY_PICTURE_SIZE));
+
+ if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS) )
+ {
+ int posBracketRange = params.getInt(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS);
+ if ( 0 < posBracketRange )
+ {
+ mBracketRangePositive = posBracketRange;
+ }
+ }
+ CAMHAL_LOGDB("Positive bracketing range %d", mBracketRangePositive);
+
+
+ if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG) )
+ {
+ int negBracketRange = params.getInt(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG);
+ if ( 0 < negBracketRange )
+ {
+ mBracketRangeNegative = negBracketRange;
+ }
+ }
+ CAMHAL_LOGDB("Negative bracketing range %d", mBracketRangeNegative);
+
+ if( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL) &&
+ ( strcmp(valstr, TICameraParameters::BRACKET_ENABLE) == 0 ))
+ {
+ if ( !mBracketingEnabled )
+ {
+ CAMHAL_LOGDA("Enabling bracketing");
+ mBracketingEnabled = true;
+
+ //Wait for AF events to enable bracketing
+ if ( NULL != mCameraAdapter )
+ {
+ setEventProvider( CameraHalEvent::ALL_EVENTS, mCameraAdapter );
+ }
+ }
+ else
+ {
+ CAMHAL_LOGDA("Bracketing already enabled");
+ }
+ }
+ else if ( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL ) &&
+ ( strcmp(valstr, TICameraParameters::BRACKET_DISABLE) == 0 ))
+ {
+ CAMHAL_LOGDA("Disabling bracketing");
+
+ mBracketingEnabled = false;
+ stopImageBracketing();
+
+ //Remove AF events subscription
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification( CameraHalEvent::ALL_EVENTS );
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ }
+
+ if( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
+ ( strcmp(valstr, TICameraParameters::SHUTTER_ENABLE) == 0 ))
+ {
+ CAMHAL_LOGDA("Enabling shutter sound");
+
+ mShutterEnabled = true;
+ mMsgEnabled |= CAMERA_MSG_SHUTTER;
+ mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
+ }
+ else if ( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
+ ( strcmp(valstr, TICameraParameters::SHUTTER_DISABLE) == 0 ))
+ {
+ CAMHAL_LOGDA("Disabling shutter sound");
+
+ mShutterEnabled = false;
+ mMsgEnabled &= ~CAMERA_MSG_SHUTTER;
+ mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
+ }
+
+ }
+
+ //On fail restore old parameters
+ if ( NO_ERROR != ret ) {
+ mParameters.unflatten(oldParams.flatten());
+ }
+
+ // Restart preview if required by KEY_RECORDING_HINT, but only if preview is already running.
+ // If preview is not started yet, Video Mode parameters will take effect on the next startPreview()
+ if (restartPreviewRequired && previewEnabled() && !mRecordingEnabled) {
+ CAMHAL_LOGDA("Restarting Preview");
+ ret = restartPreview();
+ } else if (restartPreviewRequired && !previewEnabled() &&
+ mDisplayPaused && !mRecordingEnabled) {
+ CAMHAL_LOGDA("Stopping Preview");
+ forceStopPreview();
+ }
+
+ if (ret != NO_ERROR)
+ {
+ CAMHAL_LOGEA("Failed to restart Preview");
+ return ret;
+ }
+
+LOGD("setParameters, 3 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraParameters::KEY_PICTURE_SIZE));
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::allocPreviewBufs(int width, int height, const char* previewFormat,
+ unsigned int buffercount, unsigned int &max_queueable)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if(mDisplayAdapter.get() == NULL)
+ {
+ // Memory allocation of preview buffers is now placed in gralloc
+ // CameraHal should not allocate preview buffers without DisplayAdapter
+ return NO_MEMORY;
+ }
+
+ if(!mPreviewBufs)
+ {
+ ///@todo Pluralise the name of this method to allocateBuffers
+ mPreviewLength = 0;
+ mPreviewBufs = (int32_t *) mDisplayAdapter->allocateBuffer(width, height,
+ previewFormat,
+ mPreviewLength,
+ buffercount);
+ LOGD("allocPreviewBufs buffercount=%d", buffercount);
+
+ if (NULL == mPreviewBufs ) {
+ CAMHAL_LOGEA("Couldn't allocate preview buffers");
+ return NO_MEMORY;
+ }
+
+ mPreviewOffsets = (uint32_t *) mDisplayAdapter->getOffsets();
+ if ( NULL == mPreviewOffsets ) {
+ CAMHAL_LOGEA("Buffer mapping failed");
+ return BAD_VALUE;
+ }
+
+ mPreviewFd = mDisplayAdapter->getFd();
+ /* mPreviewFd and desc.mFd seem to be unused.
+ if ( -1 == mPreviewFd ) {
+ CAMHAL_LOGEA("Invalid handle");
+ return BAD_VALUE;
+ }*/
+
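+ // The display adapter also acts as the BufferProvider for these buffers, so
+ // freePreviewBufs() can release them through the same interface that allocated them.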
+ mBufProvider = (BufferProvider*) mDisplayAdapter.get();
+
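+ // max_queueable reports how many of the allocated buffers the native window can
+ // actually hold queued at a time; it is typically smaller than the total count.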
+ ret = mDisplayAdapter->maxQueueableBuffers(max_queueable);
+ if (ret != NO_ERROR) {
+ return ret;
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+}
+
+status_t CameraHal::freePreviewBufs()
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDB("mPreviewBufs = 0x%x", (unsigned int)mPreviewBufs);
+ if(mPreviewBufs)
+ {
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mBufProvider->freeBuffer(mPreviewBufs);
+ mPreviewBufs = NULL;
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+
+status_t CameraHal::allocPreviewDataBufs(size_t size, size_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ int bytes;
+
+ LOG_FUNCTION_NAME;
+
+ bytes = size;
+
+ if ( NO_ERROR == ret )
+ {
+ if( NULL != mPreviewDataBufs )
+ {
+ ret = freePreviewDataBufs();
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
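+ // Round the requested size up to a 4096-byte boundary; the memory manager is
+ // assumed to hand out page-aligned buffers, so sizes should be page multiples.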
+ bytes = ((bytes+4095)/4096)*4096;
+ mPreviewDataBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, NULL, bytes, bufferCount);
+
+ CAMHAL_LOGDB("Size of Preview data buffer = %d", bytes);
+ if( NULL == mPreviewDataBufs )
+ {
+ CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
+ ret = NO_MEMORY;
+ }
+ else
+ {
+ bytes = size;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mPreviewDataFd = mMemoryManager->getFd();
+ mPreviewDataLength = bytes;
+ mPreviewDataOffsets = mMemoryManager->getOffsets();
+ }
+ else
+ {
+ mPreviewDataFd = -1;
+ mPreviewDataLength = 0;
+ mPreviewDataOffsets = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::freePreviewDataBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+
+ if( NULL != mPreviewDataBufs )
+ {
+
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mMemoryManager->freeBuffer(mPreviewDataBufs);
+ mPreviewDataBufs = NULL;
+
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::allocImageBufs(unsigned int width, unsigned int height, size_t size, const char* previewFormat, unsigned int bufferCount)
+{
+ status_t ret = NO_ERROR;
+ int bytes;
+
+ LOG_FUNCTION_NAME;
+
+ bytes = size;
+
+ // allocate image buffers only if not already allocated
+ if(NULL != mImageBufs) {
+ return NO_ERROR;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ bytes = ((bytes+4095)/4096)*4096;
+ mImageBufs = (int32_t *)mMemoryManager->allocateBuffer(0, 0, previewFormat, bytes, bufferCount);
+
+ CAMHAL_LOGDB("Size of Image cap buffer = %d", bytes);
+ if( NULL == mImageBufs )
+ {
+ CAMHAL_LOGEA("Couldn't allocate image buffers using memory manager");
+ ret = NO_MEMORY;
+ }
+ else
+ {
+ bytes = size;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mImageFd = mMemoryManager->getFd();
+ mImageLength = bytes;
+ mImageOffsets = mMemoryManager->getOffsets();
+ }
+ else
+ {
+ mImageFd = -1;
+ mImageLength = 0;
+ mImageOffsets = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount)
+{
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ if( NULL != mVideoBufs ){
+ ret = freeVideoBufs(mVideoBufs);
+ mVideoBufs = NULL;
+ }
+
+ if ( NO_ERROR == ret ){
+ int32_t stride;
+ buffer_handle_t *bufsArr = new buffer_handle_t [bufferCount];
+
+ if (bufsArr != NULL){
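+ // Allocate one NV12 gralloc buffer per video frame; on any failure, free the
+ // buffers allocated so far before bailing out.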
+ for (int i = 0; i< bufferCount; i++){
+ GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get();
+ buffer_handle_t buf;
+ ret = GrallocAlloc.alloc(width, height, HAL_PIXEL_FORMAT_NV12, CAMHAL_GRALLOC_USAGE, &buf, &stride);
+ if (ret != NO_ERROR){
+ CAMHAL_LOGEA("Couldn't allocate video buffers using Gralloc");
+ ret = NO_MEMORY;
+ for (int j=0; j< i; j++){
+ buf = (buffer_handle_t)bufsArr[j];
+ CAMHAL_LOGEB("Freeing Gralloc Buffer 0x%x", buf);
+ GrallocAlloc.free(buf);
+ }
+ delete [] bufsArr;
+ goto exit;
+ }
+ bufsArr[i] = buf;
+ CAMHAL_LOGVB("*** Gralloc Handle =0x%x ***", buf);
+ }
+
+ mVideoBufs = (int32_t *)bufsArr;
+ }
+ else{
+ CAMHAL_LOGEA("Couldn't allocate video buffers ");
+ ret = NO_MEMORY;
+ }
+ }
+
+ exit:
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void endImageCapture( void *userData)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != userData )
+ {
+ CameraHal *c = reinterpret_cast<CameraHal *>(userData);
+ c->signalEndImageCapture();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void releaseImageBuffers(void *userData)
+{
+ LOG_FUNCTION_NAME;
+
+ if (NULL != userData) {
+ CameraHal *c = reinterpret_cast<CameraHal *>(userData);
+ c->freeImageBufs();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t CameraHal::signalEndImageCapture()
+{
+ status_t ret = NO_ERROR;
+ int w,h;
+ CameraParameters adapterParams = mParameters;
+ Mutex::Autolock lock(mLock);
+
+ LOG_FUNCTION_NAME;
+
+ if ( mBracketingRunning ) {
+ stopImageBracketing();
+ } else {
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::freeImageBufs()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NO_ERROR == ret )
+ {
+
+ if( NULL != mImageBufs )
+ {
+
+ ///@todo Pluralise the name of this method to freeBuffers
+ ret = mMemoryManager->freeBuffer(mImageBufs);
+ mImageBufs = NULL;
+
+ }
+ else
+ {
+ ret = -EINVAL;
+ }
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t CameraHal::freeVideoBufs(void *bufs)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ buffer_handle_t *pBuf = (buffer_handle_t*)bufs;
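+ // The number of buffers to free is taken from REQUIRED_PREVIEW_BUFS, which assumes
+ // allocVideoBufs() was called with that same count (see startRecording()).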
+ int count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
+ if(pBuf == NULL)
+ {
+ CAMHAL_LOGEA("NULL pointer passed to freeVideoBuffer");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get();
+
+ for(int i = 0; i < count; i++){
+ buffer_handle_t ptr = *pBuf++;
+ CAMHAL_LOGVB("Free Video Gralloc Handle 0x%x", ptr);
+ GrallocAlloc.free(ptr);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Start preview mode.
+
+ @param none
+ @return NO_ERROR Camera switched to VF mode
+ @todo Update function header with the different errors that are possible
+
+ */
+status_t CameraHal::startPreview()
+{
+
+ status_t ret = NO_ERROR;
+ CameraAdapter::BuffersDescriptor desc;
+ CameraFrame frame;
+ const char *valstr = NULL;
+ unsigned int required_buffer_count;
+ unsigned int max_queueble_buffers;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ gettimeofday(&mStartPreview, NULL);
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if ( mPreviewEnabled ){
+ CAMHAL_LOGDA("Preview already running");
+ LOG_FUNCTION_NAME_EXIT;
+ return ALREADY_EXISTS;
+ }
+
+ if ( NULL != mCameraAdapter ) {
+ ret = mCameraAdapter->setParameters(mParameters);
+ }
+
+ if ((mPreviewStartInProgress == false) && (mDisplayPaused == false)){
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_RESOLUTION_PREVIEW,( int ) &frame);
+ if ( NO_ERROR != ret ){
+ CAMHAL_LOGEB("Error: CAMERA_QUERY_RESOLUTION_PREVIEW %d", ret);
+ return ret;
+ }
+
+ ///Update the current preview width and height
+ mPreviewWidth = frame.mWidth;
+ mPreviewHeight = frame.mHeight;
+ //Update the padded width and height - required for VNF and VSTAB
+ mParameters.set(TICameraParameters::KEY_PADDED_WIDTH, mPreviewWidth);
+ mParameters.set(TICameraParameters::KEY_PADDED_HEIGHT, mPreviewHeight);
+
+ }
+
+ ///If we don't have the preview callback enabled and display adapter,
+ if(!mSetPreviewWindowCalled || (mDisplayAdapter.get() == NULL)){
+ CAMHAL_LOGEA("Preview not started. Preview in progress flag set");
+ mPreviewStartInProgress = true;
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_SWITCH_TO_EXECUTING);
+ if ( NO_ERROR != ret ){
+ CAMHAL_LOGEB("Error: CAMERA_SWITCH_TO_EXECUTING %d", ret);
+ return ret;
+ }
+ return NO_ERROR;
+ }
+
+ if( (mDisplayAdapter.get() != NULL) && ( !mPreviewEnabled ) && ( mDisplayPaused ) )
+ {
+ CAMHAL_LOGDA("Preview is in paused state");
+
+ mDisplayPaused = false;
+ mPreviewEnabled = true;
+ if ( NO_ERROR == ret )
+ {
+ ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Display adapter resume failed %x", ret);
+ }
+ }
+ //restart preview callbacks
+ if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)
+ {
+ mAppCallbackNotifier->enableMsgType (CAMERA_MSG_PREVIEW_FRAME);
+ }
+ return ret;
+ }
+
+
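+ // Query how many preview buffers this adapter requires from the camera properties,
+ // then allocate them through the display adapter (gralloc-backed).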
+ required_buffer_count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
+
+ ///Allocate the preview buffers
+ ret = allocPreviewBufs(mPreviewWidth, mPreviewHeight, mParameters.getPreviewFormat(), required_buffer_count, max_queueble_buffers);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEA("Couldn't allocate buffers for Preview");
+ goto error;
+ }
+
+ if ( mMeasurementEnabled )
+ {
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA,
+ ( int ) &frame,
+ required_buffer_count);
+ if ( NO_ERROR != ret )
+ {
+ return ret;
+ }
+
+ ///Allocate the preview data buffers
+ ret = allocPreviewDataBufs(frame.mLength, required_buffer_count);
+ if ( NO_ERROR != ret ) {
+ CAMHAL_LOGEA("Couldn't allocate preview data buffers");
+ goto error;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ desc.mBuffers = mPreviewDataBufs;
+ desc.mOffsets = mPreviewDataOffsets;
+ desc.mFd = mPreviewDataFd;
+ desc.mLength = mPreviewDataLength;
+ desc.mCount = ( size_t ) required_buffer_count;
+ desc.mMaxQueueable = (size_t) required_buffer_count;
+
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW_DATA,
+ ( int ) &desc);
+ }
+
+ }
+
+ ///Pass the buffers to Camera Adapter
+ desc.mBuffers = mPreviewBufs;
+ desc.mOffsets = mPreviewOffsets;
+ desc.mFd = mPreviewFd;
+ desc.mLength = mPreviewLength;
+ desc.mCount = ( size_t ) required_buffer_count;
+ desc.mMaxQueueable = (size_t) max_queueble_buffers;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_PREVIEW,
+ ( int ) &desc);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("Failed to register preview buffers: 0x%x", ret);
+ freePreviewBufs();
+ return ret;
+ }
+
+ mAppCallbackNotifier->startPreviewCallbacks(mParameters, mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, required_buffer_count);
+
+ ///Start the callback notifier
+ ret = mAppCallbackNotifier->start();
+
+ if( ALREADY_EXISTS == ret )
+ {
+ //Already running, do nothing
+ CAMHAL_LOGDA("AppCallbackNotifier already running");
+ ret = NO_ERROR;
+ }
+ else if ( NO_ERROR == ret ) {
+ CAMHAL_LOGDA("Started AppCallbackNotifier..");
+ mAppCallbackNotifier->setMeasurements(mMeasurementEnabled);
+ }
+ else
+ {
+ CAMHAL_LOGDA("Couldn't start AppCallbackNotifier");
+ goto error;
+ }
+
+ ///Enable the display adapter if present, actual overlay enable happens when we post the buffer
+ if(mDisplayAdapter.get() != NULL)
+ {
+ CAMHAL_LOGDA("Enabling display");
+ bool isS3d = false;
+ DisplayAdapter::S3DParameters s3dParams;
+ int width, height;
+ mParameters.getPreviewSize(&width, &height);
+#if 0 //TODO: s3d is not part of bringup...will reenable
+ if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_SUPPORTED)) != NULL) {
+ isS3d = (strcmp(valstr, "true") == 0);
+ }
+ if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D2D_PREVIEW)) != NULL) {
+ if (strcmp(valstr, "off") == 0)
+ {
+ CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS OFF");
+ //TODO: obtain the frame packing configuration from camera or user settings
+ //once side by side configuration is supported
+ s3dParams.mode = OVERLAY_S3D_MODE_ON;
+ s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER;
+ s3dParams.order = OVERLAY_S3D_ORDER_LF;
+ s3dParams.subSampling = OVERLAY_S3D_SS_NONE;
+ }
+ else
+ {
+ CAMHAL_LOGEA("STEREO 3D->2D PREVIEW MODE IS ON");
+ s3dParams.mode = OVERLAY_S3D_MODE_OFF;
+ s3dParams.framePacking = OVERLAY_S3D_FORMAT_OVERUNDER;
+ s3dParams.order = OVERLAY_S3D_ORDER_LF;
+ s3dParams.subSampling = OVERLAY_S3D_SS_NONE;
+ }
+ }
+#endif //if 0
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ ret = mDisplayAdapter->enableDisplay(width, height, &mStartPreview, isS3d ? &s3dParams : NULL);
+
+#else
+
+ ret = mDisplayAdapter->enableDisplay(width, height, NULL, isS3d ? &s3dParams : NULL);
+
+#endif
+
+ if ( ret != NO_ERROR )
+ {
+ CAMHAL_LOGEA("Couldn't enable display");
+ goto error;
+ }
+
+ }
+
+ ///Send START_PREVIEW command to adapter
+ CAMHAL_LOGDA("Starting CameraAdapter preview mode");
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_PREVIEW);
+
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEA("Couldn't start preview w/ CameraAdapter");
+ goto error;
+ }
+ CAMHAL_LOGDA("Started preview");
+
+ mPreviewEnabled = true;
+ mPreviewStartInProgress = false;
+ return ret;
+
+ error:
+
+ CAMHAL_LOGEA("Performing cleanup after error");
+
+ //Do all the cleanup
+ freePreviewBufs();
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_PREVIEW);
+ if(mDisplayAdapter.get() != NULL)
+ {
+ mDisplayAdapter->disableDisplay(false);
+ }
+ mAppCallbackNotifier->stop();
+ mPreviewStartInProgress = false;
+ mPreviewEnabled = false;
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Sets ANativeWindow object.
+
+ Preview buffers are provided to CameraHal via this object. The DisplayAdapter will interface with it
+ to render buffers to the display.
+
+ @param[in] window The ANativeWindow object created by Surface flinger
+ @return NO_ERROR If the ANativeWindow object passes validation criteria
+ @todo Define validation criteria for ANativeWindow object. Define error codes for scenarios
+
+ */
+status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window)
+{
+ status_t ret = NO_ERROR;
+ CameraAdapter::BuffersDescriptor desc;
+
+ LOG_FUNCTION_NAME;
+ mSetPreviewWindowCalled = true;
+
+ ///If the Camera service passes a null window, we destroy existing window and free the DisplayAdapter
+ if(!window)
+ {
+ if(mDisplayAdapter.get() != NULL)
+ {
+ ///NULL window passed, destroy the display adapter if present
+ CAMHAL_LOGEA("NULL window passed, destroying display adapter");
+ mDisplayAdapter.clear();
+ ///@remarks If a window existed previously, we usually expect the client to pass another valid window,
+ ///@remarks so we will wait until it does before beginning the preview again
+ mSetPreviewWindowCalled = false;
+ }
+ CAMHAL_LOGEA("NULL ANativeWindow passed to setPreviewWindow");
+ return NO_ERROR;
+ }else if(mDisplayAdapter.get() == NULL)
+ {
+ // Need to create the display adapter since it has not been created
+ // Create display adapter
+ mDisplayAdapter = new ANativeWindowDisplayAdapter();
+ ret = NO_ERROR;
+ if(!mDisplayAdapter.get() || ((ret=mDisplayAdapter->initialize())!=NO_ERROR))
+ {
+ if(ret!=NO_ERROR)
+ {
+ mDisplayAdapter.clear();
+ CAMHAL_LOGEA("DisplayAdapter initialize failed");
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+ else
+ {
+ CAMHAL_LOGEA("Couldn't create DisplayAdapter");
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_MEMORY;
+ }
+ }
+
+ // DisplayAdapter needs to know where to get the CameraFrames from in order to display them
+ // Since CameraAdapter is the one that provides the frames, set it as the frame provider for DisplayAdapter
+ mDisplayAdapter->setFrameProvider(mCameraAdapter);
+
+ // Any dynamic errors that happen during the camera use case have to be propagated back to the application
+ // via CAMERA_MSG_ERROR. AppCallbackNotifier is the class that notifies such errors to the application
+ // Set it as the error handler for the DisplayAdapter
+ mDisplayAdapter->setErrorHandler(mAppCallbackNotifier.get());
+
+ // Update the display adapter with the new window that is passed from CameraService
+ ret = mDisplayAdapter->setPreviewWindow(window);
+ if(ret!=NO_ERROR)
+ {
+ CAMHAL_LOGEB("DisplayAdapter setPreviewWindow returned error %d", ret);
+ }
+
+ if(mPreviewStartInProgress)
+ {
+ CAMHAL_LOGDA("setPreviewWindow called when preview running");
+ // Start the preview since the window is now available
+ ret = startPreview();
+ }
+ }else
+ {
+ /* If mDisplayAdapter is already created, there is nothing more to do.
+ * We get a surface handle directly now, so we can reconfigure the surface
+ * itself in DisplayAdapter if the dimensions have changed
+ */
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+
+}
+
+
+/**
+ @brief Stop a previously started preview.
+
+ @param none
+ @return none
+
+ */
+void CameraHal::stopPreview()
+{
+ LOG_FUNCTION_NAME;
+
+ if( (!previewEnabled() && !mDisplayPaused) || mRecordingEnabled)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return;
+ }
+
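+ // An image capture is considered to be running if the adapter is in capture state
+ // and is not already transitioning back to preview.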
+ bool imageCaptureRunning = (mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE) &&
+ (mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE);
+ if(mDisplayPaused && !imageCaptureRunning)
+ {
+ // Display is paused, which essentially means there is no preview active.
+ // Note: this is done so that when stopPreview is called by client after
+ // an image capture, we do not de-initialize the camera adapter and
+ // restart over again.
+
+ return;
+ }
+
+ forceStopPreview();
+
+ // Reset Capture-Mode to default, so that when we switch from VideoRecording
+ // to ImageCapture, CAPTURE_MODE is not left to VIDEO_MODE.
+ CAMHAL_LOGDA("Resetting Capture-Mode to default");
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, "");
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Returns true if preview is enabled
+
+ @param none
+ @return true If preview is running currently
+ false If preview has been stopped
+
+ */
+bool CameraHal::previewEnabled()
+{
+ LOG_FUNCTION_NAME;
+
+ return (mPreviewEnabled || mPreviewStartInProgress);
+}
+
+/**
+ @brief Start record mode.
+
+ When a recording frame is available, a CAMERA_MSG_VIDEO_FRAME message is sent with
+ the corresponding frame. Every recording frame must be released by calling
+ releaseRecordingFrame().
+
+ @param none
+ @return NO_ERROR If recording could be started without any issues
+ @todo Update the header with possible error values in failure scenarios
+
+ */
+status_t CameraHal::startRecording( )
+{
+ int w, h;
+ const char *valstr = NULL;
+ bool restartPreviewRequired = false;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartPreview, NULL);
+
+#endif
+
+ if(!previewEnabled())
+ {
+ return NO_INIT;
+ }
+
+ // set internal recording hint in case camera adapter needs to make some
+ // decisions....(will only be sent to camera adapter if camera restart is required)
+ mParameters.set(TICameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE);
+
+ // if application starts recording in continuous focus picture mode...
+ // then we need to force default capture mode (as opposed to video mode)
+ if ( ((valstr = mParameters.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) &&
+ (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0) ){
+ restartPreviewRequired = resetVideoModeParameters();
+ }
+
+ // only need to check recording hint if preview restart is not already needed
+ valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ if ( !restartPreviewRequired &&
+ (!valstr || (valstr && (strcmp(valstr, CameraParameters::TRUE) != 0))) ) {
+ restartPreviewRequired = setVideoModeParameters(mParameters);
+ }
+
+ if (restartPreviewRequired) {
+ ret = restartPreview();
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ int count = atoi(mCameraProperties->get(CameraProperties::REQUIRED_PREVIEW_BUFS));
+ mParameters.getPreviewSize(&w, &h);
+ CAMHAL_LOGDB("%s Video Width=%d Height=%d", __FUNCTION__, mVideoWidth, mVideoHeight);
+
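+ // Separate gralloc-backed video buffers are only needed when the video resolution
+ // differs from the preview resolution; otherwise the preview buffers are shared.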
+ if ((w != mVideoWidth) && (h != mVideoHeight))
+ {
+ ret = allocVideoBufs(mVideoWidth, mVideoHeight, count);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ mParameters.remove(TICameraParameters::KEY_RECORDING_HINT);
+ return ret;
+ }
+
+ mAppCallbackNotifier->useVideoBuffers(true);
+ mAppCallbackNotifier->setVideoRes(mVideoWidth, mVideoHeight);
+ ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, count, mVideoBufs);
+ }
+ else
+ {
+ mAppCallbackNotifier->useVideoBuffers(false);
+ mAppCallbackNotifier->setVideoRes(mPreviewWidth, mPreviewHeight);
+ ret = mAppCallbackNotifier->initSharedVideoBuffers(mPreviewBufs, mPreviewOffsets, mPreviewFd, mPreviewLength, count, NULL);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ret = mAppCallbackNotifier->startRecording();
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ ///Buffers for video capture (if different from preview) are expected to be allocated within CameraAdapter
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_VIDEO);
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mRecordingEnabled = true;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Set the camera parameters specific to Video Recording.
+
+ This function checks for the camera parameters which have to be set for recording.
+ Video Recording needs CAPTURE_MODE to be VIDEO_MODE. This function sets it.
+ This function also enables Video Recording specific functions like VSTAB & VNF.
+
+ @param none
+ @return true if preview needs to be restarted for VIDEO_MODE parameters to take effect.
+ @todo Modify the policies for enabling VSTAB & VNF usecase based later.
+
+ */
+bool CameraHal::setVideoModeParameters(const CameraParameters& params)
+{
+ const char *valstr = NULL;
+ bool restartPreviewRequired = false;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // Set CAPTURE_MODE to VIDEO_MODE, if not set already and Restart Preview
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if ( (valstr == NULL) ||
+ ( (valstr != NULL) && (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) != 0) ) )
+ {
+ CAMHAL_LOGDA("Set CAPTURE_MODE to VIDEO_MODE");
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, (const char *) TICameraParameters::VIDEO_MODE);
+ restartPreviewRequired = true;
+ }
+
+ // Check if CAPTURE_MODE is VIDEO_MODE, since VSTAB & VNF work only in VIDEO_MODE.
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if (strcmp(valstr, (const char *) TICameraParameters::VIDEO_MODE) == 0) {
+ // set VSTAB. restart is required if vstab value has changed
+ if (params.get(CameraParameters::KEY_VIDEO_STABILIZATION) != NULL) {
+ // make sure we support vstab
+ if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
+ CameraParameters::TRUE) == 0) {
+ valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION);
+ // vstab value has changed
+ if ((valstr != NULL) &&
+ strcmp(valstr, params.get(CameraParameters::KEY_VIDEO_STABILIZATION)) != 0) {
+ restartPreviewRequired = true;
+ }
+ mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
+ params.get(CameraParameters::KEY_VIDEO_STABILIZATION));
+ }
+ } else if (mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION)) {
+ // vstab was configured but now unset
+ restartPreviewRequired = true;
+ mParameters.remove(CameraParameters::KEY_VIDEO_STABILIZATION);
+ }
+
+ // Set VNF
+ if (params.get(TICameraParameters::KEY_VNF) == NULL) {
+ CAMHAL_LOGDA("Enable VNF");
+ mParameters.set(TICameraParameters::KEY_VNF, "1");
+ restartPreviewRequired = true;
+ } else {
+ valstr = mParameters.get(TICameraParameters::KEY_VNF);
+ if (valstr && strcmp(valstr, params.get(TICameraParameters::KEY_VNF)) != 0) {
+ restartPreviewRequired = true;
+ }
+ mParameters.set(TICameraParameters::KEY_VNF, params.get(TICameraParameters::KEY_VNF));
+ }
+
+ // For VSTAB alone for 1080p resolution, padded width goes > 2048, which cannot be rendered by GPU.
+ // In such case, there is support in Ducati for combination of VSTAB & VNF requiring padded width < 2048.
+ // So we are forcefully enabling VNF, if VSTAB is enabled for 1080p resolution.
+ valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && (strcmp(valstr, CameraParameters::TRUE) == 0) && (mPreviewWidth == 1920)) {
+ CAMHAL_LOGDA("Force Enable VNF for 1080p");
+ mParameters.set(TICameraParameters::KEY_VNF, "1");
+ restartPreviewRequired = true;
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return restartPreviewRequired;
+}
+
+/**
+ @brief Reset the camera parameters specific to Video Recording.
+
+ This function resets CAPTURE_MODE and disables Recording specific functions like VSTAB & VNF.
+
+ @param none
+ @return true if preview needs to be restarted for VIDEO_MODE parameters to take effect.
+
+ */
+bool CameraHal::resetVideoModeParameters()
+{
+ const char *valstr = NULL;
+ bool restartPreviewRequired = false;
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // ignore this if we are already recording
+ if (mRecordingEnabled) {
+ return false;
+ }
+
+ // Reset CAPTURE_MODE to its default if it is currently VIDEO_MODE, and request a preview restart
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if ((valstr != NULL) && (strcmp(valstr, TICameraParameters::VIDEO_MODE) == 0)) {
+ CAMHAL_LOGDA("Reset Capture-Mode to default");
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, "");
+ restartPreviewRequired = true;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return restartPreviewRequired;
+}
+
+/**
+ @brief Restart the preview with setParameter.
+
+ This function restarts preview, for some VIDEO_MODE parameters to take effect.
+
+ @param none
+ @return NO_ERROR If recording parameters could be set without any issues
+
+ */
+status_t CameraHal::restartPreview()
+{
+ const char *valstr = NULL;
+ char tmpvalstr[30];
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ // Retain CAPTURE_MODE before calling stopPreview(), since it is reset in stopPreview().
+ tmpvalstr[0] = 0;
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if(valstr != NULL)
+ {
+ if(sizeof(tmpvalstr) < (strlen(valstr)+1))
+ {
+ return -EINVAL;
+ }
+
+ strncpy(tmpvalstr, valstr, sizeof(tmpvalstr));
+ tmpvalstr[sizeof(tmpvalstr)-1] = 0;
+ }
+
+ forceStopPreview();
+
+ {
+ Mutex::Autolock lock(mLock);
+ mParameters.set(TICameraParameters::KEY_CAP_MODE, tmpvalstr);
+ mCameraAdapter->setParameters(mParameters);
+ }
+
+ ret = startPreview();
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Stop a previously started recording.
+
+ @param none
+ @return none
+
+ */
+void CameraHal::stopRecording()
+{
+ CameraAdapter::AdapterState currentState;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ if (!mRecordingEnabled )
+ {
+ return;
+ }
+
+ currentState = mCameraAdapter->getState();
+ if (currentState == CameraAdapter::VIDEO_CAPTURE_STATE) {
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
+ }
+
+ mAppCallbackNotifier->stopRecording();
+
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_VIDEO);
+
+ mRecordingEnabled = false;
+
+ if ( mAppCallbackNotifier->getUesVideoBuffers() ){
+ freeVideoBufs(mVideoBufs);
+ if (mVideoBufs){
+ CAMHAL_LOGVB(" FREEING mVideoBufs 0x%x", mVideoBufs);
+ delete [] mVideoBufs;
+ }
+ mVideoBufs = NULL;
+ }
+
+ // reset internal recording hint in case camera adapter needs to make some
+ // decisions....(will only be sent to camera adapter if camera restart is required)
+ mParameters.remove(TICameraParameters::KEY_RECORDING_HINT);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Returns true if recording is enabled.
+
+ @param none
+ @return true If recording is currently running
+ false If recording has been stopped
+
+ */
+int CameraHal::recordingEnabled()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return mRecordingEnabled;
+}
+
+/**
+ @brief Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+
+ @param[in] mem MemoryBase pointer to the frame being released. Must be one of the buffers
+ previously given by CameraHal
+ @return none
+
+ */
+void CameraHal::releaseRecordingFrame(const void* mem)
+{
+ LOG_FUNCTION_NAME;
+
+ //CAMHAL_LOGDB(" 0x%x", mem->pointer());
+
+ if ( ( mRecordingEnabled ) && mem != NULL)
+ {
+ mAppCallbackNotifier->releaseRecordingFrame(mem);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return;
+}
+
+/**
+ @brief Start auto focus
+
+ This call is asynchronous.
+ The notification callback routine is called with CAMERA_MSG_FOCUS once when
+ focusing is complete. autoFocus() will be called again if another auto focus is
+ needed.
+
+ @param none
+ @return NO_ERROR
+ @todo Define the error codes if the focus is not locked
+
+ */
+status_t CameraHal::autoFocus()
+{
+ status_t ret = NO_ERROR;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartFocus, NULL);
+
+#endif
+
+
+ LOG_FUNCTION_NAME;
+
+ {
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled |= CAMERA_MSG_FOCUS;
+ }
+
+
+ if ( NULL != mCameraAdapter )
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //pass the autoFocus timestamp along with the command to camera adapter
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_PERFORM_AUTOFOCUS, ( int ) &mStartFocus);
+
+#else
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_PERFORM_AUTOFOCUS);
+
+#endif
+
+ }
+ else
+ {
+ ret = -1;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Cancels auto-focus function.
+
+ If the auto-focus is still in progress, this function will cancel it.
+ Whether the auto-focus is in progress or not, this function will return the
+ focus position to the default. If the camera does not support auto-focus, this is a no-op.
+
+
+ @param none
+ @return NO_ERROR If the cancel succeeded
+ @todo Define error codes if cancel didn't succeed
+
+ */
+status_t CameraHal::cancelAutoFocus()
+{
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+ CameraParameters adapterParams = mParameters;
+ mMsgEnabled &= ~CAMERA_MSG_FOCUS;
+
+ if( NULL != mCameraAdapter )
+ {
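+ // Release any AF lock before cancelling so the adapter can return focus to its default state.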
+ adapterParams.set(TICameraParameters::KEY_AUTO_FOCUS_LOCK, CameraParameters::FALSE);
+ mCameraAdapter->setParameters(adapterParams);
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_CANCEL_AUTOFOCUS);
+ mAppCallbackNotifier->flushEventQueue();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+void CameraHal::setEventProvider(int32_t eventMask, MessageNotifier * eventNotifier)
+{
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ mEventProvider = new EventProvider(eventNotifier, this, eventCallbackRelay);
+ if ( NULL == mEventProvider )
+ {
+ CAMHAL_LOGEA("Error in creating EventProvider");
+ }
+ else
+ {
+ mEventProvider->enableEventNotification(eventMask);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void CameraHal::eventCallbackRelay(CameraHalEvent* event)
+{
+ LOG_FUNCTION_NAME;
+
+ CameraHal *appcbn = ( CameraHal * ) (event->mCookie);
+ appcbn->eventCallback(event );
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void CameraHal::eventCallback(CameraHalEvent* event)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( NULL != event )
+ {
+ switch( event->mEventType )
+ {
+ case CameraHalEvent::EVENT_FOCUS_LOCKED:
+ case CameraHalEvent::EVENT_FOCUS_ERROR:
+ {
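+ // Bracketing waits for auto-focus to complete; start the bracketed capture once
+ // focus locks (or reports an error).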
+ if ( mBracketingEnabled )
+ {
+ startImageBracketing();
+ }
+ break;
+ }
+ default:
+ {
+ break;
+ }
+ };
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t CameraHal::startImageBracketing()
+{
+ status_t ret = NO_ERROR;
+ CameraFrame frame;
+ CameraAdapter::BuffersDescriptor desc;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartCapture, NULL);
+
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if(!previewEnabled() && !mDisplayPaused)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_INIT;
+ }
+
+ if ( !mBracketingEnabled )
+ {
+ return ret;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mBracketingRunning = true;
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
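+ // Ask the adapter for the capture buffer requirements; mBracketRangeNegative + 1
+ // buffers are requested, presumably the base shot plus the negative bracket range.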
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
+ ( int ) &frame,
+ ( mBracketRangeNegative + 1 ));
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ if ( NULL != mAppCallbackNotifier.get() )
+ {
+ mAppCallbackNotifier->setBurst(true);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mParameters.getPictureSize(( int * ) &frame.mWidth,
+ ( int * ) &frame.mHeight);
+
+ ret = allocImageBufs(frame.mWidth,
+ frame.mHeight,
+ frame.mLength,
+ mParameters.getPictureFormat(),
+ ( mBracketRangeNegative + 1 ));
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ }
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+
+ desc.mBuffers = mImageBufs;
+ desc.mOffsets = mImageOffsets;
+ desc.mFd = mImageFd;
+ desc.mLength = mImageLength;
+ desc.mCount = ( size_t ) ( mBracketRangeNegative + 1 );
+ desc.mMaxQueueable = ( size_t ) ( mBracketRangeNegative + 1 );
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
+ ( int ) &desc);
+
+ if ( NO_ERROR == ret )
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //pass capture timestamp along with the camera adapter command
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_BRACKET_CAPTURE, ( mBracketRangePositive + 1 ), (int) &mStartCapture);
+
+#else
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_BRACKET_CAPTURE, ( mBracketRangePositive + 1 ));
+
+#endif
+
+ }
+ }
+
+ return ret;
+}
+
+status_t CameraHal::stopImageBracketing()
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if( !previewEnabled() )
+ {
+ return NO_INIT;
+ }
+
+ mBracketingRunning = false;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_BRACKET_CAPTURE);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Take a picture.
+
+ @param none
+ @return NO_ERROR If able to switch to image capture
+ @todo Define error codes if unable to switch to image capture
+
+ */
+status_t CameraHal::takePicture( )
+{
+ status_t ret = NO_ERROR;
+ CameraFrame frame;
+ CameraAdapter::BuffersDescriptor desc;
+ int burst;
+ const char *valstr = NULL;
+ unsigned int bufferCount = 1;
+
+ Mutex::Autolock lock(mLock);
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ gettimeofday(&mStartCapture, NULL);
+
+#endif
+
+ LOG_FUNCTION_NAME;
+
+ if(!previewEnabled() && !mDisplayPaused)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ CAMHAL_LOGEA("Preview not started...");
+ return NO_INIT;
+ }
+
+ // return error if we are already capturing
+ if ( (mCameraAdapter->getState() == CameraAdapter::CAPTURE_STATE &&
+ mCameraAdapter->getNextState() != CameraAdapter::PREVIEW_STATE) ||
+ (mCameraAdapter->getState() == CameraAdapter::VIDEO_CAPTURE_STATE &&
+ mCameraAdapter->getNextState() != CameraAdapter::VIDEO_STATE) ) {
+ CAMHAL_LOGEA("Already capturing an image...");
+ return NO_INIT;
+ }
+
+ // we only support video snapshot if we are in video mode (recording hint is set)
+ valstr = mParameters.get(TICameraParameters::KEY_CAP_MODE);
+ if ( (mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE) &&
+ (valstr && strcmp(valstr, TICameraParameters::VIDEO_MODE)) ) {
+ CAMHAL_LOGEA("Trying to capture while recording without recording hint set...");
+ return INVALID_OPERATION;
+ }
+
+ if ( !mBracketingRunning )
+ {
+
+ if ( NO_ERROR == ret )
+ {
+ burst = mParameters.getInt(TICameraParameters::KEY_BURST);
+ }
+
+ //Allocate all buffers only in burst capture case
+ if ( burst > 1 )
+ {
+ bufferCount = CameraHal::NO_BUFFERS_IMAGE_CAPTURE;
+ if ( NULL != mAppCallbackNotifier.get() )
+ {
+ mAppCallbackNotifier->setBurst(true);
+ }
+ }
+ else
+ {
+ if ( NULL != mAppCallbackNotifier.get() )
+ {
+ mAppCallbackNotifier->setBurst(false);
+ }
+ }
+
+ // pause preview during normal image capture
+ // do not pause preview if recording (video state)
+ if (NO_ERROR == ret &&
+ NULL != mDisplayAdapter.get() &&
+ burst < 1) {
+ if (mCameraAdapter->getState() != CameraAdapter::VIDEO_STATE) {
+ mDisplayPaused = true;
+ mPreviewEnabled = false;
+ ret = mDisplayAdapter->pauseDisplay(mDisplayPaused);
+ // since preview is paused we should stop sending preview frames too
+ if(mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+ mAppCallbackNotifier->disableMsgType (CAMERA_MSG_PREVIEW_FRAME);
+ }
+ }
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ mDisplayAdapter->setSnapshotTimeRef(&mStartCapture);
+#endif
+ }
+
+ // if we are taking a video snapshot...
+ if ((NO_ERROR == ret) && (mCameraAdapter->getState() == CameraAdapter::VIDEO_STATE)) {
+ // enable post view frames if not already enabled so we can internally
+ // save snapshot frames for generating thumbnail
+ if((mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME) == 0) {
+ mAppCallbackNotifier->enableMsgType(CAMERA_MSG_POSTVIEW_FRAME);
+ }
+ }
+
+ if ( (NO_ERROR == ret) && (NULL != mCameraAdapter) )
+ {
+ if ( NO_ERROR == ret )
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE,
+ ( int ) &frame,
+ bufferCount);
+
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE returned error 0x%x", ret);
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mParameters.getPictureSize(( int * ) &frame.mWidth,
+ ( int * ) &frame.mHeight);
+
+ ret = allocImageBufs(frame.mWidth,
+ frame.mHeight,
+ frame.mLength,
+ mParameters.getPictureFormat(),
+ bufferCount);
+ if ( NO_ERROR != ret )
+ {
+ CAMHAL_LOGEB("allocImageBufs returned error 0x%x", ret);
+ }
+ }
+
+ if ( (NO_ERROR == ret) && ( NULL != mCameraAdapter ) )
+ {
+ desc.mBuffers = mImageBufs;
+ desc.mOffsets = mImageOffsets;
+ desc.mFd = mImageFd;
+ desc.mLength = mImageLength;
+ desc.mCount = ( size_t ) bufferCount;
+ desc.mMaxQueueable = ( size_t ) bufferCount;
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_USE_BUFFERS_IMAGE_CAPTURE,
+ ( int ) &desc);
+ }
+ }
+
+ if ( ( NO_ERROR == ret ) && ( NULL != mCameraAdapter ) )
+ {
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //pass capture timestamp along with the camera adapter command
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE, (int) &mStartCapture);
+
+#else
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_IMAGE_CAPTURE);
+
+#endif
+
+ }
+
+ return ret;
+}
+
+/**
+ @brief Cancel a picture that was started with takePicture.
+
+ Calling this method when no picture is being taken is a no-op.
+
+ @param none
+ @return NO_ERROR If cancel succeeded. Cancel can succeed if image callback is not sent
+ @todo Define error codes
+
+ */
+status_t CameraHal::cancelPicture( )
+{
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_IMAGE_CAPTURE);
+
+ return NO_ERROR;
+}
+
+/**
+ @brief Return the camera parameters.
+
+ @param none
+ @return Currently configured camera parameters
+
+ */
+char* CameraHal::getParameters()
+{
+ String8 params_str8;
+ char* params_string;
+ const char * valstr = NULL;
+
+ LOG_FUNCTION_NAME;
+
+LOGD("getParameters, 1 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraParameters::KEY_PICTURE_SIZE));
+ if( NULL != mCameraAdapter )
+ {
+ mCameraAdapter->getParameters(mParameters);
+ }
+LOGD("getParameters, 2 mParameters KEY_PICTURE_SIZE=%s", mParameters.get(CameraParameters::KEY_PICTURE_SIZE));
+
+ CameraParameters mParams = mParameters;
+
+ // Handle RECORDING_HINT to Set/Reset Video Mode Parameters
+ valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ if(valstr != NULL)
+ {
+ if(strcmp(valstr, CameraParameters::TRUE) == 0)
+ {
+ //HACK FOR MMS MODE
+ resetPreviewRes(&mParams, mVideoWidth, mVideoHeight);
+ }
+ }
+
+ // do not send internal parameters to upper layers
+ mParams.remove(TICameraParameters::KEY_RECORDING_HINT);
+ mParams.remove(TICameraParameters::KEY_AUTO_FOCUS_LOCK);
+
+ params_str8 = mParams.flatten();
+
+ // camera service frees this string...
+ params_string = (char*) malloc(sizeof(char) * (params_str8.length()+1));
+ strcpy(params_string, params_str8.string());
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ ///Return the current set of parameters
+
+ return params_string;
+}
+
+void CameraHal::putParameters(char *parms)
+{
+ free(parms);
+}
+
+/**
+ @brief Send command to camera driver.
+
+ @param none
+ @return NO_ERROR If the command succeeds
+ @todo Define the error codes that this function can return
+
+ */
+status_t CameraHal::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+
+ if ( ( NO_ERROR == ret ) && ( NULL == mCameraAdapter ) )
+ {
+ CAMHAL_LOGEA("No CameraAdapter instance");
+ ret = -EINVAL;
+ }
+
+ if ( ( NO_ERROR == ret ) && ( !previewEnabled() ))
+ {
+ CAMHAL_LOGEA("Preview is not running");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ switch(cmd)
+ {
+ case CAMERA_CMD_START_SMOOTH_ZOOM:
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_SMOOTH_ZOOM, arg1);
+
+ break;
+ case CAMERA_CMD_STOP_SMOOTH_ZOOM:
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_SMOOTH_ZOOM);
+
+ break;
+
+ case CAMERA_CMD_START_FACE_DETECTION:
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_START_FD);
+
+ break;
+
+ case CAMERA_CMD_STOP_FACE_DETECTION:
+
+ ret = mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_FD);
+
+ break;
+
+ default:
+ break;
+ };
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+/**
+ @brief Release the hardware resources owned by this object.
+
+ Note that this is *not* done in the destructor.
+
+ @param none
+ @return none
+
+ */
+void CameraHal::release()
+{
+ LOG_FUNCTION_NAME;
+ ///@todo Investigate how release is used by CameraService. Vaguely remember that this is called
+ ///just before CameraHal object destruction
+ deinitialize();
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+/**
+ @brief Dump state of the camera hardware
+
+ @param[in] fd File descriptor
+ @param[in] args Arguments
+ @return NO_ERROR Dump succeeded
+ @todo Error codes for dump fail
+
+ */
+status_t CameraHal::dump(int fd) const
+{
+ LOG_FUNCTION_NAME;
+ ///Implement this method when the h/w dump function is supported on Ducati side
+ return NO_ERROR;
+}
+
+/*-------------Camera Hal Interface Method definitions ENDS here--------------------*/
+
+
+
+
+/*-------------Camera Hal Internal Method definitions STARTS here--------------------*/
+
+/**
+ @brief Constructor of CameraHal
+
+ Member variables are initialized here. No allocations should be done here as we
+ don't use c++ exceptions in the code.
+
+ */
+CameraHal::CameraHal(int cameraId)
+{
+ LOG_FUNCTION_NAME;
+
+ ///Initialize all the member variables to their defaults
+ mPreviewEnabled = false;
+ mPreviewBufs = NULL;
+ mImageBufs = NULL;
+ mBufProvider = NULL;
+ mPreviewStartInProgress = false;
+ mVideoBufs = NULL;
+ mVideoBufProvider = NULL;
+ mRecordingEnabled = false;
+ mDisplayPaused = false;
+ mSetPreviewWindowCalled = false;
+ mMsgEnabled = 0;
+ mAppCallbackNotifier = NULL;
+ mMemoryManager = NULL;
+ mCameraAdapter = NULL;
+ mBracketingEnabled = false;
+ mBracketingRunning = false;
+ mEventProvider = NULL;
+ mBracketRangePositive = 1;
+ mBracketRangeNegative = 1;
+ mMaxZoomSupported = 0;
+ mShutterEnabled = true;
+ mMeasurementEnabled = false;
+ mPreviewDataBufs = NULL;
+ mCameraProperties = NULL;
+ mCurrentTime = 0;
+ mFalsePreview = 0;
+ mImageOffsets = NULL;
+ mImageLength = 0;
+ mImageFd = 0;
+ mVideoOffsets = NULL;
+ mVideoFd = 0;
+ mVideoLength = 0;
+ mPreviewDataOffsets = NULL;
+ mPreviewDataFd = 0;
+ mPreviewDataLength = 0;
+ mPreviewFd = 0;
+ mPreviewWidth = 0;
+ mPreviewHeight = 0;
+ mPreviewLength = 0;
+ mPreviewOffsets = NULL;
+ mPreviewRunning = 0;
+ mPreviewStateOld = 0;
+ mRecordingEnabled = 0;
+ mRecordEnabled = 0;
+ mSensorListener = NULL;
+ mVideoWidth = 0;
+ mVideoHeight = 0;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //Initialize the CameraHAL constructor timestamp, which is used in the
+ // PPM() method as time reference if the user does not supply one.
+ gettimeofday(&ppm_start, NULL);
+
+#endif
+
+ mCameraIndex = cameraId;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Destructor of CameraHal
+
+ This function simply calls deinitialize() to free up memory allocated during the construction
+ phase
+ */
+CameraHal::~CameraHal()
+{
+ LOG_FUNCTION_NAME;
+
+ ///Call de-initialize here once more - it is the last chance for us to relinquish all the h/w and s/w resources
+ deinitialize();
+
+ if ( NULL != mEventProvider )
+ {
+ mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
+ delete mEventProvider;
+ mEventProvider = NULL;
+ }
+
+ /// Free the callback notifier
+ mAppCallbackNotifier.clear();
+
+ /// Free the display adapter
+ mDisplayAdapter.clear();
+
+ if ( NULL != mCameraAdapter ) {
+ int strongCount = mCameraAdapter->getStrongCount();
+
+ mCameraAdapter->decStrong(mCameraAdapter);
+
+ mCameraAdapter = NULL;
+ }
+
+ freeImageBufs();
+
+ /// Free the memory manager
+ mMemoryManager.clear();
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Initialize the Camera HAL
+
+ Creates CameraAdapter, AppCallbackNotifier, DisplayAdapter and MemoryManager
+
+ @param None
+ @return NO_ERROR - On success
+ NO_MEMORY - On failure to allocate memory for any of the objects
+ @remarks Camera Hal internal function
+
+ */
+
+status_t CameraHal::initialize(CameraProperties::Properties* properties)
+{
+ LOG_FUNCTION_NAME;
+
+ int sensor_index = 0;
+
+ ///Initialize the event mask used for registering an event provider for AppCallbackNotifier
+ ///Currently, all events are registered as coming from the CameraAdapter
+ int32_t eventMask = CameraHalEvent::ALL_EVENTS;
+
+ // Get my camera properties
+ mCameraProperties = properties;
+
+ if(!mCameraProperties)
+ {
+ goto fail_loop;
+ }
+
+ // Dump the properties of this Camera
+ // will only print if DEBUG macro is defined
+ mCameraProperties->dump();
+
+ if (strcmp(CameraProperties::DEFAULT_VALUE, mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX)) != 0 )
+ {
+ sensor_index = atoi(mCameraProperties->get(CameraProperties::CAMERA_SENSOR_INDEX));
+ }
+
+ CAMHAL_LOGDB("Sensor index %d", sensor_index);
+
+ mCameraAdapter = CameraAdapter_Factory(sensor_index);
+ if ( ( NULL == mCameraAdapter ) || (mCameraAdapter->initialize(properties)!=NO_ERROR))
+ {
+ CAMHAL_LOGEA("Unable to create or initialize CameraAdapter");
+ mCameraAdapter = NULL;
+ goto fail_loop;
+ }
+
+ mCameraAdapter->incStrong(mCameraAdapter);
+ mCameraAdapter->registerImageReleaseCallback(releaseImageBuffers, (void *) this);
+ mCameraAdapter->registerEndCaptureCallback(endImageCapture, (void *)this);
+
+ if(!mAppCallbackNotifier.get())
+ {
+ /// Create the callback notifier
+ mAppCallbackNotifier = new AppCallbackNotifier();
+ if( ( NULL == mAppCallbackNotifier.get() ) || ( mAppCallbackNotifier->initialize() != NO_ERROR))
+ {
+ CAMHAL_LOGEA("Unable to create or initialize AppCallbackNotifier");
+ goto fail_loop;
+ }
+ }
+
+ if(!mMemoryManager.get())
+ {
+ /// Create Memory Manager
+ mMemoryManager = new MemoryManager();
+ if( ( NULL == mMemoryManager.get() ) || ( mMemoryManager->initialize() != NO_ERROR))
+ {
+ CAMHAL_LOGEA("Unable to create or initialize MemoryManager");
+ goto fail_loop;
+ }
+ }
+
+ ///Setup the class dependencies...
+
+ ///AppCallbackNotifier has to know where to get the Camera frames and the events like auto focus lock etc from.
+ ///CameraAdapter is the one which provides those events
+ ///Set it as the frame and event providers for AppCallbackNotifier
+ ///@remarks setEventProvider API takes in a bit mask of events for registering a provider for the different events
+ /// That way, if events can come from DisplayAdapter in future, we will be able to add it as provider
+ /// for any event
+ mAppCallbackNotifier->setEventProvider(eventMask, mCameraAdapter);
+ mAppCallbackNotifier->setFrameProvider(mCameraAdapter);
+
+ ///Any dynamic errors that happen during the camera use case have to be propagated back to the application
+ ///via CAMERA_MSG_ERROR. AppCallbackNotifier is the class that notifies such errors to the application
+ ///Set it as the error handler for CameraAdapter
+ mCameraAdapter->setErrorHandler(mAppCallbackNotifier.get());
+
+ ///Start the callback notifier
+ if(mAppCallbackNotifier->start() != NO_ERROR)
+ {
+ CAMHAL_LOGEA("Couldn't start AppCallbackNotifier");
+ goto fail_loop;
+ }
+
+ CAMHAL_LOGDA("Started AppCallbackNotifier..");
+ mAppCallbackNotifier->setMeasurements(mMeasurementEnabled);
+
+ ///Initialize default parameters
+ initDefaultParameters();
+
+
+ if ( setParameters(mParameters) != NO_ERROR )
+ {
+ CAMHAL_LOGEA("Failed to set default parameters?!");
+ }
+
+ // register for sensor events
+ mSensorListener = new SensorListener();
+ if (mSensorListener.get()) {
+ if (mSensorListener->initialize() == NO_ERROR) {
+ mSensorListener->setCallbacks(orientation_cb, this);
+ mSensorListener->enableSensor(SensorListener::SENSOR_ORIENTATION);
+ } else {
+ CAMHAL_LOGEA("Error initializing SensorListener. not fatal, continuing");
+ mSensorListener.clear();
+ mSensorListener = NULL;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+
+ fail_loop:
+
+ ///Free up the resources because we failed somewhere up
+ deinitialize();
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_MEMORY;
+
+}
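+
+// Illustrative bring-up sketch (comment only, not part of the HAL): how a caller is
+// expected to pair the constructor and initialize(), mirroring camera_device_open()
+// in CameraHal_Module.cpp. Error handling is trimmed; 'gCameraProperties' is assumed
+// to be an already initialized CameraProperties instance.
+//
+// CameraProperties::Properties *props = NULL;
+// if ( gCameraProperties.getProperties(cameraId, &props) >= 0 ) {
+//     CameraHal *hal = new CameraHal(cameraId);
+//     if ( hal->initialize(props) != NO_ERROR ) {
+//         delete hal;   // initialize() already ran deinitialize() on its failure path
+//         hal = NULL;
+//     }
+// }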
+
+bool CameraHal::isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions)
+{
+ bool ret = true;
+ status_t status = NO_ERROR;
+ char tmpBuffer[PARAM_BUFFER + 1];
+ char *pos = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedResolutions )
+ {
+ CAMHAL_LOGEA("Invalid supported resolutions string");
+ ret = false;
+ goto exit;
+ }
+
+ status = snprintf(tmpBuffer, PARAM_BUFFER, "%dx%d", width, height);
+ if ( 0 > status )
+ {
+ CAMHAL_LOGEA("Error encountered while generating validation string");
+ ret = false;
+ goto exit;
+ }
+
+ pos = strstr(supportedResolutions, tmpBuffer);
+ if ( NULL == pos )
+ {
+ ret = false;
+ }
+ else
+ {
+ ret = true;
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+bool CameraHal::isParameterValid(const char *param, const char *supportedParams)
+{
+ bool ret = true;
+ char *pos = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedParams )
+ {
+ CAMHAL_LOGEA("Invalid supported parameters string");
+ ret = false;
+ goto exit;
+ }
+
+ if ( NULL == param )
+ {
+ CAMHAL_LOGEA("Invalid parameter string");
+ ret = false;
+ goto exit;
+ }
+
+ pos = strstr(supportedParams, param);
+ if ( NULL == pos )
+ {
+ ret = false;
+ }
+ else
+ {
+ ret = true;
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+bool CameraHal::isParameterValid(int param, const char *supportedParams)
+{
+ bool ret = true;
+ char *pos = NULL;
+ status_t status;
+ char tmpBuffer[PARAM_BUFFER + 1];
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == supportedParams )
+ {
+ CAMHAL_LOGEA("Invalid supported parameters string");
+ ret = false;
+ goto exit;
+ }
+
+ status = snprintf(tmpBuffer, PARAM_BUFFER, "%d", param);
+ if ( 0 > status )
+ {
+ CAMHAL_LOGEA("Error encountered while generating validation string");
+ ret = false;
+ goto exit;
+ }
+
+ pos = strstr(supportedParams, tmpBuffer);
+ if ( NULL == pos )
+ {
+ ret = false;
+ }
+ else
+ {
+ ret = true;
+ }
+
+exit:
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
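+
+// Illustrative usage sketch for the validation helpers above, as they would be called
+// from inside a CameraHal member function. The literal value strings are hypothetical
+// examples of the comma-separated lists stored in CameraProperties. Note that matching
+// is substring based (strstr), so callers rely on exact tokens in the supported list.
+//
+// bool okRes = isResolutionValid(640, 480, "640x480,320x240");   // true
+// bool okFmt = isParameterValid("yuv420sp", "yuv420sp,rgb565");  // true
+// bool okFps = isParameterValid(15, "10,15,30");                 // true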
+
+status_t CameraHal::doesSetParameterNeedUpdate(const char* new_param, const char* old_param, bool& update) {
+ if (!new_param || !old_param) {
+ return -EINVAL;
+ }
+
+ // if params mismatch we should update parameters for camera adapter
+ if ((strcmp(new_param, old_param) != 0)) {
+ update = true;
+ }
+
+ return NO_ERROR;
+}
+
+status_t CameraHal::parseResolution(const char *resStr, int &width, int &height)
+{
+ status_t ret = NO_ERROR;
+ char *ctx, *pWidth, *pHeight;
+ const char *sep = "x";
+ char *tmp = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == resStr )
+ {
+ return -EINVAL;
+ }
+
+ //This fixes "Invalid input resolution"
+ char *resStr_copy = (char *)malloc(strlen(resStr) + 1);
+ if ( NULL!=resStr_copy ) {
+ if ( NO_ERROR == ret )
+ {
+ strcpy(resStr_copy, resStr);
+ pWidth = strtok_r( (char *) resStr_copy, sep, &ctx);
+
+ if ( NULL != pWidth )
+ {
+ width = atoi(pWidth);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid input resolution %s", resStr);
+ ret = -EINVAL;
+ }
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ pHeight = strtok_r(NULL, sep, &ctx);
+
+ if ( NULL != pHeight )
+ {
+ height = atoi(pHeight);
+ }
+ else
+ {
+ CAMHAL_LOGEB("Invalid input resolution %s", resStr);
+ ret = -EINVAL;
+ }
+ }
+
+ free(resStr_copy);
+ resStr_copy = NULL;
+ }
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
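+
+// Illustrative usage sketch for parseResolution(); the "640x480" literal is only an
+// example of the "<width>x<height>" format the function expects.
+//
+// int w = 0, h = 0;
+// if ( parseResolution("640x480", w, h) == NO_ERROR ) {
+//     // w == 640, h == 480
+// }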
+
+void CameraHal::insertSupportedParams()
+{
+ char tmpBuffer[PARAM_BUFFER + 1];
+
+ LOG_FUNCTION_NAME;
+
+ CameraParameters &p = mParameters;
+
+ ///Set the name of the camera
+ p.set(TICameraParameters::KEY_CAMERA_NAME, mCameraProperties->get(CameraProperties::CAMERA_NAME));
+
+ mMaxZoomSupported = atoi(mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+
+ p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+ p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+ p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES));
+ p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE));
+ p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS));
+ p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+ p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES));
+ p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES));
+ p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING));
+ p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX));
+ p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN));
+ p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP));
+ p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+ p.set(TICameraParameters::KEY_SUPPORTED_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES));
+ p.set(TICameraParameters::KEY_SUPPORTED_ISO_VALUES, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES));
+ p.set(CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS));
+ p.set(CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+ p.set(CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED));
+ p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED));
+ p.set(TICameraParameters::KEY_SUPPORTED_IPP, mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES));
+ p.set(TICameraParameters::KEY_S3D_SUPPORTED,mCameraProperties->get(CameraProperties::S3D_SUPPORTED));
+ p.set(TICameraParameters::KEY_S3D2D_PREVIEW_MODE,mCameraProperties->get(CameraProperties::S3D2D_PREVIEW_MODES));
+ p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE));
+ p.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES));
+ p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED));
+ p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+ p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
+ p.set(TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES));
+ p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED));
+ p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED));
+ p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, mCameraProperties->get(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED));
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+void CameraHal::initDefaultParameters()
+{
+ //Purpose of this function is to initialize the default current and supported parameters for the currently
+ //selected camera.
+
+ CameraParameters &p = mParameters;
+ int currentRevision, adapterRevision;
+ status_t ret = NO_ERROR;
+ int width, height;
+
+ LOG_FUNCTION_NAME;
+
+ ret = parseResolution(mCameraProperties->get(CameraProperties::PREVIEW_SIZE), width, height);
+
+ if ( NO_ERROR == ret )
+ {
+ p.setPreviewSize(width, height);
+ }
+ else
+ {
+ p.setPreviewSize(MIN_WIDTH, MIN_HEIGHT);
+ }
+
+ ret = parseResolution(mCameraProperties->get(CameraProperties::PICTURE_SIZE), width, height);
+
+ if ( NO_ERROR == ret )
+ {
+ p.setPictureSize(width, height);
+ }
+ else
+ {
+ p.setPictureSize(PICTURE_WIDTH, PICTURE_HEIGHT);
+ }
+
+ ret = parseResolution(mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_SIZE), width, height);
+
+ if ( NO_ERROR == ret )
+ {
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
+ }
+ else
+ {
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH);
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT);
+ }
+
+ insertSupportedParams();
+
+ //Insert default values
+ p.setPreviewFrameRate(atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)));
+ p.setPreviewFormat(mCameraProperties->get(CameraProperties::PREVIEW_FORMAT));
+ p.setPictureFormat(mCameraProperties->get(CameraProperties::PICTURE_FORMAT));
+ p.set(CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY));
+ p.set(CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE));
+ p.set(CameraParameters::KEY_EFFECT, mCameraProperties->get(CameraProperties::EFFECT));
+ p.set(CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING));
+ p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
+ p.set(CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE));
+ p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION));
+ p.set(CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE));
+ p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
+ p.set(CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM));
+ p.set(TICameraParameters::KEY_CONTRAST, mCameraProperties->get(CameraProperties::CONTRAST));
+ p.set(TICameraParameters::KEY_SATURATION, mCameraProperties->get(CameraProperties::SATURATION));
+ p.set(TICameraParameters::KEY_BRIGHTNESS, mCameraProperties->get(CameraProperties::BRIGHTNESS));
+ p.set(TICameraParameters::KEY_SHARPNESS, mCameraProperties->get(CameraProperties::SHARPNESS));
+ p.set(TICameraParameters::KEY_EXPOSURE_MODE, mCameraProperties->get(CameraProperties::EXPOSURE_MODE));
+ p.set(TICameraParameters::KEY_ISO, mCameraProperties->get(CameraProperties::ISO_MODE));
+ p.set(TICameraParameters::KEY_IPP, mCameraProperties->get(CameraProperties::IPP));
+ p.set(TICameraParameters::KEY_GBCE, mCameraProperties->get(CameraProperties::GBCE));
+ p.set(TICameraParameters::KEY_S3D2D_PREVIEW, mCameraProperties->get(CameraProperties::S3D2D_PREVIEW));
+ p.set(TICameraParameters::KEY_AUTOCONVERGENCE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE));
+ p.set(TICameraParameters::KEY_MANUALCONVERGENCE_VALUES, mCameraProperties->get(CameraProperties::MANUALCONVERGENCE_VALUES));
+ p.set(CameraParameters::KEY_VIDEO_STABILIZATION, mCameraProperties->get(CameraProperties::VSTAB));
+ p.set(CameraParameters::KEY_FOCAL_LENGTH, mCameraProperties->get(CameraProperties::FOCAL_LENGTH));
+ p.set(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE));
+ p.set(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE));
+ p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
+ p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
+ p.set(TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION_VALUES));
+ p.set(TICameraParameters::KEY_EXIF_MAKE, mCameraProperties->get(CameraProperties::EXIF_MAKE));
+ p.set(TICameraParameters::KEY_EXIF_MODEL, mCameraProperties->get(CameraProperties::EXIF_MODEL));
+ p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY));
+ p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar");
+ p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES));
+ p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES));
+
+ // Only one area a.k.a Touch AF for now.
+ // TODO: Add support for multiple focus areas.
+ p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS));
+ p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK));
+ p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK));
+ p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS));
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Stop a previously started preview.
+ @param none
+ @return none
+
+ */
+void CameraHal::forceStopPreview()
+{
+ LOG_FUNCTION_NAME;
+
+ // stop bracketing if it is running
+ stopImageBracketing();
+
+ if(mDisplayAdapter.get() != NULL) {
+ ///Stop the buffer display first
+ mDisplayAdapter->disableDisplay();
+ }
+
+ if(mAppCallbackNotifier.get() != NULL) {
+ //Stop the callback sending
+ mAppCallbackNotifier->stop();
+ mAppCallbackNotifier->flushAndReturnFrames();
+ mAppCallbackNotifier->stopPreviewCallbacks();
+ }
+
+ if ( NULL != mCameraAdapter ) {
+ // only need to send these control commands to the state machine if we are
+ // past the LOADED_PREVIEW_STATE
+ if (mCameraAdapter->getState() > CameraAdapter::LOADED_PREVIEW_STATE) {
+ // according to the javadoc, face detection should be stopped in stopPreview
+ // and the application needs to call startFaceDetection again
+ // to restart it
+ mCameraAdapter->sendCommand(CameraAdapter::CAMERA_STOP_FD);
+ }
+
+ LOGD("rollback!!!!!!!!");
+ mCameraAdapter->rollbackToInitializedState();
+
+ }
+
+ freePreviewBufs();
+ freePreviewDataBufs();
+
+ mPreviewEnabled = false;
+ mDisplayPaused = false;
+ mPreviewStartInProgress = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Deallocates memory for all the resources held by Camera HAL.
+
+ Frees the following objects- CameraAdapter, AppCallbackNotifier, DisplayAdapter,
+ and Memory Manager
+
+ @param none
+ @return none
+
+ */
+void CameraHal::deinitialize()
+{
+ LOG_FUNCTION_NAME;
+
+ if ( mPreviewEnabled || mDisplayPaused ) {
+ forceStopPreview();
+ }
+
+ mSetPreviewWindowCalled = false;
+
+ if (mSensorListener.get()) {
+ mSensorListener->disableSensor(SensorListener::SENSOR_ORIENTATION);
+ mSensorListener.clear();
+ mSensorListener = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
+
+status_t CameraHal::storeMetaDataInBuffers(bool enable)
+{
+ LOG_FUNCTION_NAME;
+
+ // Capture the result first so the exit trace below is reachable
+ status_t ret = mAppCallbackNotifier->useMetaDataBufferMode(enable);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+void CameraHal::selectFPSRange(int framerate, int *min_fps, int *max_fps)
+{
+ char * ptr;
+ char supported[MAX_PROP_VALUE_LENGTH];
+ int fpsrangeArray[2];
+ int i = 0;
+
+ LOG_FUNCTION_NAME;
+ // bound the copy to the local buffer and keep it null-terminated
+ strncpy(supported, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED), sizeof(supported) - 1);
+ supported[sizeof(supported) - 1] = '\0';
+
+ ptr = strtok (supported," (,)");
+
+ while (ptr != NULL)
+ {
+ fpsrangeArray[i]= atoi(ptr)/CameraHal::VFR_SCALE;
+ if (i == 1)
+ {
+ if (framerate == fpsrangeArray[i])
+ {
+ CAMHAL_LOGDB("SETTING FPS RANGE min = %d max = %d \n", fpsrangeArray[0], fpsrangeArray[1]);
+ *min_fps = fpsrangeArray[0]*CameraHal::VFR_SCALE;
+ *max_fps = fpsrangeArray[1]*CameraHal::VFR_SCALE;
+ break;
+ }
+ }
+ ptr = strtok (NULL, " (,)");
+ i++;
+ i%=2;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+}
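+
+// Illustrative sketch of how selectFPSRange() resolves a single frame rate into a
+// (min,max) range. The supported-range string is a hypothetical example of
+// CameraProperties::FRAMERATE_RANGE_SUPPORTED, and VFR_SCALE is assumed to be 1000.
+//
+// // With FRAMERATE_RANGE_SUPPORTED = "(15000,30000),(24000,30000)" a requested
+// // frame rate of 30 matches the first range whose scaled maximum equals 30:
+// int minFps = 0, maxFps = 0;
+// selectFPSRange(30, &minFps, &maxFps);   // minFps == 15000, maxFps == 30000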
+
+void CameraHal::setPreferredPreviewRes(int width, int height)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( (width == 320) && (height == 240)){
+ mParameters.setPreviewSize(640,480);
+ }
+ if ( (width == 176) && (height == 144)){
+ mParameters.setPreviewSize(704,576);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void CameraHal::resetPreviewRes(CameraParameters *mParams, int width, int height)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( (width <= 320) && (height <= 240)){
+ mParams->setPreviewSize(mVideoWidth, mVideoHeight);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+};
+
+
diff --git a/CameraHalCommon.cpp b/CameraHalCommon.cpp
new file mode 100644
index 0000000..6d4ea2c
--- a/dev/null
+++ b/CameraHalCommon.cpp
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "CameraHal.h"
+
+namespace android {
+
+const char CameraHal::PARAMS_DELIMITER []= ",";
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+struct timeval CameraHal::ppm_start;
+
+#endif
+
+#if PPM_INSTRUMENTATION
+
+/**
+ @brief PPM instrumentation
+
+ Dumps the current time offset. The time reference point
+ lies within the CameraHAL constructor.
+
+ @param str - log message
+ @return none
+
+ */
+void CameraHal::PPM(const char* str){
+ struct timeval ppm;
+
+ gettimeofday(&ppm, NULL);
+ ppm.tv_sec = ppm.tv_sec - ppm_start.tv_sec;
+ ppm.tv_sec = ppm.tv_sec * 1000000;
+ ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_start.tv_usec;
+
+ LOGD("PPM: %s :%ld.%ld ms", str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ));
+}
+
+#elif PPM_INSTRUMENTATION_ABS
+
+/**
+ @brief PPM instrumentation
+
+ Dumps the current time offset. The time reference point
+ lies within the CameraHAL constructor. This implementation
+ also dumps the absolute timestamp, which is useful when
+ post-processing is done with data coming from the upper
+ layers (Camera application etc.)
+
+ @param str - log message
+ @return none
+
+ */
+void CameraHal::PPM(const char* str){
+ struct timeval ppm;
+
+ unsigned long long elapsed, absolute;
+ gettimeofday(&ppm, NULL);
+ elapsed = ppm.tv_sec - ppm_start.tv_sec;
+ elapsed *= 1000000;
+ elapsed += ppm.tv_usec - ppm_start.tv_usec;
+ absolute = ppm.tv_sec;
+ absolute *= 1000;
+ absolute += ppm.tv_usec /1000;
+
+ LOGD("PPM: %s :%llu.%llu ms : %llu ms", str, ( elapsed /1000 ), ( elapsed % 1000 ), absolute);
+}
+
+#endif
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+/**
+ @brief PPM instrumentation
+
+ Calculates and dumps the elapsed time using 'ppm_first' as
+ reference.
+
+ @param str - log message
+ @return none
+
+ */
+void CameraHal::PPM(const char* str, struct timeval* ppm_first, ...){
+ char temp_str[256];
+ struct timeval ppm;
+ unsigned long long absolute;
+ va_list args;
+
+ va_start(args, ppm_first);
+ vsnprintf(temp_str, sizeof(temp_str), str, args); // bounded to avoid overflowing temp_str
+ gettimeofday(&ppm, NULL);
+ absolute = ppm.tv_sec;
+ absolute *= 1000;
+ absolute += ppm.tv_usec /1000;
+ ppm.tv_sec = ppm.tv_sec - ppm_first->tv_sec;
+ ppm.tv_sec = ppm.tv_sec * 1000000;
+ ppm.tv_sec = ppm.tv_sec + ppm.tv_usec - ppm_first->tv_usec;
+
+ LOGD("PPM: %s :%ld.%ld ms : %llu ms", temp_str, ( ppm.tv_sec /1000 ), ( ppm.tv_sec % 1000 ), absolute);
+
+ va_end(args);
+}
+
+#endif
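+
+// Illustrative sketch (comment only) of how the PPM() instrumentation above is used
+// from inside a CameraHal method when PPM_INSTRUMENTATION / PPM_INSTRUMENTATION_ABS is
+// enabled. 'mStartCapture' is a hypothetical timeval owned by the caller.
+//
+// struct timeval mStartCapture;
+// gettimeofday(&mStartCapture, NULL);                 // mark a caller-side reference point
+// // ... capture work happens here ...
+// PPM("Shot to snapshot measured", &mStartCapture);   // elapsed since mStartCapture
+// PPM("Preview started");                             // elapsed since the CameraHal constructor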
+
+};
+
+
diff --git a/CameraHalUtilClasses.cpp b/CameraHalUtilClasses.cpp
new file mode 100644
index 0000000..3074bc4
--- a/dev/null
+++ b/CameraHalUtilClasses.cpp
@@ -0,0 +1,362 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraHalUtilClasses.cpp
+*
+* This file contains the utility classes used by the Camera HAL: FrameProvider, EventProvider and CameraArea.
+*
+*/
+
+#define LOG_TAG "CameraHAL"
+
+
+#include "CameraHal.h"
+
+namespace android {
+
+/*--------------------FrameProvider Class STARTS here-----------------------------*/
+
+int FrameProvider::enableFrameNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ ///Enable the frame notification to CameraAdapter (which implements FrameNotifier interface)
+ mFrameNotifier->enableMsgType(frameTypes<<MessageNotifier::FRAME_BIT_FIELD_POSITION
+ , mFrameCallback
+ , NULL
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int FrameProvider::disableFrameNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ mFrameNotifier->disableMsgType(frameTypes<<MessageNotifier::FRAME_BIT_FIELD_POSITION
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int FrameProvider::returnFrame(void *frameBuf, CameraFrame::FrameType frameType)
+{
+ status_t ret = NO_ERROR;
+
+ mFrameNotifier->returnFrame(frameBuf, frameType);
+
+ return ret;
+}
+
+void FrameProvider::addFramePointers(void *frameBuf, void *buf)
+{
+ mFrameNotifier->addFramePointers(frameBuf, buf);
+ return;
+}
+
+void FrameProvider::removeFramePointers()
+{
+ mFrameNotifier->removeFramePointers();
+ return;
+}
+
+/*--------------------FrameProvider Class ENDS here-----------------------------*/
+
+/*--------------------EventProvider Class STARTS here-----------------------------*/
+
+int EventProvider::enableEventNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ ///Enable the event notification to CameraAdapter
+ mEventNotifier->enableMsgType(frameTypes<<MessageNotifier::EVENT_BIT_FIELD_POSITION
+ , NULL
+ , mEventCallback
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+int EventProvider::disableEventNotification(int32_t frameTypes)
+{
+ LOG_FUNCTION_NAME;
+ status_t ret = NO_ERROR;
+
+ // use the event bit-field position here to mirror enableEventNotification() above
+ mEventNotifier->disableMsgType(frameTypes<<MessageNotifier::EVENT_BIT_FIELD_POSITION
+ , mCookie
+ );
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+/*--------------------EventProvider Class ENDS here-----------------------------*/
+
+/*--------------------CameraArea Class STARTS here-----------------------------*/
+
+status_t CameraArea::transfrom(size_t width,
+ size_t height,
+ size_t &top,
+ size_t &left,
+ size_t &areaWidth,
+ size_t &areaHeight)
+{
+ status_t ret = NO_ERROR;
+ size_t hRange, vRange;
+ double hScale, vScale;
+
+ LOG_FUNCTION_NAME
+
+ hRange = CameraArea::RIGHT - CameraArea::LEFT;
+ vRange = CameraArea::BOTTOM - CameraArea::TOP;
+ hScale = ( double ) width / ( double ) hRange;
+ vScale = ( double ) height / ( double ) vRange;
+
+ top = ( mTop + vRange / 2 ) * vScale;
+ left = ( mLeft + hRange / 2 ) * hScale;
+ areaHeight = ( mBottom + vRange / 2 ) * vScale;
+ areaHeight -= top;
+ areaWidth = ( mRight + hRange / 2) * hScale;
+ areaWidth -= left;
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
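+
+// Worked example (comment only) for the transformation above, assuming the standard
+// Android area coordinate space of LEFT/TOP = -1000 and RIGHT/BOTTOM = 1000. For a
+// 640x480 preview and an area with mLeft=-400, mTop=-300, mRight=400, mBottom=300:
+//
+//   hScale = 640 / 2000 = 0.32,  vScale = 480 / 2000 = 0.24
+//   left   = (-400 + 1000) * 0.32 = 192
+//   top    = (-300 + 1000) * 0.24 = 168
+//   areaWidth  = (400 + 1000) * 0.32 - 192 = 256
+//   areaHeight = (300 + 1000) * 0.24 - 168 = 144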
+
+status_t CameraArea::checkArea(ssize_t top,
+ ssize_t left,
+ ssize_t bottom,
+ ssize_t right,
+ ssize_t weight)
+{
+
+ //Handles the invalid region corner case.
+ if ( ( 0 == top ) && ( 0 == left ) && ( 0 == bottom ) && ( 0 == right ) && ( 0 == weight ) ) {
+ return NO_ERROR;
+ }
+
+ if ( ( CameraArea::WEIGHT_MIN > weight ) || ( CameraArea::WEIGHT_MAX < weight ) ) {
+ CAMHAL_LOGEB("Camera area weight is invalid %d", weight);
+ return -EINVAL;
+ }
+
+ if ( ( CameraArea::TOP > top ) || ( CameraArea::BOTTOM < top ) ) {
+ CAMHAL_LOGEB("Camera area top coordinate is invalid %d", top );
+ return -EINVAL;
+ }
+
+ if ( ( CameraArea::TOP > bottom ) || ( CameraArea::BOTTOM < bottom ) ) {
+ CAMHAL_LOGEB("Camera area bottom coordinate is invalid %d", bottom );
+ return -EINVAL;
+ }
+
+ if ( ( CameraArea::LEFT > left ) || ( CameraArea::RIGHT < left ) ) {
+ CAMHAL_LOGEB("Camera area left coordinate is invalid %d", left );
+ return -EINVAL;
+ }
+
+ if ( ( CameraArea::LEFT > right ) || ( CameraArea::RIGHT < right ) ) {
+ CAMHAL_LOGEB("Camera area right coordinate is invalid %d", right );
+ return -EINVAL;
+ }
+
+ if ( left >= right ) {
+ CAMHAL_LOGEA("Camera area left larger than right");
+ return -EINVAL;
+ }
+
+ if ( top >= bottom ) {
+ CAMHAL_LOGEA("Camera area top larger than bottom");
+ return -EINVAL;
+ }
+
+ return NO_ERROR;
+}
+
+status_t CameraArea::parseAreas(const char *area,
+ size_t areaLength,
+ Vector< sp<CameraArea> > &areas)
+{
+ status_t ret = NO_ERROR;
+ char *ctx;
+ char *pArea = NULL;
+ char *pStart = NULL;
+ char *pEnd = NULL;
+ const char *startToken = "(";
+ const char endToken = ')';
+ const char sep = ',';
+ ssize_t top, left, bottom, right, weight;
+ char *tmpBuffer = NULL;
+ sp<CameraArea> currentArea;
+
+ LOG_FUNCTION_NAME
+
+ if ( ( NULL == area ) ||
+ ( 0 >= areaLength ) )
+ {
+ return -EINVAL;
+ }
+
+ tmpBuffer = ( char * ) malloc(areaLength);
+ if ( NULL == tmpBuffer )
+ {
+ return -ENOMEM;
+ }
+
+ memcpy(tmpBuffer, area, areaLength);
+
+ pArea = strtok_r(tmpBuffer, startToken, &ctx);
+
+ do
+ {
+
+ pStart = pArea;
+ if ( NULL == pStart )
+ {
+ CAMHAL_LOGEA("Parsing of the left area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ left = static_cast<ssize_t>(strtol(pStart, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the top area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ top = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the right area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ right = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the bottom area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ bottom = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( sep != *pEnd )
+ {
+ CAMHAL_LOGEA("Parsing of the weight area coordinate failed!");
+ ret = -EINVAL;
+ break;
+ }
+ else
+ {
+ weight = static_cast<ssize_t>(strtol(pEnd+1, &pEnd, 10));
+ }
+
+ if ( endToken != *pEnd )
+ {
+ CAMHAL_LOGEA("Malformed area!");
+ ret = -EINVAL;
+ break;
+ }
+
+ ret = checkArea(top, left, bottom, right, weight);
+ if ( NO_ERROR != ret ) {
+ break;
+ }
+
+ currentArea = new CameraArea(top, left, bottom, right, weight);
+ CAMHAL_LOGDB("Area parsed [%dx%d, %dx%d] %d",
+ ( int ) top,
+ ( int ) left,
+ ( int ) bottom,
+ ( int ) right,
+ ( int ) weight);
+ if ( NULL != currentArea.get() )
+ {
+ areas.add(currentArea);
+ }
+ else
+ {
+ ret = -ENOMEM;
+ break;
+ }
+
+ pArea = strtok_r(NULL, startToken, &ctx);
+
+ }
+ while ( NULL != pArea );
+
+ if ( NULL != tmpBuffer )
+ {
+ free(tmpBuffer);
+ }
+
+ LOG_FUNCTION_NAME_EXIT
+
+ return ret;
+}
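+
+// Illustrative sketch of parseAreas() input and output, using the Android-style area
+// string format "(left,top,right,bottom,weight)". The literal string is only an example.
+//
+// Vector< sp<CameraArea> > areas;
+// const char *str = "(-400,-300,400,300,1)";
+// if ( CameraArea::parseAreas(str, strlen(str) + 1, areas) == NO_ERROR ) {
+//     // areas holds one CameraArea with left=-400, top=-300, right=400, bottom=300, weight=1
+// }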
+
+bool CameraArea::areAreasDifferent(Vector< sp<CameraArea> > &area1,
+ Vector< sp<CameraArea> > &area2) {
+ if (area1.size() != area2.size()) {
+ return true;
+ }
+
+ // not going to care about sorting order for now
+ for (int i = 0; i < area1.size(); i++) {
+ if (!area1.itemAt(i)->compare(area2.itemAt(i))) {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+bool CameraArea::compare(const sp<CameraArea> &area) {
+ return ((mTop == area->mTop) && (mLeft == area->mLeft) &&
+ (mBottom == area->mBottom) && (mRight == area->mRight) &&
+ (mWeight == area->mWeight));
+}
+
+
+/*--------------------CameraArea Class ENDS here-----------------------------*/
+
+};
diff --git a/CameraHal_Module.cpp b/CameraHal_Module.cpp
new file mode 100644
index 0000000..52b237c
--- a/dev/null
+++ b/CameraHal_Module.cpp
@@ -0,0 +1,682 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#define LOG_TAG "CameraHAL"
+
+#include <utils/threads.h>
+
+#include "CameraHal.h"
+#include "CameraProperties.h"
+#include "TICameraParameters.h"
+
+
+static android::CameraProperties gCameraProperties;
+static android::CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED];
+static unsigned int gCamerasOpen = 0;
+static android::Mutex gCameraHalDeviceLock;
+
+static int camera_device_open(const hw_module_t* module, const char* name,
+ hw_device_t** device);
+static int camera_device_close(hw_device_t* device);
+static int camera_get_number_of_cameras(void);
+static int camera_get_camera_info(int camera_id, struct camera_info *info);
+
+static struct hw_module_methods_t camera_module_methods = {
+ open: camera_device_open
+};
+
+camera_module_t HAL_MODULE_INFO_SYM = {
+ common: {
+ tag: HARDWARE_MODULE_TAG,
+ version_major: 1,
+ version_minor: 0,
+ id: CAMERA_HARDWARE_MODULE_ID,
+ name: "CameraHal Module",
+ author: "Amlogic",
+ methods: &camera_module_methods,
+ dso: NULL, /* remove compilation warnings */
+ reserved: {0}, /* remove compilation warnings */
+ },
+ get_number_of_cameras: camera_get_number_of_cameras,
+ get_camera_info: camera_get_camera_info,
+};
+
+typedef struct aml_camera_device {
+ camera_device_t base;
+ /* vendor-specific "private" data can go here (base.priv) */
+ int cameraid;
+} aml_camera_device_t;
+
+
+/*******************************************************************
+ * implementation of camera_device_ops functions
+ *******************************************************************/
+
+int camera_set_preview_window(struct camera_device * device,
+ struct preview_stream_ops *window)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->setPreviewWindow(window);
+
+ return rv;
+}
+
+void camera_set_callbacks(struct camera_device * device,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ gCameraHals[aml_dev->cameraid]->setCallbacks(notify_cb, data_cb, data_cb_timestamp, get_memory, user);
+}
+
+void camera_enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ gCameraHals[aml_dev->cameraid]->enableMsgType(msg_type);
+}
+
+void camera_disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ gCameraHals[aml_dev->cameraid]->disableMsgType(msg_type);
+}
+
+int camera_msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return 0;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ return gCameraHals[aml_dev->cameraid]->msgTypeEnabled(msg_type);
+}
+
+int camera_start_preview(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->startPreview();
+
+ return rv;
+}
+
+void camera_stop_preview(struct camera_device * device)
+{
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ gCameraHals[aml_dev->cameraid]->stopPreview();
+}
+
+int camera_preview_enabled(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->previewEnabled();
+ return rv;
+}
+
+int camera_store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ // TODO: meta data buffers are not currently supported
+ rv = gCameraHals[aml_dev->cameraid]->storeMetaDataInBuffers(enable);
+ return rv;
+ //return enable ? android::INVALID_OPERATION: android::OK;
+}
+
+int camera_start_recording(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->startRecording();
+ return rv;
+}
+
+void camera_stop_recording(struct camera_device * device)
+{
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ gCameraHals[aml_dev->cameraid]->stopRecording();
+}
+
+int camera_recording_enabled(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->recordingEnabled();
+ return rv;
+}
+
+void camera_release_recording_frame(struct camera_device * device,
+ const void *opaque)
+{
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ gCameraHals[aml_dev->cameraid]->releaseRecordingFrame(opaque);
+}
+
+int camera_auto_focus(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->autoFocus();
+ return rv;
+}
+
+int camera_cancel_auto_focus(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->cancelAutoFocus();
+ return rv;
+}
+
+int camera_take_picture(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->takePicture();
+ return rv;
+}
+
+int camera_cancel_picture(struct camera_device * device)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->cancelPicture();
+ return rv;
+}
+
+int camera_set_parameters(struct camera_device * device, const char *params)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->setParameters(params);
+ return rv;
+}
+
+char* camera_get_parameters(struct camera_device * device)
+{
+ char* param = NULL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return NULL;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ param = gCameraHals[aml_dev->cameraid]->getParameters();
+
+ return param;
+}
+
+static void camera_put_parameters(struct camera_device *device, char *parms)
+{
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ gCameraHals[aml_dev->cameraid]->putParameters(parms);
+}
+
+int camera_send_command(struct camera_device * device,
+ int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->sendCommand(cmd, arg1, arg2);
+ return rv;
+}
+
+void camera_release(struct camera_device * device)
+{
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ if(!device)
+ return;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ gCameraHals[aml_dev->cameraid]->release();
+}
+
+int camera_dump(struct camera_device * device, int fd)
+{
+ int rv = -EINVAL;
+ aml_camera_device_t* aml_dev = NULL;
+
+ if(!device)
+ return rv;
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ rv = gCameraHals[aml_dev->cameraid]->dump(fd);
+ return rv;
+}
+
+extern "C" void heaptracker_free_leaked_memory(void);
+
+int camera_device_close(hw_device_t* device)
+{
+ int ret = 0;
+ aml_camera_device_t* aml_dev = NULL;
+
+ LOGV("%s", __FUNCTION__);
+
+ android::Mutex::Autolock lock(gCameraHalDeviceLock);
+
+ if (!device) {
+ ret = -EINVAL;
+ goto done;
+ }
+
+ aml_dev = (aml_camera_device_t*) device;
+
+ if (aml_dev) {
+ if (gCameraHals[aml_dev->cameraid]) {
+ delete gCameraHals[aml_dev->cameraid];
+ gCameraHals[aml_dev->cameraid] = NULL;
+ gCamerasOpen--;
+ }
+
+ if (aml_dev->base.ops) {
+ free(aml_dev->base.ops);
+ }
+ free(aml_dev);
+ }
+done:
+#ifdef HEAPTRACKER
+ heaptracker_free_leaked_memory();
+#endif
+ return ret;
+}
+
+/*******************************************************************
+ * implementation of camera_module functions
+ *******************************************************************/
+
+/* open device handle to one of the cameras
+ *
+ * we assume the camera service keeps a singleton of each camera,
+ * so this function will only ever be called once per camera instance
+ */
+
+int camera_device_open(const hw_module_t* module, const char* name,
+ hw_device_t** device)
+{
+ int rv = 0;
+ int num_cameras = 0;
+ int cameraid;
+ aml_camera_device_t* camera_device = NULL;
+ camera_device_ops_t* camera_ops = NULL;
+ android::CameraHal* camera = NULL;
+ android::CameraProperties::Properties* properties = NULL;
+
+ android::Mutex::Autolock lock(gCameraHalDeviceLock);
+
+ LOGI("camera_device open");
+
+ if (name != NULL) {
+ cameraid = atoi(name);
+ num_cameras = gCameraProperties.camerasSupported();
+
+ // camera ids are zero based, so an id equal to the supported count is also out of bounds
+ if(cameraid >= num_cameras)
+ {
+ LOGE("camera service provided cameraid out of bounds, "
+ "cameraid = %d, num supported = %d",
+ cameraid, num_cameras);
+ rv = -EINVAL;
+ goto fail;
+ }
+
+ if(gCamerasOpen >= MAX_SIMUL_CAMERAS_SUPPORTED)
+ {
+ LOGE("maximum number of cameras already open");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ camera_device = (aml_camera_device_t*)malloc(sizeof(*camera_device));
+ if(!camera_device)
+ {
+ LOGE("camera_device allocation fail");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ camera_ops = (camera_device_ops_t*)malloc(sizeof(*camera_ops));
+ if(!camera_ops)
+ {
+ LOGE("camera_ops allocation fail");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ memset(camera_device, 0, sizeof(*camera_device));
+ memset(camera_ops, 0, sizeof(*camera_ops));
+
+ camera_device->base.common.tag = HARDWARE_DEVICE_TAG;
+ camera_device->base.common.version = 0;
+ camera_device->base.common.module = (hw_module_t *)(module);
+ camera_device->base.common.close = camera_device_close;
+ camera_device->base.ops = camera_ops;
+
+ camera_ops->set_preview_window = camera_set_preview_window;
+ camera_ops->set_callbacks = camera_set_callbacks;
+ camera_ops->enable_msg_type = camera_enable_msg_type;
+ camera_ops->disable_msg_type = camera_disable_msg_type;
+ camera_ops->msg_type_enabled = camera_msg_type_enabled;
+ camera_ops->start_preview = camera_start_preview;
+ camera_ops->stop_preview = camera_stop_preview;
+ camera_ops->preview_enabled = camera_preview_enabled;
+ camera_ops->store_meta_data_in_buffers = camera_store_meta_data_in_buffers;
+ camera_ops->start_recording = camera_start_recording;
+ camera_ops->stop_recording = camera_stop_recording;
+ camera_ops->recording_enabled = camera_recording_enabled;
+ camera_ops->release_recording_frame = camera_release_recording_frame;
+ camera_ops->auto_focus = camera_auto_focus;
+ camera_ops->cancel_auto_focus = camera_cancel_auto_focus;
+ camera_ops->take_picture = camera_take_picture;
+ camera_ops->cancel_picture = camera_cancel_picture;
+ camera_ops->set_parameters = camera_set_parameters;
+ camera_ops->get_parameters = camera_get_parameters;
+ camera_ops->put_parameters = camera_put_parameters;
+ camera_ops->send_command = camera_send_command;
+ camera_ops->release = camera_release;
+ camera_ops->dump = camera_dump;
+
+ *device = &camera_device->base.common;
+
+ // -------- vendor specific stuff --------
+
+LOGD("num_cameras=%d cameraid=%d", num_cameras, cameraid);
+ camera_device->cameraid = cameraid;
+
+ if(gCameraProperties.getProperties(cameraid, &properties) < 0)
+ {
+ LOGE("Couldn't get camera properties");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ camera = new android::CameraHal(cameraid);
+
+ if(!camera)
+ {
+ LOGE("Couldn't create instance of CameraHal class");
+ rv = -ENOMEM;
+ goto fail;
+ }
+
+ if(properties && (camera->initialize(properties) != android::NO_ERROR))
+ {
+ LOGE("Couldn't initialize camera instance");
+ rv = -ENODEV;
+ goto fail;
+ }
+
+ gCameraHals[cameraid] = camera;
+ gCamerasOpen++;
+ }
+
+ return rv;
+
+fail:
+ if(camera_device) {
+ free(camera_device);
+ camera_device = NULL;
+ }
+ if(camera_ops) {
+ free(camera_ops);
+ camera_ops = NULL;
+ }
+ if(camera) {
+ delete camera;
+ camera = NULL;
+ }
+ *device = NULL;
+ return rv;
+}
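+
+// Illustrative sketch (not part of this module) of how the camera service reaches
+// camera_device_open() through the standard Android HAL module mechanism. Error
+// handling is trimmed and the "0" camera id string is an example value.
+//
+// hw_module_t *module = NULL;
+// hw_device_t *device = NULL;
+// if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, (const hw_module_t **)&module) == 0) {
+//     module->methods->open(module, "0", &device);   // lands in camera_device_open()
+//     camera_device_t *cam = (camera_device_t *)device;
+//     cam->ops->start_preview(cam);
+//     cam->common.close(&cam->common);
+// }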
+
+int camera_get_number_of_cameras(void)
+{
+ int num_cameras = MAX_CAMERAS_SUPPORTED;
+
+ return num_cameras;
+}
+
+int camera_get_camera_info(int camera_id, struct camera_info *info)
+{
+ int rv = 0;
+ int face_value = CAMERA_FACING_BACK;
+ int orientation = 0;
+ const char *valstr = NULL;
+ android::CameraProperties::Properties* properties = NULL;
+
+LOGD("camera_get_camera_info camera_id=%d", camera_id);
+ // this going to be the first call from camera service
+ // initialize camera properties here...
+ if(gCameraProperties.initialize() != android::NO_ERROR)
+ {
+ CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
+ return NULL;
+ }
+
+ //Get camera properties for camera index
+ if(gCameraProperties.getProperties(camera_id, &properties) < 0)
+ {
+ LOGE("Couldn't get camera properties");
+ rv = -EINVAL;
+ goto end;
+ }
+
+ if(properties)
+ {
+ valstr = properties->get(android::CameraProperties::FACING_INDEX);
+ if(valstr != NULL)
+ {
+ if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_FRONT) == 0)
+ {
+ face_value = CAMERA_FACING_FRONT;
+ }
+ else if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_BACK) == 0)
+ {
+ face_value = CAMERA_FACING_BACK;
+ }
+ }
+
+ valstr = properties->get(android::CameraProperties::ORIENTATION_INDEX);
+ if(valstr != NULL)
+ {
+ orientation = atoi(valstr);
+ }
+ }
+ else
+ {
+ CAMHAL_LOGEB("getProperties() returned a NULL property set for Camera id %d", camera_id);
+ }
+
+ info->facing = face_value;
+ info->orientation = orientation;
+
+end:
+ return rv;
+}
+
+
+
+
+
diff --git a/CameraParameters.cpp b/CameraParameters.cpp
new file mode 100644
index 0000000..2ab47a0
--- a/dev/null
+++ b/CameraParameters.cpp
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraParameters.cpp
+*
+* This file defines the CameraProperties key strings and the Properties accessor methods.
+*
+*/
+
+#include "CameraHal.h"
+#include "CameraProperties.h"
+
+namespace android {
+
+const char CameraProperties::INVALID[]="prop-invalid-key";
+const char CameraProperties::CAMERA_NAME[]="prop-camera-name";
+const char CameraProperties::CAMERA_SENSOR_INDEX[]="prop-sensor-index";
+const char CameraProperties::ORIENTATION_INDEX[]="prop-orientation";
+const char CameraProperties::FACING_INDEX[]="prop-facing";
+const char CameraProperties::S3D_SUPPORTED[]="prop-s3d-supported";
+const char CameraProperties::SUPPORTED_PREVIEW_SIZES[] = "prop-preview-size-values";
+const char CameraProperties::SUPPORTED_PREVIEW_FORMATS[] = "prop-preview-format-values";
+const char CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES[] = "prop-preview-frame-rate-values";
+const char CameraProperties::SUPPORTED_PICTURE_SIZES[] = "prop-picture-size-values";
+const char CameraProperties::SUPPORTED_PICTURE_FORMATS[] = "prop-picture-format-values";
+const char CameraProperties::SUPPORTED_THUMBNAIL_SIZES[] = "prop-jpeg-thumbnail-size-values";
+const char CameraProperties::SUPPORTED_WHITE_BALANCE[] = "prop-whitebalance-values";
+const char CameraProperties::SUPPORTED_EFFECTS[] = "prop-effect-values";
+const char CameraProperties::SUPPORTED_ANTIBANDING[] = "prop-antibanding-values";
+const char CameraProperties::SUPPORTED_EXPOSURE_MODES[] = "prop-exposure-mode-values";
+const char CameraProperties::SUPPORTED_EV_MAX[] = "prop-ev-compensation-max";
+const char CameraProperties::SUPPORTED_EV_MIN[] = "prop-ev-compensation-min";
+const char CameraProperties::SUPPORTED_EV_STEP[] = "prop-ev-compensation-step";
+const char CameraProperties::SUPPORTED_ISO_VALUES[] = "prop-iso-mode-values";
+const char CameraProperties::SUPPORTED_SCENE_MODES[] = "prop-scene-mode-values";
+const char CameraProperties::SUPPORTED_FLASH_MODES[] = "prop-flash-mode-values";
+const char CameraProperties::SUPPORTED_FOCUS_MODES[] = "prop-focus-mode-values";
+const char CameraProperties::REQUIRED_PREVIEW_BUFS[] = "prop-required-preview-bufs";
+const char CameraProperties::REQUIRED_IMAGE_BUFS[] = "prop-required-image-bufs";
+const char CameraProperties::SUPPORTED_ZOOM_RATIOS[] = "prop-zoom-ratios";
+const char CameraProperties::SUPPORTED_ZOOM_STAGES[] = "prop-zoom-stages";
+const char CameraProperties::SUPPORTED_IPP_MODES[] = "prop-ipp-values";
+const char CameraProperties::SMOOTH_ZOOM_SUPPORTED[] = "prop-smooth-zoom-supported";
+const char CameraProperties::ZOOM_SUPPORTED[] = "prop-zoom-supported";
+const char CameraProperties::PREVIEW_SIZE[] = "prop-preview-size-default";
+const char CameraProperties::PREVIEW_FORMAT[] = "prop-preview-format-default";
+const char CameraProperties::PREVIEW_FRAME_RATE[] = "prop-preview-frame-rate-default";
+const char CameraProperties::ZOOM[] = "prop-zoom-default";
+const char CameraProperties::PICTURE_SIZE[] = "prop-picture-size-default";
+const char CameraProperties::PICTURE_FORMAT[] = "prop-picture-format-default";
+const char CameraProperties::JPEG_THUMBNAIL_SIZE[] = "prop-jpeg-thumbnail-size-default";
+const char CameraProperties::WHITEBALANCE[] = "prop-whitebalance-default";
+const char CameraProperties::EFFECT[] = "prop-effect-default";
+const char CameraProperties::ANTIBANDING[] = "prop-antibanding-default";
+const char CameraProperties::EXPOSURE_MODE[] = "prop-exposure-mode-default";
+const char CameraProperties::EV_COMPENSATION[] = "prop-ev-compensation-default";
+const char CameraProperties::ISO_MODE[] = "prop-iso-mode-default";
+const char CameraProperties::FOCUS_MODE[] = "prop-focus-mode-default";
+const char CameraProperties::SCENE_MODE[] = "prop-scene-mode-default";
+const char CameraProperties::FLASH_MODE[] = "prop-flash-mode-default";
+const char CameraProperties::JPEG_QUALITY[] = "prop-jpeg-quality-default";
+const char CameraProperties::CONTRAST[] = "prop-contrast-default";
+const char CameraProperties::BRIGHTNESS[] = "prop-brightness-default";
+const char CameraProperties::SATURATION[] = "prop-saturation-default";
+const char CameraProperties::SHARPNESS[] = "prop-sharpness-default";
+const char CameraProperties::IPP[] = "prop-ipp-default";
+const char CameraProperties::GBCE[] = "prop-gbce-default";
+const char CameraProperties::S3D2D_PREVIEW[] = "prop-s3d2d-preview";
+const char CameraProperties::S3D2D_PREVIEW_MODES[] = "prop-s3d2d-preview-values";
+const char CameraProperties::AUTOCONVERGENCE[] = "prop-auto-convergence";
+const char CameraProperties::AUTOCONVERGENCE_MODE[] = "prop-auto-convergence-mode";
+const char CameraProperties::MANUALCONVERGENCE_VALUES[] = "prop-manual-convergence-values";
+const char CameraProperties::VSTAB[] = "prop-vstab-default";
+const char CameraProperties::VSTAB_SUPPORTED[] = "prop-vstab-supported";
+const char CameraProperties::REVISION[] = "prop-revision";
+const char CameraProperties::FOCAL_LENGTH[] = "prop-focal-length";
+const char CameraProperties::HOR_ANGLE[] = "prop-horizontal-angle";
+const char CameraProperties::VER_ANGLE[] = "prop-vertical-angle";
+const char CameraProperties::FRAMERATE_RANGE[] = "prop-framerate-range-default";
+const char CameraProperties::FRAMERATE_RANGE_IMAGE[] = "prop-framerate-range-image-default";
+const char CameraProperties::FRAMERATE_RANGE_VIDEO[]="prop-framerate-range-video-default";
+const char CameraProperties::FRAMERATE_RANGE_SUPPORTED[]="prop-framerate-range-values";
+const char CameraProperties::SENSOR_ORIENTATION[]= "sensor-orientation";
+const char CameraProperties::SENSOR_ORIENTATION_VALUES[]= "sensor-orientation-values";
+const char CameraProperties::EXIF_MAKE[] = "prop-exif-make";
+const char CameraProperties::EXIF_MODEL[] = "prop-exif-model";
+const char CameraProperties::JPEG_THUMBNAIL_QUALITY[] = "prop-jpeg-thumbnail-quality-default";
+const char CameraProperties::MAX_FOCUS_AREAS[] = "prop-max-focus-areas";
+const char CameraProperties::MAX_FD_HW_FACES[] = "prop-max-fd-hw-faces";
+const char CameraProperties::MAX_FD_SW_FACES[] = "prop-max-fd-sw-faces";
+const char CameraProperties::AUTO_EXPOSURE_LOCK[] = "prop-auto-exposure-lock";
+const char CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED[] = "prop-auto-exposure-lock-supported";
+const char CameraProperties::AUTO_WHITEBALANCE_LOCK[] = "prop-auto-whitebalance-lock";
+const char CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED[] = "prop-auto-whitebalance-lock-supported";
+const char CameraProperties::MAX_NUM_METERING_AREAS[] = "prop-max-num-metering-areas";
+const char CameraProperties::METERING_AREAS[] = "prop-metering-areas";
+const char CameraProperties::VIDEO_SNAPSHOT_SUPPORTED[] = "prop-video-snapshot-supported";
+const char CameraProperties::VIDEO_SIZE[] = "video-size";
+const char CameraProperties::SUPPORTED_VIDEO_SIZES[] = "video-size-values";
+const char CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-preview-size-for-video";
+
+
+const char CameraProperties::DEFAULT_VALUE[] = "";
+
+const char CameraProperties::PARAMS_DELIMITER []= ",";
+
+// Returns the properties class for a specific Camera
+// Each value is indexed by the CameraProperties::CameraPropertyIndex enum
+int CameraProperties::getProperties(int cameraIndex, CameraProperties::Properties** properties)
+{
+ LOG_FUNCTION_NAME;
+
+ //LOGD("%s %d cameraIndex=%d mCamerasSupported=%d", __func__, __LINE__, cameraIndex, mCamerasSupported);
+ if((unsigned int)cameraIndex >= mCamerasSupported)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return -EINVAL;
+ }
+
+ *properties = mCameraProps+cameraIndex;
+
+ LOG_FUNCTION_NAME_EXIT;
+ return 0;
+}
+
+ssize_t CameraProperties::Properties::set(const char *prop, const char *value)
+{
+ if(!prop)
+ return -EINVAL;
+ if(!value)
+ value = DEFAULT_VALUE;
+
+ return mProperties->replaceValueFor(String8(prop), String8(value));
+}
+
+ssize_t CameraProperties::Properties::set(const char *prop, int value)
+{
+ char s_val[30];
+
+ sprintf(s_val, "%d", value);
+
+ return set(prop, s_val);
+}
+
+const char* CameraProperties::Properties::get(const char * prop)
+{
+ // return the string owned by the stored entry; copying into a local String8 here
+ // would hand back a pointer into a temporary that is destroyed on return
+ return mProperties->valueFor(String8(prop)).string();
+}
+
+void CameraProperties::Properties::dump()
+{
+ for (size_t i = 0; i < mProperties->size(); i++)
+ {
+ CAMHAL_LOGDB("%s = %s\n",
+ mProperties->keyAt(i).string(),
+ mProperties->valueAt(i).string());
+ }
+}
+
+const char* CameraProperties::Properties::keyAt(unsigned int index)
+{
+ if(index < mProperties->size())
+ {
+ return mProperties->keyAt(index).string();
+ }
+ return NULL;
+}
+
+const char* CameraProperties::Properties::valueAt(unsigned int index)
+{
+ if(index < mProperties->size())
+ {
+ return mProperties->valueAt(index).string();
+ }
+ return NULL;
+}
+
+};
diff --git a/CameraProperties.cpp b/CameraProperties.cpp
new file mode 100644
index 0000000..43ff15f
--- a/dev/null
+++ b/CameraProperties.cpp
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file CameraProperties.cpp
+*
+* This file maps the CameraHardwareInterface to the Camera interfaces on OMAP4 (mainly OMX).
+*
+*/
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "CameraProperties"
+
+//#include "CameraHal.h"
+#include <utils/threads.h>
+
+#include "DebugUtils.h"
+#include "CameraProperties.h"
+
+#define CAMERA_ROOT "CameraRoot"
+#define CAMERA_INSTANCE "CameraInstance"
+
+namespace android {
+
+/*********************************************************
+ CameraProperties - public function implementation
+**********************************************************/
+
+CameraProperties::CameraProperties() : mCamerasSupported(0)
+{
+ LOG_FUNCTION_NAME;
+
+ mCamerasSupported = 0;
+ mInitialized = 0;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+CameraProperties::~CameraProperties()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+// Initializes the CameraProperties class
+status_t CameraProperties::initialize()
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret;
+
+ Mutex::Autolock lock(mLock);
+
+ if(mInitialized)
+ return NO_ERROR;
+
+ ret = loadProperties();
+
+ mInitialized = 1;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
+ const unsigned int starting_camera,
+ const unsigned int max_camera);
+
+///Loads all the Camera related properties
+status_t CameraProperties::loadProperties()
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+
+ // adapter updates capabilities and we update camera count
+ mCamerasSupported = CameraAdapter_Capabilities(mCameraProps, mCamerasSupported, MAX_CAMERAS_SUPPORTED);
+
+ if((int)mCamerasSupported < 0) {
+ LOGE("error while getting capabilities");
+ ret = UNKNOWN_ERROR;
+ } else if (mCamerasSupported > MAX_CAMERAS_SUPPORTED) {
+ LOGE("returned too many adapaters");
+ ret = UNKNOWN_ERROR;
+ } else {
+ LOGE("num_cameras = %d", mCamerasSupported);
+
+ for (unsigned int i = 0; i < mCamerasSupported; i++) {
+ mCameraProps[i].set(CAMERA_SENSOR_INDEX, i);
+ mCameraProps[i].dump();
+ }
+ }
+
+ LOGV("mCamerasSupported = %d", mCamerasSupported);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+// Returns the number of Cameras found
+int CameraProperties::camerasSupported()
+{
+ LOG_FUNCTION_NAME;
+ return mCamerasSupported;
+}
+
+};
diff --git a/Encoder_libjpeg.cpp b/Encoder_libjpeg.cpp
new file mode 100644
index 0000000..0240fb0
--- a/dev/null
+++ b/Encoder_libjpeg.cpp
@@ -0,0 +1,462 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file Encoder_libjpeg.cpp
+*
+* This file encodes a YUV422I buffer to a jpeg
+* TODO(XXX): Need to support formats other than yuv422i
+* Change interface to pre/post-proc algo framework
+*
+*/
+
+#define LOG_TAG "CameraHAL"
+
+#include "CameraHal.h"
+#include "Encoder_libjpeg.h"
+#include "NV12_resize.h"
+
+#include <stdlib.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <errno.h>
+#include <math.h>
+
+extern "C" {
+ #include "jpeglib.h"
+ #include "jerror.h"
+}
+
+#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
+
+namespace android {
+struct string_pair {
+ const char* string1;
+ const char* string2;
+};
+
+static string_pair degrees_to_exif_lut [] = {
+ // degrees, exif_orientation
+ {"0", "1"},
+ {"90", "6"},
+ {"180", "3"},
+ {"270", "8"},
+};
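+
+// For reference, the EXIF orientation codes in the table above mean: 1 = normal
+// (0 degrees), 6 = rotate 90 degrees CW to view upright, 3 = rotate 180 degrees,
+// 8 = rotate 270 degrees CW (i.e. 90 degrees CCW).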
+struct libjpeg_destination_mgr : jpeg_destination_mgr {
+ libjpeg_destination_mgr(uint8_t* input, int size);
+
+ uint8_t* buf;
+ int bufsize;
+ size_t jpegsize;
+};
+
+static void libjpeg_init_destination (j_compress_ptr cinfo) {
+ libjpeg_destination_mgr* dest = (libjpeg_destination_mgr*)cinfo->dest;
+
+ dest->next_output_byte = dest->buf;
+ dest->free_in_buffer = dest->bufsize;
+ dest->jpegsize = 0;
+}
+
+static boolean libjpeg_empty_output_buffer(j_compress_ptr cinfo) {
+ libjpeg_destination_mgr* dest = (libjpeg_destination_mgr*)cinfo->dest;
+
+ dest->next_output_byte = dest->buf;
+ dest->free_in_buffer = dest->bufsize;
+ return TRUE; // ?
+}
+
+static void libjpeg_term_destination (j_compress_ptr cinfo) {
+ libjpeg_destination_mgr* dest = (libjpeg_destination_mgr*)cinfo->dest;
+ dest->jpegsize = dest->bufsize - dest->free_in_buffer;
+}
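+
+// Note on this destination manager: output goes straight into the caller-supplied
+// buffer. If that buffer fills, libjpeg_empty_output_buffer() simply rewinds
+// next_output_byte to the start, so earlier bytes are overwritten and jpegsize
+// only reflects the final pass; dst_size therefore has to be large enough for the
+// whole JPEG.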
+
+libjpeg_destination_mgr::libjpeg_destination_mgr(uint8_t* input, int size) {
+ this->init_destination = libjpeg_init_destination;
+ this->empty_output_buffer = libjpeg_empty_output_buffer;
+ this->term_destination = libjpeg_term_destination;
+
+ this->buf = input;
+ this->bufsize = size;
+
+ jpegsize = 0;
+}
+
+/* private static functions */
+static void nv21_to_yuv(uint8_t* dst, uint8_t* y, uint8_t* uv, int width) {
+ if (!dst || !y || !uv) {
+ return;
+ }
+
+ while ((width--) > 0) {
+ uint8_t y0 = y[0];
+ uint8_t v0 = uv[0];
+ uint8_t u0 = *(uv+1);
+ dst[0] = y0;
+ dst[1] = u0;
+ dst[2] = v0;
+ dst += 3;
+ y++;
+ if(!(width % 2)) uv+=2;
+ }
+}
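+
+// Example for nv21_to_yuv() with one row of four pixels: NV21 luma is Y0 Y1 Y2 Y3
+// and the interleaved chroma row is V0 U0 V1 U1, each V/U pair covering two
+// horizontal pixels. The packed YUV444 output row is therefore
+// Y0 U0 V0 | Y1 U0 V0 | Y2 U1 V1 | Y3 U1 V1 (the caller advances the chroma row
+// only every other scanline to account for vertical subsampling).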
+
+static void uyvy_to_yuv(uint8_t* dst, uint32_t* src, int width) {
+ if (!dst || !src) {
+ return;
+ }
+
+ if (width % 2) {
+ return; // not supporting odd widths
+ }
+
+ // currently, neon routine only supports multiple of 16 width
+ if (width % 16) {
+ while ((width-=2) >= 0) {
+ uint8_t u0 = (src[0] >> 0) & 0xFF;
+ uint8_t y0 = (src[0] >> 8) & 0xFF;
+ uint8_t v0 = (src[0] >> 16) & 0xFF;
+ uint8_t y1 = (src[0] >> 24) & 0xFF;
+ dst[0] = y0;
+ dst[1] = u0;
+ dst[2] = v0;
+ dst[3] = y1;
+ dst[4] = u0;
+ dst[5] = v0;
+ dst += 6;
+ src++;
+ }
+ } else {
+ int n = width;
+ asm volatile (
+ " pld [%[src], %[src_stride], lsl #2] \n\t"
+ " cmp %[n], #16 \n\t"
+ " blt 5f \n\t"
+ "0: @ 16 pixel swap \n\t"
+ " vld2.8 {q0, q1} , [%[src]]! @ q0 = uv q1 = y \n\t"
+ " vuzp.8 q0, q2 @ d1 = u d5 = v \n\t"
+ " vmov d1, d0 @ q0 = u0u1u2..u0u1u2... \n\t"
+ " vmov d5, d4 @ q2 = v0v1v2..v0v1v2... \n\t"
+ " vzip.8 d0, d1 @ q0 = u0u0u1u1u2u2... \n\t"
+ " vzip.8 d4, d5 @ q2 = v0v0v1v1v2v2... \n\t"
+ " vswp q0, q1 @ now q0 = y q1 = u q2 = v \n\t"
+ " vst3.8 {d0,d2,d4},[%[dst]]! \n\t"
+ " vst3.8 {d1,d3,d5},[%[dst]]! \n\t"
+ " sub %[n], %[n], #16 \n\t"
+ " cmp %[n], #16 \n\t"
+ " bge 0b \n\t"
+ "5: @ end \n\t"
+#ifdef NEEDS_ARM_ERRATA_754319_754320
+ " vmov s0,s0 @ add noop for errata item \n\t"
+#endif
+ : [dst] "+r" (dst), [src] "+r" (src), [n] "+r" (n)
+ : [src_stride] "r" (width)
+ : "cc", "memory", "q0", "q1", "q2"
+ );
+ }
+}
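+
+// Layout note for uyvy_to_yuv(): each 32-bit input word holds two pixels as the
+// bytes U0 Y0 V0 Y1 (as unpacked in the scalar path above), and both output
+// pixels share the U0/V0 chroma pair. The NEON path is only taken when the width
+// is a multiple of 16 pixels; other widths use the scalar loop.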
+
+static void resize_nv12(Encoder_libjpeg::params* params, uint8_t* dst_buffer) {
+ structConvImage o_img_ptr, i_img_ptr;
+
+ if (!params || !dst_buffer) {
+ return;
+ }
+
+ //input
+ i_img_ptr.uWidth = params->in_width;
+ i_img_ptr.uStride = i_img_ptr.uWidth;
+ i_img_ptr.uHeight = params->in_height;
+ i_img_ptr.eFormat = IC_FORMAT_YCbCr420_lp;
+ i_img_ptr.imgPtr = (uint8_t*) params->src;
+ i_img_ptr.clrPtr = i_img_ptr.imgPtr + (i_img_ptr.uWidth * i_img_ptr.uHeight);
+
+ //output
+ o_img_ptr.uWidth = params->out_width;
+ o_img_ptr.uStride = o_img_ptr.uWidth;
+ o_img_ptr.uHeight = params->out_height;
+ o_img_ptr.eFormat = IC_FORMAT_YCbCr420_lp;
+ o_img_ptr.imgPtr = dst_buffer;
+ o_img_ptr.clrPtr = o_img_ptr.imgPtr + (o_img_ptr.uWidth * o_img_ptr.uHeight);
+
+ VT_resizeFrame_Video_opt2_lp(&i_img_ptr, &o_img_ptr, NULL, 0);
+}
+
+/* public static functions */
+const char* ExifElementsTable::degreesToExifOrientation(const char* degrees) {
+ for (unsigned int i = 0; i < ARRAY_SIZE(degrees_to_exif_lut); i++) {
+ if (!strcmp(degrees, degrees_to_exif_lut[i].string1)) {
+ return degrees_to_exif_lut[i].string2;
+ }
+ }
+ return NULL;
+}
+
+void ExifElementsTable::stringToRational(const char* str, unsigned int* num, unsigned int* den) {
+ int len;
+ char * tempVal = NULL;
+
+ if (str != NULL) {
+ len = strlen(str);
+ tempVal = (char*) malloc( sizeof(char) * (len + 1));
+ }
+
+ if (tempVal != NULL) {
+ // convert the decimal string into a rational
+ size_t den_len;
+ char *ctx;
+ unsigned int numerator = 0;
+ unsigned int denominator = 0;
+ char* temp = NULL;
+
+ memset(tempVal, '\0', len + 1);
+ strncpy(tempVal, str, len);
+ temp = strtok_r(tempVal, ".", &ctx);
+
+ if (temp != NULL)
+ numerator = atoi(temp);
+
+ if (!numerator)
+ numerator = 1;
+
+ temp = strtok_r(NULL, ".", &ctx);
+ if (temp != NULL) {
+ den_len = strlen(temp);
+ if(HUGE_VAL == den_len ) {
+ den_len = 0;
+ }
+
+ denominator = static_cast<unsigned int>(pow(10, den_len));
+ numerator = numerator * denominator + atoi(temp);
+ } else {
+ denominator = 1;
+ }
+
+ free(tempVal);
+
+ *num = numerator;
+ *den = denominator;
+ }
+}
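+
+// Worked example for stringToRational(): "2.5" splits into "2" and "5"; the
+// fractional part has one digit, so denominator = 10 and numerator = 2*10 + 5 = 25,
+// giving 25/10. Note that a zero integer part is forced to 1 above, so "0.5"
+// would come out as 15/10 rather than 5/10.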
+
+bool ExifElementsTable::isAsciiTag(const char* tag) {
+ // TODO(XXX): Add tags as necessary
+ return (strcmp(tag, TAG_GPS_PROCESSING_METHOD) == 0);
+}
+
+void ExifElementsTable::insertExifToJpeg(unsigned char* jpeg, size_t jpeg_size) {
+ ReadMode_t read_mode = (ReadMode_t)(READ_METADATA | READ_IMAGE);
+
+ ResetJpgfile();
+ if (ReadJpegSectionsFromBuffer(jpeg, jpeg_size, read_mode)) {
+ jpeg_opened = true;
+ create_EXIF(table, exif_tag_count, gps_tag_count);
+ }
+}
+
+status_t ExifElementsTable::insertExifThumbnailImage(const char* thumb, int len) {
+ status_t ret = NO_ERROR;
+
+ if ((len > 0) && jpeg_opened) {
+ ret = ReplaceThumbnailFromBuffer(thumb, len);
+ CAMHAL_LOGDB("insertExifThumbnailImage. ReplaceThumbnail(). ret=%d", ret);
+ }
+
+ return ret;
+}
+
+void ExifElementsTable::saveJpeg(unsigned char* jpeg, size_t jpeg_size) {
+ if (jpeg_opened) {
+ WriteJpegToBuffer(jpeg, jpeg_size);
+ DiscardData();
+ jpeg_opened = false;
+ }
+}
+
+/* public functions */
+ExifElementsTable::~ExifElementsTable() {
+ int num_elements = gps_tag_count + exif_tag_count;
+
+ for (int i = 0; i < num_elements; i++) {
+ if (table[i].Value) {
+ free(table[i].Value);
+ }
+ }
+
+ if (jpeg_opened) {
+ DiscardData();
+ }
+}
+
+status_t ExifElementsTable::insertElement(const char* tag, const char* value) {
+ int value_length = 0;
+ status_t ret = NO_ERROR;
+
+ if (!value || !tag) {
+ return -EINVAL;
+ }
+
+ if (position >= MAX_EXIF_TAGS_SUPPORTED) {
+ CAMHAL_LOGEA("Max number of EXIF elements already inserted");
+ return NO_MEMORY;
+ }
+
+ if (isAsciiTag(tag)) {
+ value_length = sizeof(ExifAsciiPrefix) + strlen(value + sizeof(ExifAsciiPrefix));
+ } else {
+ value_length = strlen(value);
+ }
+
+ if (IsGpsTag(tag)) {
+ table[position].GpsTag = TRUE;
+ table[position].Tag = GpsTagNameToValue(tag);
+ gps_tag_count++;
+ } else {
+ table[position].GpsTag = FALSE;
+ table[position].Tag = TagNameToValue(tag);
+ exif_tag_count++;
+ }
+
+ table[position].DataLength = 0;
+ table[position].Value = (char*) malloc(sizeof(char) * (value_length + 1));
+
+ if (table[position].Value) {
+ memcpy(table[position].Value, value, value_length + 1);
+ table[position].DataLength = value_length + 1;
+ }
+
+ position++;
+ return ret;
+}
+
+/* private member functions */
+size_t Encoder_libjpeg::encode(params* input) {
+ jpeg_compress_struct cinfo;
+ jpeg_error_mgr jerr;
+ jpeg_destination_mgr jdest;
+ uint8_t* src = NULL, *resize_src = NULL;
+ uint8_t* row_tmp = NULL;
+ uint8_t* row_src = NULL;
+ uint8_t* row_uv = NULL; // used only for NV12
+ int out_width = 0, in_width = 0;
+ int out_height = 0, in_height = 0;
+ int bpp = 2; // for uyvy
+
+ if (!input) {
+ return 0;
+ }
+
+ out_width = input->out_width;
+ in_width = input->in_width;
+ out_height = input->out_height;
+ in_height = input->in_height;
+ src = input->src;
+ input->jpeg_size = 0;
+
+ libjpeg_destination_mgr dest_mgr(input->dst, input->dst_size);
+
+ // param check...
+ if ((in_width < 2) || (out_width < 2) || (in_height < 2) || (out_height < 2) ||
+ (src == NULL) || (input->dst == NULL) || (input->quality < 1) || (input->src_size < 1) ||
+ (input->dst_size < 1) || (input->format == NULL)) {
+ goto exit;
+ }
+
+ if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ bpp = 1;
+ if ((in_width != out_width) || (in_height != out_height)) {
+ resize_src = (uint8_t*) malloc(input->dst_size);
+ resize_nv12(input, resize_src);
+ if (resize_src) src = resize_src;
+ }
+ } else if ((in_width != out_width) || (in_height != out_height)) {
+ CAMHAL_LOGEB("Encoder: resizing is not supported for this format: %s", input->format);
+ goto exit;
+ } else if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV422I)) {
+ // we currently only support yuv422i and yuv420sp
+ CAMHAL_LOGEB("Encoder: format not supported: %s", input->format);
+ goto exit;
+ }
+
+ cinfo.err = jpeg_std_error(&jerr);
+
+ jpeg_create_compress(&cinfo);
+
+ CAMHAL_LOGDB("encoding... \n\t"
+ "width: %d \n\t"
+ "height:%d \n\t"
+ "dest %p \n\t"
+ "dest size:%d \n\t"
+ "mSrc %p",
+ out_width, out_height, input->dst,
+ input->dst_size, src);
+
+ cinfo.dest = &dest_mgr;
+ cinfo.image_width = out_width;
+ cinfo.image_height = out_height;
+ cinfo.input_components = 3;
+ cinfo.in_color_space = JCS_YCbCr;
+ cinfo.input_gamma = 1;
+
+ jpeg_set_defaults(&cinfo);
+ jpeg_set_quality(&cinfo, input->quality, TRUE);
+ cinfo.dct_method = JDCT_IFAST;
+
+ jpeg_start_compress(&cinfo, TRUE);
+
+ row_tmp = (uint8_t*)malloc(out_width * 3);
+ row_src = src;
+ row_uv = src + out_width * out_height * bpp;
+
+ while ((cinfo.next_scanline < cinfo.image_height) && !mCancelEncoding) {
+ JSAMPROW row[1]; /* pointer to JSAMPLE row[s] */
+
+ // convert input yuv format to yuv444
+ if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ nv21_to_yuv(row_tmp, row_src, row_uv, out_width);
+ } else {
+ uyvy_to_yuv(row_tmp, (uint32_t*)row_src, out_width);
+ }
+
+ row[0] = row_tmp;
+ jpeg_write_scanlines(&cinfo, row, 1);
+ row_src = row_src + out_width*bpp;
+
+ // move uv row if input format needs it
+ if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (!(cinfo.next_scanline % 2))
+ row_uv = row_uv + out_width * bpp;
+ }
+ }
+
+ // no need to finish encoding routine if we are prematurely stopping
+ // we will end up crashing in dest_mgr since data is incomplete
+ if (!mCancelEncoding)
+ jpeg_finish_compress(&cinfo);
+ jpeg_destroy_compress(&cinfo);
+
+ if (resize_src) free(resize_src);
+ if (row_tmp) free(row_tmp);
+
+ exit:
+ input->jpeg_size = dest_mgr.jpegsize;
+ return dest_mgr.jpegsize;
+}
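+
+// Illustrative call sequence for the encoder above (a sketch only; yuvBuffer,
+// yuvSize, jpegBuffer and jpegBufferSize are placeholders, and the params struct
+// is declared in Encoder_libjpeg.h -- only the fields read by encode() are shown):
+//
+//     Encoder_libjpeg::params enc;
+//     enc.src       = yuvBuffer;                 // YUV422I or YUV420SP input
+//     enc.src_size  = yuvSize;
+//     enc.dst       = jpegBuffer;                // caller-allocated output buffer
+//     enc.dst_size  = jpegBufferSize;
+//     enc.quality   = 90;
+//     enc.in_width  = enc.out_width  = 640;
+//     enc.in_height = enc.out_height = 480;
+//     enc.format    = CameraParameters::PIXEL_FORMAT_YUV422I;
+//     // encode(&enc) then writes the JPEG into enc.dst and records the size
+//     // in enc.jpeg_size (also returned).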
+
+} // namespace android
diff --git a/MemoryManager.cpp b/MemoryManager.cpp
new file mode 100644
index 0000000..38bb7d9
--- a/dev/null
+++ b/MemoryManager.cpp
@@ -0,0 +1,227 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#define LOG_TAG "CameraHAL"
+
+
+#include "CameraHal.h"
+#include "TICameraParameters.h"
+
+extern "C" {
+
+//#include <ion.h>
+
+//#include <timm_osal_interfaces.h>
+//#include <timm_osal_trace.h>
+
+
+};
+
+namespace android {
+
+///@todo Move these constants to a common header file, preferably in tiler.h
+#define STRIDE_8BIT (4 * 1024)
+#define STRIDE_16BIT (4 * 1024)
+
+#define ALLOCATION_2D 2
+
+///Utility Macro Declarations
+
+/*--------------------MemoryManager Class STARTS here-----------------------------*/
+void* MemoryManager::allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs)
+{
+ LOG_FUNCTION_NAME;
+ return NULL;
+#if 0
+ if(mIonFd == 0)
+ {
+ mIonFd = ion_open();
+ if(mIonFd == 0)
+ {
+ CAMHAL_LOGEA("ion_open failed!!!");
+ return NULL;
+ }
+ }
+
+ ///We allocate numBufs+1 because the last entry will be marked NULL to indicate end of array, which is used when freeing
+ ///the buffers
+ const uint numArrayEntriesC = (uint)(numBufs+1);
+
+ ///Allocate a buffer array
+ uint32_t *bufsArr = new uint32_t [numArrayEntriesC];
+ if(!bufsArr)
+ {
+ CAMHAL_LOGEB("Allocation failed when creating buffers array of %d uint32_t elements", numArrayEntriesC);
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+ }
+
+ ///Initialize the array with zeros - this will help us while freeing the array in case of error
+ ///If a value of an array element is NULL, it means we didn't allocate it
+ memset(bufsArr, 0, sizeof(*bufsArr) * numArrayEntriesC);
+
+ //2D Allocations are not supported currently
+ if(bytes != 0)
+ {
+ struct ion_handle *handle;
+ int mmap_fd;
+
+ ///1D buffers
+ for (int i = 0; i < numBufs; i++)
+ {
+ int ret = ion_alloc(mIonFd, bytes, 0, 1 << ION_HEAP_TYPE_CARVEOUT, &handle);
+ if(ret < 0)
+ {
+ CAMHAL_LOGEB("ion_alloc resulted in error %d", ret);
+ goto error;
+ }
+
+ CAMHAL_LOGDB("Before mapping, handle = %x, nSize = %d", handle, bytes);
+ if ((ret = ion_map(mIonFd, handle, bytes, PROT_READ | PROT_WRITE, MAP_SHARED, 0,
+ (unsigned char**)&bufsArr[i], &mmap_fd)) < 0)
+ {
+ CAMHAL_LOGEB("Userspace mapping of ION buffers returned error %d", ret);
+ ion_free(mIonFd, handle);
+ goto error;
+ }
+
+ mIonHandleMap.add(bufsArr[i], (unsigned int)handle);
+ mIonFdMap.add(bufsArr[i], (unsigned int) mmap_fd);
+ mIonBufLength.add(bufsArr[i], (unsigned int) bytes);
+ }
+
+ }
+ else // If bytes is zero, then it is a 2-D tiler buffer request
+ {
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return (void*)bufsArr;
+
+error:
+ LOGE("Freeing buffers already allocated after error occurred");
+ freeBuffer(bufsArr);
+
+ if ( NULL != mErrorNotifier.get() )
+ {
+ mErrorNotifier->errorNotify(-ENOMEM);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NULL;
+#endif
+}
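+
+// As committed, allocateBuffer() is a stub that always returns NULL; the #if 0
+// block above preserves the original TI ION-based allocation path for reference.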
+
+//TODO: Get needed data to map tiler buffers
+//Return dummy data for now
+uint32_t * MemoryManager::getOffsets()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NULL;
+}
+
+int MemoryManager::getFd()
+{
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return -1;
+}
+
+int MemoryManager::freeBuffer(void* buf)
+{
+ return -1;
+#if 0
+ status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
+ uint32_t *bufEntry = (uint32_t*)buf;
+
+ if(!bufEntry)
+ {
+ CAMHAL_LOGEA("NULL pointer passed to freebuffer");
+ LOG_FUNCTION_NAME_EXIT;
+ return BAD_VALUE;
+ }
+
+ while(*bufEntry)
+ {
+ unsigned int ptr = (unsigned int) *bufEntry++;
+ if(mIonBufLength.valueFor(ptr))
+ {
+ munmap((void *)ptr, mIonBufLength.valueFor(ptr));
+ close(mIonFdMap.valueFor(ptr));
+ ion_free(mIonFd, (ion_handle*)mIonHandleMap.valueFor(ptr));
+ mIonHandleMap.removeItem(ptr);
+ mIonBufLength.removeItem(ptr);
+ mIonFdMap.removeItem(ptr);
+ }
+ else
+ {
+ CAMHAL_LOGEA("Not a valid Memory Manager buffer");
+ }
+ }
+
+ ///@todo Check if this way of deleting array is correct, else use malloc/free
+ uint32_t * bufArr = (uint32_t*)buf;
+ delete [] bufArr;
+
+ if(mIonBufLength.size() == 0)
+ {
+ if(mIonFd)
+ {
+ ion_close(mIonFd);
+ mIonFd = 0;
+ }
+ }
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+#endif
+}
+
+status_t MemoryManager::setErrorHandler(ErrorNotifier *errorNotifier)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ if ( NULL == errorNotifier )
+ {
+ CAMHAL_LOGEA("Invalid Error Notifier reference");
+ ret = -EINVAL;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+ mErrorNotifier = errorNotifier;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+};
+
+
+/*--------------------MemoryManager Class ENDS here-----------------------------*/
diff --git a/NV12_resize.c b/NV12_resize.c
new file mode 100644
index 0000000..03c049e
--- a/dev/null
+++ b/NV12_resize.c
@@ -0,0 +1,307 @@
+#include "NV12_resize.h"
+
+//#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_NDDEBUG 0
+
+#define LOG_TAG "NV12_resize"
+#define STRIDE 4096
+#include <utils/Log.h>
+
+/*==========================================================================
+* Function Name : VT_resizeFrame_Video_opt2_lp
+*
+* Description : Resize a yuv frame.
+*
+* Input(s) : input_img_ptr -> Input Image Structure
+* : output_img_ptr -> Output Image Structure
+* : cropout -> crop structure
+*
+* Value Returned : mmBool -> FALSE on error TRUE on success
+* NOTE:
+* Not tested for crop functionality.
+* faster version.
+============================================================================*/
+mmBool
+VT_resizeFrame_Video_opt2_lp
+(
+ structConvImage* i_img_ptr, /* Points to the input image */
+ structConvImage* o_img_ptr, /* Points to the output image */
+ IC_rect_type* cropout, /* how much to resize to in final image */
+ mmUint16 dummy /* Transparent pixel value */
+ )
+{
+ LOGV("VT_resizeFrame_Video_opt2_lp+");
+
+ mmUint16 row,col;
+ mmUint32 resizeFactorX;
+ mmUint32 resizeFactorY;
+
+
+ mmUint16 x, y;
+
+ mmUchar* ptr8;
+ mmUchar *ptr8Cb, *ptr8Cr;
+
+
+ mmUint16 xf, yf;
+ mmUchar* inImgPtrY;
+ mmUchar* inImgPtrU;
+ mmUchar* inImgPtrV;
+ mmUint32 cox, coy, codx, cody;
+ mmUint16 idx,idy, idxC;
+
+ if(i_img_ptr->uWidth == o_img_ptr->uWidth)
+ {
+ if(i_img_ptr->uHeight == o_img_ptr->uHeight)
+ {
+ LOGV("************************f(i_img_ptr->uHeight == o_img_ptr->uHeight) are same *********************\n");
+ LOGV("************************(i_img_ptr->width == %d" , i_img_ptr->uWidth );
+ LOGV("************************(i_img_ptr->uHeight == %d" , i_img_ptr->uHeight );
+ LOGV("************************(o_img_ptr->width == %d" ,o_img_ptr->uWidth );
+ LOGV("************************(o_img_ptr->uHeight == %d" , o_img_ptr->uHeight );
+ }
+ }
+
+ if (!i_img_ptr || !i_img_ptr->imgPtr ||
+ !o_img_ptr || !o_img_ptr->imgPtr)
+ {
+ LOGE("Image Point NULL");
+ LOGV("VT_resizeFrame_Video_opt2_lp-");
+ return FALSE;
+ }
+
+ inImgPtrY = (mmUchar *) i_img_ptr->imgPtr + i_img_ptr->uOffset;
+ inImgPtrU = (mmUchar *) i_img_ptr->clrPtr + i_img_ptr->uOffset/2;
+ inImgPtrV = (mmUchar*)inImgPtrU + 1;
+
+ if (cropout == NULL)
+ {
+ cox = 0;
+ coy = 0;
+ codx = o_img_ptr->uWidth;
+ cody = o_img_ptr->uHeight;
+ }
+ else
+ {
+ cox = cropout->x;
+ coy = cropout->y;
+ codx = cropout->uWidth;
+ cody = cropout->uHeight;
+ }
+ idx = i_img_ptr->uWidth;
+ idy = i_img_ptr->uHeight;
+
+ /* make sure valid input size */
+ if (idx < 1 || idy < 1 || i_img_ptr->uStride < 1)
+ {
+ LOGE("idx or idy less then 1 idx = %d idy = %d stride = %d", idx, idy, i_img_ptr->uStride);
+ LOGV("VT_resizeFrame_Video_opt2_lp-");
+ return FALSE;
+ }
+
+ resizeFactorX = ((idx-1)<<9) / codx;
+ resizeFactorY = ((idy-1)<<9) / cody;
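+
+ /* Explanatory note on the fixed-point stepping used below: the resize factors
+  * are source steps in Q9 (9 fractional bits). For each output column,
+  * x = (col*resizeFactorX) >> 9 is the integer source column and
+  * xf = ((col*resizeFactorX) >> 6) & 0x7 keeps the top three fraction bits,
+  * selecting one of 8 sub-pixel positions. bWeights[xf][yf][] supplies the four
+  * bilinear weights, which sum to 64 - hence the ">> 6" normalisation of the
+  * weighted sums. */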
+
+ if(i_img_ptr->eFormat == IC_FORMAT_YCbCr420_lp &&
+ o_img_ptr->eFormat == IC_FORMAT_YCbCr420_lp)
+ {
+ ptr8 = (mmUchar*)o_img_ptr->imgPtr + cox + coy*o_img_ptr->uWidth;
+
+
+ ////////////////////////////for Y//////////////////////////
+ for (row=0; row < cody; row++)
+ {
+ mmUchar *pu8Yrow1 = NULL;
+ mmUchar *pu8Yrow2 = NULL;
+ y = (mmUint16) ((mmUint32) (row*resizeFactorY) >> 9);
+ yf = (mmUchar) ((mmUint32)((row*resizeFactorY) >> 6) & 0x7);
+ pu8Yrow1 = inImgPtrY + (y) * i_img_ptr->uStride;
+ pu8Yrow2 = pu8Yrow1 + i_img_ptr->uStride;
+
+ for (col=0; col < codx; col++)
+ {
+ mmUchar in11, in12, in21, in22;
+ mmUchar *pu8ptr1 = NULL;
+ mmUchar *pu8ptr2 = NULL;
+ mmUchar w;
+ mmUint16 accum_1;
+ //mmUint32 accum_W;
+
+
+
+ x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9);
+ xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7);
+
+
+ //accum_W = 0;
+ accum_1 = 0;
+
+ pu8ptr1 = pu8Yrow1 + (x);
+ pu8ptr2 = pu8Yrow2 + (x);
+
+ /* A pixel */
+ //in = *(inImgPtrY + (y)*idx + (x));
+ in11 = *(pu8ptr1);
+
+ w = bWeights[xf][yf][0];
+ accum_1 = (w * in11);
+ //accum_W += (w);
+
+ /* B pixel */
+ //in = *(inImgPtrY + (y)*idx + (x+1));
+ in12 = *(pu8ptr1+1);
+ w = bWeights[xf][yf][1];
+ accum_1 += (w * in12);
+ //accum_W += (w);
+
+ /* C pixel */
+ //in = *(inImgPtrY + (y+1)*idx + (x));
+ in21 = *(pu8ptr2);
+ w = bWeights[xf][yf][3];
+ accum_1 += (w * in21);
+ //accum_W += (w);
+
+ /* D pixel */
+ //in = *(inImgPtrY + (y+1)*idx + (x+1));
+ in22 = *(pu8ptr2+1);
+ w = bWeights[xf][yf][2];
+ accum_1 += (w * in22);
+ //accum_W += (w);
+
+ /* divide by sum of the weights */
+ //accum_1 /= (accum_W);
+ //accum_1 = (accum_1/64);
+ accum_1 = (accum_1>>6);
+ *ptr8 = (mmUchar)accum_1 ;
+
+
+ ptr8++;
+ }
+ ptr8 = ptr8 + (o_img_ptr->uStride - codx);
+ }
+ ////////////////////////////for Y//////////////////////////
+
+ ///////////////////////////////for Cb-Cr//////////////////////
+
+ ptr8Cb = (mmUchar*)o_img_ptr->clrPtr + cox + coy*o_img_ptr->uWidth;
+
+ ptr8Cr = (mmUchar*)(ptr8Cb+1);
+
+ idxC = (idx>>1);
+ for (row=0; row < (((cody)>>1)); row++)
+ {
+ mmUchar *pu8Cbr1 = NULL;
+ mmUchar *pu8Cbr2 = NULL;
+ mmUchar *pu8Crr1 = NULL;
+ mmUchar *pu8Crr2 = NULL;
+
+ y = (mmUint16) ((mmUint32) (row*resizeFactorY) >> 9);
+ yf = (mmUchar) ((mmUint32)((row*resizeFactorY) >> 6) & 0x7);
+
+ pu8Cbr1 = inImgPtrU + (y) * i_img_ptr->uStride;
+ pu8Cbr2 = pu8Cbr1 + i_img_ptr->uStride;
+ pu8Crr1 = inImgPtrV + (y) * i_img_ptr->uStride;
+ pu8Crr2 = pu8Crr1 + i_img_ptr->uStride;
+
+ for (col=0; col < (((codx)>>1)); col++)
+ {
+ mmUchar in11, in12, in21, in22;
+ mmUchar *pu8Cbc1 = NULL;
+ mmUchar *pu8Cbc2 = NULL;
+ mmUchar *pu8Crc1 = NULL;
+ mmUchar *pu8Crc2 = NULL;
+
+ mmUchar w;
+ mmUint16 accum_1Cb, accum_1Cr;
+ //mmUint32 accum_WCb, accum_WCr;
+
+
+ x = (mmUint16) ((mmUint32) (col*resizeFactorX) >> 9);
+ xf = (mmUchar) ((mmUint32) ((col*resizeFactorX) >> 6) & 0x7);
+
+
+ //accum_WCb = accum_WCr = 0;
+ accum_1Cb = accum_1Cr = 0;
+
+ pu8Cbc1 = pu8Cbr1 + (x*2);
+ pu8Cbc2 = pu8Cbr2 + (x*2);
+ pu8Crc1 = pu8Crr1 + (x*2);
+ pu8Crc2 = pu8Crr2 + (x*2);
+
+
+
+ /* A pixel */
+ w = bWeights[xf][yf][0];
+
+ in11 = *(pu8Cbc1);
+ accum_1Cb = (w * in11);
+ // accum_WCb += (w);
+
+ in11 = *(pu8Crc1);
+ accum_1Cr = (w * in11);
+ //accum_WCr += (w);
+
+ /* B pixel */
+ w = bWeights[xf][yf][1];
+
+ in12 = *(pu8Cbc1+2);
+ accum_1Cb += (w * in12);
+ //accum_WCb += (w);
+
+ in12 = *(pu8Crc1+2);
+ accum_1Cr += (w * in12);
+ //accum_WCr += (w);
+
+ /* C pixel */
+ w = bWeights[xf][yf][3];
+
+ in21 = *(pu8Cbc2);
+ accum_1Cb += (w * in21);
+ //accum_WCb += (w);
+
+ in21 = *(pu8Crc2);
+ accum_1Cr += (w * in21);
+ //accum_WCr += (w);
+
+ /* D pixel */
+ w = bWeights[xf][yf][2];
+
+ in22 = *(pu8Cbc2+2);
+ accum_1Cb += (w * in22);
+ //accum_WCb += (w);
+
+ in22 = *(pu8Crc2+2);
+ accum_1Cr += (w * in22);
+ //accum_WCr += (w);
+
+ /* divide by sum of the weights */
+ //accum_1Cb /= (accum_WCb);
+ accum_1Cb = (accum_1Cb>>6);
+ *ptr8Cb = (mmUchar)accum_1Cb ;
+
+
+ accum_1Cr = (accum_1Cr >> 6);
+ *ptr8Cr = (mmUchar)accum_1Cr ;
+
+ ptr8Cb++;
+ ptr8Cr++;
+
+ ptr8Cb++;
+ ptr8Cr++;
+ }
+ ptr8Cb = ptr8Cb + (o_img_ptr->uStride-codx);
+ ptr8Cr = ptr8Cr + (o_img_ptr->uStride-codx);
+ }
+ ///////////////////For Cb- Cr////////////////////////////////////////
+ }
+ else
+ {
+ LOGE("eFormat not supported");
+ LOGV("VT_resizeFrame_Video_opt2_lp-");
+ return FALSE;
+ }
+ LOGV("success");
+ LOGV("VT_resizeFrame_Video_opt2_lp-");
+ return TRUE;
+}
diff --git a/SensorListener.cpp b/SensorListener.cpp
new file mode 100644
index 0000000..ab15b14
--- a/dev/null
+++ b/SensorListener.cpp
@@ -0,0 +1,233 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file SensorListener.cpp
+*
+* This file listens for and propagates sensor events to CameraHal.
+*
+*/
+
+#define LOG_TAG "CameraHAL"
+
+#include "SensorListener.h"
+#include "CameraHal.h"
+
+#include <stdint.h>
+#include <math.h>
+#include <sys/types.h>
+
+namespace android {
+
+/*** static declarations ***/
+static const float RADIANS_2_DEG = (float) (180 / M_PI);
+// measured values on device...might need tuning
+static const int DEGREES_90_THRESH = 50;
+static const int DEGREES_180_THRESH = 170;
+static const int DEGREES_270_THRESH = 250;
+
+static int sensor_events_listener(int fd, int events, void* data)
+{
+ SensorListener* listener = (SensorListener*) data;
+ ssize_t num_sensors;
+ ASensorEvent sen_events[8];
+ while ((num_sensors = listener->mSensorEventQueue->read(sen_events, 8)) > 0) {
+ for (int i = 0; i < num_sensors; i++) {
+ if (sen_events[i].type == Sensor::TYPE_ACCELEROMETER) {
+ float x = sen_events[i].vector.azimuth;
+ float y = sen_events[i].vector.pitch;
+ float z = sen_events[i].vector.roll;
+ float radius = 0;
+ int tilt = 0, orient = 0;
+
+ /*CAMHAL_LOGVA("ACCELEROMETER EVENT");
+ CAMHAL_LOGVB(" azimuth = %f pitch = %f roll = %f",
+ sen_events[i].vector.azimuth,
+ sen_events[i].vector.pitch,
+ sen_events[i].vector.roll);*/
+ // see http://en.wikipedia.org/wiki/Spherical_coordinate_system#Cartesian_coordinates
+ // about conversion from cartesian to spherical for orientation calculations
+ radius = (float) sqrt(x * x + y * y + z * z);
+ tilt = (int) (asinf(z / radius) * RADIANS_2_DEG);
+ orient = (int) (atan2f(-x, y) * RADIANS_2_DEG);
+
+ if (orient < 0) {
+ orient += 360;
+ }
+
+ if (orient >= DEGREES_270_THRESH) {
+ orient = 270;
+ } else if (orient >= DEGREES_180_THRESH) {
+ orient = 180;
+ } else if (orient >= DEGREES_90_THRESH) {
+ orient = 90;
+ } else {
+ orient = 0;
+ }
+ listener->handleOrientation(orient, tilt);
+ //CAMHAL_LOGVB(" tilt = %d orientation = %d", tilt, orient);
+ } else if (sen_events[i].type == Sensor::TYPE_GYROSCOPE) {
+ CAMHAL_LOGVA("GYROSCOPE EVENT");
+ }
+ }
+ }
+
+ if (num_sensors < 0 && num_sensors != -EAGAIN) {
+ CAMHAL_LOGEB("reading events failed: %s", strerror(-num_sensors));
+ }
+
+ return 1;
+}
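+
+// Roughly, the math above converts the accelerometer vector to spherical
+// coordinates: tilt is near 0 when the device is held upright and approaches 90
+// when it lies flat, while orient is the screen rotation snapped to 0/90/180/270
+// using the device-tuned thresholds declared at the top of this file.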
+
+/****** public - member functions ******/
+SensorListener::SensorListener() {
+ LOG_FUNCTION_NAME;
+
+ sensorsEnabled = 0;
+ mOrientationCb = NULL;
+ mSensorEventQueue = NULL;
+ mSensorLooperThread = NULL;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+SensorListener::~SensorListener() {
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGDA("Kill looper thread");
+ if (mSensorLooperThread.get()) {
+ // 1. Request exit
+ // 2. Wake up looper which should be polling for an event
+ // 3. Wait for exit
+ mSensorLooperThread->requestExit();
+ mSensorLooperThread->wake();
+ mSensorLooperThread->join();
+ mSensorLooperThread.clear();
+ mSensorLooperThread = NULL;
+ }
+
+ CAMHAL_LOGDA("Kill looper");
+ if (mLooper.get()) {
+ mLooper->removeFd(mSensorEventQueue->getFd());
+ mLooper.clear();
+ mLooper = NULL;
+ }
+ CAMHAL_LOGDA("SensorListener destroyed");
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t SensorListener::initialize() {
+ status_t ret = NO_ERROR;
+ SensorManager& mgr(SensorManager::getInstance());
+
+ LOG_FUNCTION_NAME;
+
+ sp<Looper> mLooper;
+
+ mSensorEventQueue = mgr.createEventQueue();
+ if (mSensorEventQueue == NULL) {
+ CAMHAL_LOGEA("createEventQueue returned NULL");
+ ret = NO_INIT;
+ goto out;
+ }
+
+ mLooper = new Looper(false);
+ mLooper->addFd(mSensorEventQueue->getFd(), 0, ALOOPER_EVENT_INPUT, sensor_events_listener, this);
+
+ if (mSensorLooperThread.get() == NULL)
+ mSensorLooperThread = new SensorLooperThread(mLooper.get());
+
+ if (mSensorLooperThread.get() == NULL) {
+ CAMHAL_LOGEA("Couldn't create sensor looper thread");
+ ret = NO_MEMORY;
+ goto out;
+ }
+
+ ret = mSensorLooperThread->run("sensor looper thread", PRIORITY_URGENT_DISPLAY);
+ if (ret == INVALID_OPERATION){
+ CAMHAL_LOGDA("thread already running ?!?");
+ } else if (ret != NO_ERROR) {
+ CAMHAL_LOGEA("couldn't run thread");
+ goto out;
+ }
+
+ out:
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+void SensorListener::setCallbacks(orientation_callback_t orientation_cb, void *cookie) {
+ LOG_FUNCTION_NAME;
+
+ if (orientation_cb) {
+ mOrientationCb = orientation_cb;
+ }
+ mCbCookie = cookie;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(&mLock);
+
+ if (mOrientationCb && (sensorsEnabled & SENSOR_ORIENTATION)) {
+ mOrientationCb(orientation, tilt, mCbCookie);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void SensorListener::enableSensor(sensor_type_t type) {
+ Sensor const* sensor;
+ SensorManager& mgr(SensorManager::getInstance());
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(&mLock);
+
+ if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) {
+ sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
+ CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
+ mSensorEventQueue->enableSensor(sensor);
+ mSensorEventQueue->setEventRate(sensor, ms2ns(100));
+ sensorsEnabled |= SENSOR_ORIENTATION;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void SensorListener::disableSensor(sensor_type_t type) {
+ Sensor const* sensor;
+ SensorManager& mgr(SensorManager::getInstance());
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(&mLock);
+
+ if ((type & SENSOR_ORIENTATION) && (sensorsEnabled & SENSOR_ORIENTATION)) {
+ sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
+ CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
+ mSensorEventQueue->disableSensor(sensor);
+ sensorsEnabled &= ~SENSOR_ORIENTATION;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+} // namespace android
diff --git a/TICameraParameters.cpp b/TICameraParameters.cpp
new file mode 100644
index 0000000..221cff4
--- a/dev/null
+++ b/TICameraParameters.cpp
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#define LOG_TAG "CameraHAL"
+#include <utils/Log.h>
+
+#include <string.h>
+#include <stdlib.h>
+#include <TICameraParameters.h>
+#include "CameraHal.h"
+
+namespace android {
+
+//TI extensions to camera mode
+const char TICameraParameters::HIGH_PERFORMANCE_MODE[] = "high-performance";
+const char TICameraParameters::HIGH_QUALITY_MODE[] = "high-quality";
+const char TICameraParameters::HIGH_QUALITY_ZSL_MODE[] = "high-quality-zsl";
+const char TICameraParameters::VIDEO_MODE[] = "video-mode";
+
+// TI extensions to standard android Parameters
+const char TICameraParameters::KEY_SUPPORTED_CAMERAS[] = "camera-indexes";
+const char TICameraParameters::KEY_CAMERA[] = "camera-index";
+const char TICameraParameters::KEY_SHUTTER_ENABLE[] = "shutter-enable";
+const char TICameraParameters::KEY_CAMERA_NAME[] = "camera-name";
+const char TICameraParameters::KEY_BURST[] = "burst-capture";
+const char TICameraParameters::KEY_CAP_MODE[] = "mode";
+const char TICameraParameters::KEY_VNF[] = "vnf";
+const char TICameraParameters::KEY_SATURATION[] = "saturation";
+const char TICameraParameters::KEY_BRIGHTNESS[] = "brightness";
+const char TICameraParameters::KEY_EXPOSURE_MODE[] = "exposure";
+const char TICameraParameters::KEY_SUPPORTED_EXPOSURE[] = "exposure-mode-values";
+const char TICameraParameters::KEY_CONTRAST[] = "contrast";
+const char TICameraParameters::KEY_SHARPNESS[] = "sharpness";
+const char TICameraParameters::KEY_ISO[] = "iso";
+const char TICameraParameters::KEY_SUPPORTED_ISO_VALUES[] = "iso-mode-values";
+const char TICameraParameters::KEY_SUPPORTED_IPP[] = "ipp-values";
+const char TICameraParameters::KEY_IPP[] = "ipp";
+const char TICameraParameters::KEY_MAN_EXPOSURE[] = "manual-exposure";
+const char TICameraParameters::KEY_METERING_MODE[] = "meter-mode";
+const char TICameraParameters::KEY_PADDED_WIDTH[] = "padded-width";
+const char TICameraParameters::KEY_PADDED_HEIGHT[] = "padded-height";
+const char TICameraParameters::KEY_EXP_BRACKETING_RANGE[] = "exp-bracketing-range";
+const char TICameraParameters::KEY_TEMP_BRACKETING[] = "temporal-bracketing";
+const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS[] = "temporal-bracketing-range-positive";
+const char TICameraParameters::KEY_TEMP_BRACKETING_RANGE_NEG[] = "temporal-bracketing-range-negative";
+const char TICameraParameters::KEY_S3D_SUPPORTED[] = "s3d-supported";
+const char TICameraParameters::KEY_MEASUREMENT_ENABLE[] = "measurement";
+const char TICameraParameters::KEY_GBCE[] = "gbce";
+const char TICameraParameters::KEY_GLBCE[] = "glbce";
+const char TICameraParameters::KEY_CURRENT_ISO[] = "current-iso";
+const char TICameraParameters::KEY_SENSOR_ORIENTATION[] = "sensor-orientation";
+const char TICameraParameters::KEY_SENSOR_ORIENTATION_VALUES[] = "sensor-orientation-values";
+const char TICameraParameters::KEY_MINFRAMERATE[] = "min-framerate";
+const char TICameraParameters::KEY_MAXFRAMERATE[] = "max-framerate";
+const char TICameraParameters::KEY_RECORDING_HINT[] = "internal-recording-hint";
+const char TICameraParameters::KEY_AUTO_FOCUS_LOCK[] = "auto-focus-lock";
+
+//TI extensions for enabling/disabling GLBCE
+const char TICameraParameters::GLBCE_ENABLE[] = "enable";
+const char TICameraParameters::GLBCE_DISABLE[] = "disable";
+
+//TI extensions for enabling/disabling GBCE
+const char TICameraParameters::GBCE_ENABLE[] = "enable";
+const char TICameraParameters::GBCE_DISABLE[] = "disable";
+
+//TI extensions for enabling/disabling measurement
+const char TICameraParameters::MEASUREMENT_ENABLE[] = "enable";
+const char TICameraParameters::MEASUREMENT_DISABLE[] = "disable";
+
+//TI extensions for zoom
+const char TICameraParameters::ZOOM_SUPPORTED[] = "true";
+const char TICameraParameters::ZOOM_UNSUPPORTED[] = "false";
+
+// TI extensions for 2D Preview in Stereo Mode
+const char TICameraParameters::KEY_S3D2D_PREVIEW[] = "s3d2d-preview";
+const char TICameraParameters::KEY_S3D2D_PREVIEW_MODE[] = "s3d2d-preview-values";
+
+//TI extensions for SAC/SMC
+const char TICameraParameters::KEY_AUTOCONVERGENCE[] = "auto-convergence";
+const char TICameraParameters::KEY_AUTOCONVERGENCE_MODE[] = "auto-convergence-mode";
+const char TICameraParameters::KEY_MANUALCONVERGENCE_VALUES[] = "manual-convergence-values";
+
+//TI extensions for setting EXIF tags
+const char TICameraParameters::KEY_EXIF_MODEL[] = "exif-model";
+const char TICameraParameters::KEY_EXIF_MAKE[] = "exif-make";
+
+//TI extensions for additional GPS data
+const char TICameraParameters::KEY_GPS_MAPDATUM[] = "gps-mapdatum";
+const char TICameraParameters::KEY_GPS_VERSION[] = "gps-version";
+const char TICameraParameters::KEY_GPS_DATESTAMP[] = "gps-datestamp";
+
+//TI extensions for enabling/disabling shutter sound
+const char TICameraParameters::SHUTTER_ENABLE[] = "true";
+const char TICameraParameters::SHUTTER_DISABLE[] = "false";
+
+//TI extensions for Temporal Bracketing
+const char TICameraParameters::BRACKET_ENABLE[] = "enable";
+const char TICameraParameters::BRACKET_DISABLE[] = "disable";
+
+//TI extensions to Image post-processing
+const char TICameraParameters::IPP_LDCNSF[] = "ldc-nsf";
+const char TICameraParameters::IPP_LDC[] = "ldc";
+const char TICameraParameters::IPP_NSF[] = "nsf";
+const char TICameraParameters::IPP_NONE[] = "off";
+
+// TI extensions to standard android pixel formats
+const char TICameraParameters::PIXEL_FORMAT_RAW[] = "raw";
+const char TICameraParameters::PIXEL_FORMAT_JPS[] = "jps";
+const char TICameraParameters::PIXEL_FORMAT_MPO[] = "mpo";
+const char TICameraParameters::PIXEL_FORMAT_RAW_JPEG[] = "raw+jpeg";
+const char TICameraParameters::PIXEL_FORMAT_RAW_MPO[] = "raw+mpo";
+
+// TI extensions to standard android scene mode settings
+const char TICameraParameters::SCENE_MODE_SPORT[] = "sport";
+const char TICameraParameters::SCENE_MODE_CLOSEUP[] = "closeup";
+const char TICameraParameters::SCENE_MODE_AQUA[] = "aqua";
+const char TICameraParameters::SCENE_MODE_SNOWBEACH[] = "snow-beach";
+const char TICameraParameters::SCENE_MODE_MOOD[] = "mood";
+const char TICameraParameters::SCENE_MODE_NIGHT_INDOOR[] = "night-indoor";
+const char TICameraParameters::SCENE_MODE_DOCUMENT[] = "document";
+const char TICameraParameters::SCENE_MODE_BARCODE[] = "barcode";
+const char TICameraParameters::SCENE_MODE_VIDEO_SUPER_NIGHT[] = "super-night";
+const char TICameraParameters::SCENE_MODE_VIDEO_CINE[] = "cine";
+const char TICameraParameters::SCENE_MODE_VIDEO_OLD_FILM[] = "old-film";
+
+// TI extensions to standard android white balance values.
+const char TICameraParameters::WHITE_BALANCE_TUNGSTEN[] = "tungsten";
+const char TICameraParameters::WHITE_BALANCE_HORIZON[] = "horizon";
+const char TICameraParameters::WHITE_BALANCE_SUNSET[] = "sunset";
+const char TICameraParameters::WHITE_BALANCE_FACE[] = "face-priority";
+
+// TI extensions to standard android focus modes.
+const char TICameraParameters::FOCUS_MODE_PORTRAIT[] = "portrait";
+const char TICameraParameters::FOCUS_MODE_EXTENDED[] = "extended";
+const char TICameraParameters::FOCUS_MODE_FACE[] = "face-priority";
+
+// TI extensions to add values for effect settings.
+const char TICameraParameters::EFFECT_NATURAL[] = "natural";
+const char TICameraParameters::EFFECT_VIVID[] = "vivid";
+const char TICameraParameters::EFFECT_COLOR_SWAP[] = "color-swap";
+const char TICameraParameters::EFFECT_BLACKWHITE[] = "blackwhite";
+
+// TI extensions to add exposure preset modes
+const char TICameraParameters::EXPOSURE_MODE_OFF[] = "off";
+const char TICameraParameters::EXPOSURE_MODE_AUTO[] = "auto";
+const char TICameraParameters::EXPOSURE_MODE_NIGHT[] = "night";
+const char TICameraParameters::EXPOSURE_MODE_BACKLIGHT[] = "backlighting";
+const char TICameraParameters::EXPOSURE_MODE_SPOTLIGHT[] = "spotlight";
+const char TICameraParameters::EXPOSURE_MODE_SPORTS[] = "sports";
+const char TICameraParameters::EXPOSURE_MODE_SNOW[] = "snow";
+const char TICameraParameters::EXPOSURE_MODE_BEACH[] = "beach";
+const char TICameraParameters::EXPOSURE_MODE_APERTURE[] = "aperture";
+const char TICameraParameters::EXPOSURE_MODE_SMALL_APERTURE[] = "small-aperture";
+const char TICameraParameters::EXPOSURE_MODE_FACE[] = "face-priority";
+
+// TI extensions to add iso values
+const char TICameraParameters::ISO_MODE_AUTO[] = "auto";
+const char TICameraParameters::ISO_MODE_100[] = "100";
+const char TICameraParameters::ISO_MODE_200[] = "200";
+const char TICameraParameters::ISO_MODE_400[] = "400";
+const char TICameraParameters::ISO_MODE_800[] = "800";
+const char TICameraParameters::ISO_MODE_1000[] = "1000";
+const char TICameraParameters::ISO_MODE_1200[] = "1200";
+const char TICameraParameters::ISO_MODE_1600[] = "1600";
+
+// TI extensions to add auto convergence values
+const char TICameraParameters::AUTOCONVERGENCE_MODE_DISABLE[] = "mode-disable";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_FRAME[] = "mode-frame";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_CENTER[] = "mode-center";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_FFT[] = "mode-fft";
+const char TICameraParameters::AUTOCONVERGENCE_MODE_MANUAL[] = "mode-manual";
+
+//TI values for camera direction
+const char TICameraParameters::FACING_FRONT[]="front";
+const char TICameraParameters::FACING_BACK[]="back";
+
+//TI extensions to flash settings
+const char TICameraParameters::FLASH_MODE_FILL_IN[] = "fill-in";
+
+//TI extensions to add sensor orientation parameters
+const char TICameraParameters::ORIENTATION_SENSOR_NONE[] = "0";
+const char TICameraParameters::ORIENTATION_SENSOR_90[] = "90";
+const char TICameraParameters::ORIENTATION_SENSOR_180[] = "180";
+const char TICameraParameters::ORIENTATION_SENSOR_270[] = "270";
+};
+
diff --git a/V4L2/CameraSetting.cpp b/V4L2/CameraSetting.cpp
deleted file mode 100755
index cd180be..0000000
--- a/V4L2/CameraSetting.cpp
+++ b/dev/null
@@ -1,226 +0,0 @@
-
-#include <camera/CameraHardwareInterface.h>
-#include "CameraSetting.h"
-#include "amlogic_camera_para.h"
-
-extern "C" {
-int set_white_balance(int camera_fd,const char *swb);
-int SetExposure(int camera_fd,const char *sbn);
-int set_effect(int camera_fd,const char *sef);
-int set_banding(int camera_fd,const char *snm);
-#ifdef AMLOGIC_FLASHLIGHT_SUPPORT
-int set_flash_mode(const char *sfm);
-#endif
-}
-namespace android {
-status_t CameraSetting::InitParameters(CameraParameters& pParameters,String8 PrevFrameSize,String8 PicFrameSize)
-{
- LOGE("use default InitParameters");
- //set the limited & the default parameter
-//==========================must set parameter for CTS will check them
- pParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,CameraParameters::PIXEL_FORMAT_YUV420SP);
- pParameters.setPreviewFormat(CameraParameters::PIXEL_FORMAT_YUV420SP);
-
- pParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, CameraParameters::PIXEL_FORMAT_JPEG);
- pParameters.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG);
-
- pParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,"15,20");
- pParameters.setPreviewFrameRate(15);
-
- //must have >2 sizes and contain "0x0"
- pParameters.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, "180x160,0x0");
- pParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, 180);
- pParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, 160);
-
- pParameters.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES,CameraParameters::FOCUS_MODE_FIXED);
- pParameters.set(CameraParameters::KEY_FOCUS_MODE,CameraParameters::FOCUS_MODE_FIXED);
-
- pParameters.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING,"50hz,60hz");
- pParameters.set(CameraParameters::KEY_ANTIBANDING,"50hz");
-
- pParameters.set(CameraParameters::KEY_FOCAL_LENGTH,"4.31");
-
- pParameters.set(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE,"54.8");
- pParameters.set(CameraParameters::KEY_VERTICAL_VIEW_ANGLE,"42.5");
-
-//==========================
-
- pParameters.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE,"auto,daylight,incandescent,fluorescent");
- pParameters.set(CameraParameters::KEY_WHITE_BALANCE,"auto");
-
- pParameters.set(CameraParameters::KEY_SUPPORTED_EFFECTS,"none,negative,sepia");
- pParameters.set(CameraParameters::KEY_EFFECT,"none");
-#ifdef AMLOGIC_FLASHLIGHT_SUPPORT
- pParameters.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES,"on,off,torch");
- pParameters.set(CameraParameters::KEY_FLASH_MODE,"on");
-#endif
- //pParameters.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES,"auto,night,snow");
- //pParameters.set(CameraParameters::KEY_SCENE_MODE,"auto");
-
- pParameters.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION,4);
- pParameters.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION,-4);
- pParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP,1);
- pParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION,0);
-
-#if 1
- pParameters.set(CameraParameters::KEY_ZOOM_SUPPORTED,CameraParameters::TRUE);
- pParameters.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED,1);
-
- pParameters.set(CameraParameters::KEY_ZOOM_RATIOS,"100,120,140,160,180,200,220,280,300");
- pParameters.set(CameraParameters::KEY_MAX_ZOOM,8); //think the zoom ratios as a array, the max zoom is the max index
- pParameters.set(CameraParameters::KEY_ZOOM,0);//default should be 0
-#endif
- pParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,PrevFrameSize);
- if(PrevFrameSize.find("800x600") != -1)
- pParameters.setPreviewSize(800, 600);
- else
- pParameters.setPreviewSize(640, 480);
- pParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, PicFrameSize);
- if(PrevFrameSize.find("1600x1200") != -1)
- pParameters.setPictureSize(1600, 1200);
- else
- pParameters.setPictureSize(640, 480);
- pParameters.set(CameraParameters::KEY_JPEG_QUALITY,90);
- pParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY,90);
-
- pParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE,"10500,26623");
- //pParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,"(10500,26623),(12000,26623),(30000,30000)");
- pParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,"(10500,26623)");
- pParameters.set(CameraParameters::KEY_FOCUS_DISTANCES,"0.95,1.9,Infinity");
- return NO_ERROR;
-}
-
-
-//write parameter to v4l2 driver,
-//check parameter if valid, if un-valid first should correct it ,and return the INVALID_OPERTIONA
-status_t CameraSetting::SetParameters(CameraParameters& pParameters)
-{
- LOGE("use default SetParameters");
- status_t rtn = NO_ERROR;
- //check zoom value
- int zoom = pParameters.getInt(CameraParameters::KEY_ZOOM);
- int maxzoom = pParameters.getInt(CameraParameters::KEY_MAX_ZOOM);
- if((zoom > maxzoom) || (zoom < 0))
- {
- rtn = INVALID_OPERATION;
- pParameters.set(CameraParameters::KEY_ZOOM, maxzoom);
- }
-
- m_hParameter = pParameters;
- int min_fps,max_fps;
- const char *white_balance=NULL;
- const char *exposure=NULL;
- const char *effect=NULL;
- //const char *night_mode=NULL;
- const char *qulity=NULL;
- const char *banding=NULL;
- const char *flashmode=NULL;
-
- white_balance=pParameters.get(CameraParameters::KEY_WHITE_BALANCE);
- exposure=pParameters.get(CameraParameters::KEY_EXPOSURE_COMPENSATION);
- effect=pParameters.get(CameraParameters::KEY_EFFECT);
- banding=pParameters.get(CameraParameters::KEY_ANTIBANDING);
- qulity=pParameters.get(CameraParameters::KEY_JPEG_QUALITY);
- flashmode = pParameters.get(CameraParameters::KEY_FLASH_MODE);
- if(exposure)
- SetExposure(m_iDevFd,exposure);
- if(white_balance)
- set_white_balance(m_iDevFd,white_balance);
- if(effect)
- set_effect(m_iDevFd,effect);
- if(banding)
- set_banding(m_iDevFd,banding);
-#ifdef AMLOGIC_FLASHLIGHT_SUPPORT
- if(flashmode)
- set_flash_mode(flashmode);
-#endif
- pParameters.getPreviewFpsRange(&min_fps, &max_fps);
- if((min_fps<0)||(max_fps<0)||(max_fps<min_fps))
- {
- rtn = INVALID_OPERATION;
- }
- return rtn;
-}
-
-static char* sCameraMirrorMode[] = {
-#ifdef AMLOGIC_BACK_CAMERA_SUPPORT
- "Disble", // cam id 0
-#endif
-#ifdef AMLOGIC_FRONT_CAMERA_SUPPORT
- "Enable", // cam id 1
-#endif
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- "Disble",
-#endif
- NULL
-};
-
-
-const char* CameraSetting::GetInfo(int InfoId)
-{
- LOGE("use default GetInfo");
- switch(InfoId)
- {
- /*case CAMERA_EXIF_MAKE:
- return "Amlogic";
- case CAMERA_EXIF_MODEL:
- return "DEFUALT";*/
- case CAMERA_MIRROR_MODE:
- return (const char*)sCameraMirrorMode[m_iCamId];
- default:
- return NULL;
- }
-}
-
-/*int CameraSetting::GetDelay(int Processid)
-{
- if(CAMERA_PREVIEW == Processid)
- return 1000;
- else
- return 0;
-}*/
-
-CameraSetting* getCameraSetting()
-{
- LOGE("return default CameraSetting");
- return new CameraSetting();
-}
-
-static CameraInfo sCameraInfo[] = {
-#ifdef AMLOGIC_BACK_CAMERA_SUPPORT
- {
- CAMERA_FACING_BACK,
- 90, /* orientation */
- },
-#endif
-#ifdef AMLOGIC_FRONT_CAMERA_SUPPORT
-#ifdef AMLOGIC_BACK_CAMERA_SUPPORT
- {
- CAMERA_FACING_FRONT,
- 270, /* orientation */
- },
-#else //make a virtual back-faceing camera for cts while only front-faceing camera
- {
- CAMERA_FACING_BACK,
- 270, /* orientation */
- },
-#endif //AMLOGIC_BACK_CAMERA_SUPPORT
-#endif //AMLOGIC_FRONT_CAMERA_SUPPORT
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- {
- CAMERA_USB,
- 0, /* orientation */
- },
-#endif
-};
-
-extern "C" int HAL_getNumberOfCameras()
-{
- return sizeof(sCameraInfo) / sizeof(sCameraInfo[0]);
-}
-
-extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo)
-{
- memcpy(cameraInfo, &sCameraInfo[cameraId], sizeof(CameraInfo));
-}
-}
diff --git a/V4L2/CameraSetting.h b/V4L2/CameraSetting.h
deleted file mode 100755
index 418fada..0000000
--- a/V4L2/CameraSetting.h
+++ b/dev/null
@@ -1,43 +0,0 @@
-#ifndef AMLOGIC_CAMERA_CUSTOMIZE_SETTING
-#define AMLOGIC_CAMERA_CUSTOMIZE_SETTING
-
-
-#include <camera/CameraParameters.h>
-
-namespace android {
-
-//used as getinfo parameters
-//#define CAMERA_EXIF_MAKE (0)
-//#define CAMERA_EXIF_MODEL (1)
-#define CAMERA_MIRROR_MODE (2)
-
-//used as GetDelay parameters
-#define CAMERA_PREVIEW (0)
-
-
-class CameraSetting
-{
- public:
- CameraSetting(){m_pDevName = NULL;m_iDevFd = -1;m_iCamId = -1;}
- virtual ~CameraSetting(){if(m_pDevName) delete m_pDevName;}
- int m_iDevFd;
- int m_iCamId;
- char* m_pDevName;
- CameraParameters m_hParameter;
-
- virtual status_t InitParameters(CameraParameters& pParameters,String8 PrevFrameSize,String8 PicFrameSize);
- virtual status_t SetParameters(CameraParameters& pParameters);
- virtual const char* GetInfo(int InfoId);
-
- //return the time (in us) cost for process,this is used by preview thread to decide how many times to sleep between get frames
- //most time it is the cost time of function get previewframe.
- //virtual int GetDelay(int Processid);
-};
-
-CameraSetting* getCameraSetting();
-
-}
-
-
-
-#endif
diff --git a/V4L2/OpCameraHardware.c b/V4L2/OpCameraHardware.c
deleted file mode 100755
index d54fb22..0000000
--- a/V4L2/OpCameraHardware.c
+++ b/dev/null
@@ -1,223 +0,0 @@
-#define LOG_NDEBUG 0
-//#define NDEBUG 0
-#define LOG_TAG "OpCameraHardware"
-#include <utils/Log.h>
-#include "amlogic_camera_para.h"
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <sys/ioctl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <string.h>
-#include <errno.h>
-#include <cutils/properties.h>
-#include <linux/videodev2.h>
-#ifdef AMLOGIC_FLASHLIGHT_SUPPORT
-#include <media/amlogic/flashlight.h>
-#include <stdbool.h>
-int get_flash_mode(void);
-#endif
-
-int set_white_balance(int camera_fd,const char *swb)
-{
- int ret = 0;
- struct v4l2_control ctl;
- if(camera_fd<0)
- return -1;
-
- ctl.id = V4L2_CID_DO_WHITE_BALANCE;
-
- if(strcasecmp(swb,"auto")==0)
- ctl.value=CAM_WB_AUTO;
- else if(strcasecmp(swb,"daylight")==0)
- ctl.value=CAM_WB_DAYLIGHT;
- else if(strcasecmp(swb,"incandescent")==0)
- ctl.value=CAM_WB_INCANDESCENCE;
- else if(strcasecmp(swb,"fluorescent")==0)
- ctl.value=CAM_WB_FLUORESCENT;
-
- if(ioctl(camera_fd, VIDIOC_S_CTRL, &ctl)<0)
- {
- ret = -1;
- LOGV("AMLOGIC CAMERA SetParametersToDriver fail !! ");
- }
- return ret ;
-}
-
-int SetExposure(int camera_fd,const char *sbn)
-{
- int ret = 0;
- struct v4l2_control ctl;
- if(camera_fd<0)
- return -1;
-
- ctl.id = V4L2_CID_EXPOSURE;
-
- if(strcasecmp(sbn,"4")==0)
- ctl.value=EXPOSURE_P4_STEP;
- else if(strcasecmp(sbn,"3")==0)
- ctl.value=EXPOSURE_P3_STEP;
- else if(strcasecmp(sbn,"2")==0)
- ctl.value=EXPOSURE_P2_STEP;
- else if(strcasecmp(sbn,"1")==0)
- ctl.value=EXPOSURE_P1_STEP;
- else if(strcasecmp(sbn,"0")==0)
- ctl.value=EXPOSURE_0_STEP;
- else if(strcasecmp(sbn,"-1")==0)
- ctl.value=EXPOSURE_N1_STEP;
- else if(strcasecmp(sbn,"-2")==0)
- ctl.value=EXPOSURE_N2_STEP;
- else if(strcasecmp(sbn,"-3")==0)
- ctl.value=EXPOSURE_N3_STEP;
- else if(strcasecmp(sbn,"-4")==0)
- ctl.value=EXPOSURE_N4_STEP;
-
- if(ioctl(camera_fd, VIDIOC_S_CTRL, &ctl)<0)
- {
- ret = -1;
- LOGV("AMLOGIC CAMERA SetParametersToDriver fail !! ");
- }
-
- return ret ;
-}
-
-
-int set_effect(int camera_fd,const char *sef)
-{
- int ret = 0;
- struct v4l2_control ctl;
- if(camera_fd<0)
- return -1;
-
- ctl.id = V4L2_CID_COLORFX;
-
- if(strcasecmp(sef,"none")==0)
- ctl.value=CAM_EFFECT_ENC_NORMAL;
- else if(strcasecmp(sef,"negative")==0)
- ctl.value=CAM_EFFECT_ENC_COLORINV;
- else if(strcasecmp(sef,"sepia")==0)
- ctl.value=CAM_EFFECT_ENC_SEPIA;
-
- if(ioctl(camera_fd, VIDIOC_S_CTRL, &ctl)<0)
- {
- ret = -1;
- LOGV("AMLOGIC CAMERA SetParametersToDriver fail !! ");
- }
-
- return ret ;
-}
-
-int set_night_mode(int camera_fd,const char *snm)
-{
- int ret = 0;
- struct v4l2_control ctl;
- if(camera_fd<0)
- return -1;
-
- if(strcasecmp(snm,"auto")==0)
- ctl.value=CAM_NM_AUTO;
- else if(strcasecmp(snm,"night")==0)
- ctl.value=CAM_NM_ENABLE;
-
- ctl.id = V4L2_CID_DO_WHITE_BALANCE;
-
- if(ioctl(camera_fd, VIDIOC_S_CTRL, &ctl)<0)
- {
- ret = -1;
- LOGV("AMLOGIC CAMERA SetParametersToDriver fail !! ");
- }
- return ret ;
-}
-
-int set_banding(int camera_fd,const char *snm)
-{
- int ret = 0;
- struct v4l2_control ctl;
- if(camera_fd<0)
- return -1;
-
- if(strcasecmp(snm,"50hz")==0)
- ctl.value=CAM_NM_AUTO;
- else if(strcasecmp(snm,"60hz")==0)
- ctl.value=CAM_NM_ENABLE;
-
- ctl.id = V4L2_CID_WHITENESS;
-
- if(ioctl(camera_fd, VIDIOC_S_CTRL, &ctl)<0)
- {
- ret = -1;
- LOGV("AMLOGIC CAMERA SetParametersToDriver fail !! ");
- }
- return ret ;
-}
-
-#ifdef AMLOGIC_FLASHLIGHT_SUPPORT
-int set_flash(bool mode)
-{
- int flag = 0;
- FILE* fp = NULL;
- if(mode){
- flag = get_flash_mode();
- if(flag == FLASHLIGHT_OFF ||flag == FLASHLIGHT_AUTO)//handle AUTO case on camera driver
- return 0;
- else if(flag == -1)
- return -1;
- }
- fp = fopen("/sys/class/flashlight/flashlightctrl","w");
- if(fp == NULL){
- LOGE("open file fail\n");
- return -1;
- }
- fputc((int)(mode+'0'),fp);
- fclose(fp);
- return 0;
-}
-
-int set_flash_mode(const char *sfm)
-{
- int value = 0;
- FILE* fp = NULL;
- if(strcasecmp(sfm,"auto")==0)
- value=FLASHLIGHT_AUTO;
- else if(strcasecmp(sfm,"on")==0)
- value=FLASHLIGHT_ON;
- else if(strcasecmp(sfm,"off")==0)
- value=FLASHLIGHT_OFF;
-	else if(strcasecmp(sfm,"torch")==0)
-		value=FLASHLIGHT_TORCH;
- else
- value=FLASHLIGHT_OFF;
-
- fp = fopen("/sys/class/flashlight/flashlightflag","w");
- if(fp == NULL){
- LOGE("open file fail\n");
- return -1;
- }
- fputc((int)(value+'0'),fp);
- fclose(fp);
- if(value == FLASHLIGHT_TORCH)//open flashlight immediately
- set_flash(true);
- else if(value == FLASHLIGHT_OFF)
- set_flash(false);
- return 0 ;
-}
-
-int get_flash_mode(void)
-{
- int value = 0;
- FILE* fp = NULL;
-
- fp = fopen("/sys/class/flashlight/flashlightflag","r");
- if(fp == NULL){
- LOGE("open file fail\n");
- return -1;
- }
- value=fgetc(fp);
- fclose(fp);
- return value-'0';
-}
-#endif
-
diff --git a/V4L2/V4L2Camera.cpp b/V4L2/V4L2Camera.cpp
deleted file mode 100755
index a7a5cf7..0000000
--- a/V4L2/V4L2Camera.cpp
+++ b/dev/null
@@ -1,776 +0,0 @@
-//#define NDEBUG 0
-#define LOG_TAG "V4L2Camera"
-#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-#include "V4L2Camera.h"
-#include <linux/videodev2.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <sys/ioctl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <string.h>
-#include <errno.h>
-
-#include <jpegenc/amljpeg_enc.h>
-#include <cutils/properties.h>
-#include <sys/time.h>
-#include <unistd.h>
-
-extern "C" unsigned char* getExifBuf(char** attrlist, int attrCount, int* exifLen,int thumbnailLen,char* thumbnaildata);
-#include "../util.h"
-namespace android {
-
-#define V4L2_PREVIEW_BUFF_NUM (6)
-#define V4L2_TAKEPIC_BUFF_NUM (1)
-
-
-V4L2Camera::V4L2Camera(char* devname,int camid)
-{
- m_pSetting = getCameraSetting();
-
- int namelen = strlen(devname)+1;
- m_pSetting->m_pDevName = new char[namelen];
- strcpy(m_pSetting->m_pDevName,devname);
- m_pSetting->m_iCamId = camid;
-
- m_V4L2BufNum = 0;
- pV4L2Frames = NULL;
- pV4L2FrameSize = NULL;
- m_iPicIdx = -1;
- HAL_getCameraInfo(camid,&m_curCameraInfo);
-}
-
-V4L2Camera::~V4L2Camera()
-{
- delete m_pSetting;
-}
-
-#define SYSFILE_CAMERA_SET_PARA "/sys/class/vm/attr2"
-#define SYSFILE_CAMERA_SET_MIRROR "/sys/class/vm/mirror"
-static int writefile(char* path,char* content)
-{
-#ifndef AMLOGIC_USB_CAMERA_SUPPORT
- FILE* fp = fopen(path, "w+");
-
- LOGD("Write file %s(%p) content %s", path, fp, content);
-
- if (fp) {
- while( ((*content) != '\0') ) {
- if (EOF == fputc(*content,fp))
- LOGD("write char fail");
- content++;
- }
-
- fclose(fp);
- }
- else
- LOGD("open file fail\n");
-#endif
- return 1;
-}
-
-status_t V4L2Camera::Open()
-{
- struct v4l2_control ctl;
- int temp_id=-1;
- if((m_pSetting->m_iCamId==1)&&(m_pSetting->m_iDevFd == -1)){
- LOGD("*****open %s\n", "video0+++");
- temp_id = open("/dev/video0", O_RDWR);
- if (temp_id >=0){
- LOGD("*****open %s success %d \n", "video0+++",temp_id);
- writefile(SYSFILE_CAMERA_SET_PARA,"1");
- close(temp_id);
- usleep(100);
- }
- }
-
- if(m_pSetting->m_iDevFd == -1){
- m_pSetting->m_iDevFd = open(m_pSetting->m_pDevName, O_RDWR);
- if (m_pSetting->m_iDevFd != -1){
- LOGD("open %s success %d \n", m_pSetting->m_pDevName,m_pSetting->m_iDevFd);
-
-# ifndef AMLOGIC_USB_CAMERA_SUPPORT
- const char* mirror = m_pSetting->GetInfo(CAMERA_MIRROR_MODE);
- if((mirror)&&((strcasecmp(mirror, "1")==0)||(strcasecmp(mirror, "enable")==0)||(strcasecmp(mirror, "true")==0))){
- LOGD("*****Enable camera %d L-R mirror",m_pSetting->m_iCamId);
- writefile(SYSFILE_CAMERA_SET_MIRROR,"1");
- }else{
- LOGD("*****Enable camera %d normal mode",m_pSetting->m_iCamId);
- writefile(SYSFILE_CAMERA_SET_MIRROR,"0");
- }
-# endif
-
- return NO_ERROR;
- }else{
- LOGD("open %s fail\n",m_pSetting->m_pDevName);
- return UNKNOWN_ERROR;
- }
- }
- return NO_ERROR;
-}
-status_t V4L2Camera::Close()
-{
- if(m_pSetting->m_iDevFd != -1)
- {
- close(m_pSetting->m_iDevFd);
- m_pSetting->m_iDevFd = -1;
- }
- return NO_ERROR;
-}
-
-status_t V4L2Camera::InitParameters(CameraParameters& pParameters)
-{
- String8 prevFrameSize(""),picFrameSize("");
- int ret = NO_ERROR,prevPixelFmt,picPixelFmt;
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- prevPixelFmt = V4L2_PIX_FMT_YUYV;
- picPixelFmt = V4L2_PIX_FMT_YUYV;
-#else
- prevPixelFmt = V4L2_PIX_FMT_NV21;
- picPixelFmt = V4L2_PIX_FMT_RGB24;
-#endif
- ret = V4L2_GetValidFrameSize(prevPixelFmt,prevFrameSize);
- if(ret != NO_ERROR)
- return ret;
- ret = V4L2_GetValidFrameSize(picPixelFmt,picFrameSize);
- if(ret != NO_ERROR)
- return ret;
- ret = m_pSetting->InitParameters(pParameters,prevFrameSize,picFrameSize);
- return ret;
-}
-
-//write parameters to the v4l2 driver;
-//check whether the parameters are valid; if not, correct them first and return INVALID_OPERATION
-status_t V4L2Camera::SetParameters(CameraParameters& pParameters)
-{
- return m_pSetting->SetParameters(pParameters);
-}
-
-status_t V4L2Camera::StartPreview()
-{
- int w,h,pixelFormat;
- m_bFirstFrame = true;
- if(m_pSetting != NULL )
- m_pSetting->m_hParameter.getPreviewSize(&w,&h);
- else
- LOGE("%s,%d m_pSetting=NULL",__FILE__, __LINE__);
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- if(NO_ERROR != Open())
- return UNKNOWN_ERROR;
- pixelFormat = V4L2_PIX_FMT_YUYV;
-#else
- pixelFormat = V4L2_PIX_FMT_NV21;
-#endif
- if( (NO_ERROR == V4L2_BufferInit(w,h,V4L2_PREVIEW_BUFF_NUM,pixelFormat))
- && (V4L2_StreamOn() == NO_ERROR))
- return NO_ERROR;
- else
- return UNKNOWN_ERROR;
-}
-status_t V4L2Camera::StopPreview()
-{
- V4L2_StreamOff();
- V4L2_BufferUnInit();
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- Close();
-#endif
- return NO_ERROR;
-
-}
-
-status_t V4L2Camera::TakePicture()
-{
- int w,h,pixelFormat;
- m_pSetting->m_hParameter.getPictureSize(&w,&h);
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- if(NO_ERROR != Open())
- return UNKNOWN_ERROR;
- pixelFormat = V4L2_PIX_FMT_YUYV;
-#else
- pixelFormat = V4L2_PIX_FMT_RGB24;
-#endif
- if(NO_ERROR != V4L2_BufferInit(w,h,V4L2_TAKEPIC_BUFF_NUM,pixelFormat)) {
- LOGD(" open devices error in w:%d,h:%d,fmt:%x\n",w,h,pixelFormat);
- return UNKNOWN_ERROR;
- }
- V4L2_BufferEnQue(0);
- V4L2_StreamOn();
- m_iPicIdx = V4L2_BufferDeQue();
- V4L2_StreamOff();
- return NO_ERROR;
-}
-
-status_t V4L2Camera::TakePictureEnd()
-{
- int ret;
- m_iPicIdx = -1;
- ret=V4L2_BufferUnInit();
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- ret=Close();
-#endif
- return ret;
-}
-
-status_t V4L2Camera::GetPreviewFrame(uint8_t* framebuf)
-{
- int i=0;
- if(m_bFirstFrame)
- {
- for(i=0;i<V4L2_PREVIEW_BUFF_NUM;i++)
- V4L2_BufferEnQue(i);
- m_bFirstFrame = false;
- return NO_INIT;
- }
- else
- {
- int idx = V4L2_BufferDeQue();
- if (idx >= 0) {
- memcpy((char*)framebuf,pV4L2Frames[idx],pV4L2FrameSize[idx]);
- V4L2_BufferEnQue(idx);
- return NO_ERROR;
- }
- else {
- LOGE("V4L2Camera::GetPreviewFrame(%p) idx=%d\n", framebuf, idx);
- return NO_MEMORY;
- }
- }
-}
-
-status_t V4L2Camera::GetRawFrame(uint8_t* framebuf)
-{
- if(m_iPicIdx!=-1)
- {
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- int w,h;
- m_pSetting->m_hParameter.getPictureSize(&w,&h);
- yuyv422_to_rgb16((unsigned char*)pV4L2Frames[m_iPicIdx],framebuf,pV4L2FrameSize[m_iPicIdx]);
-#else
- memcpy(framebuf,pV4L2Frames[m_iPicIdx],pV4L2FrameSize[m_iPicIdx]);
-#endif
- }
- else
-		LOGD("GetRawFrame index -1");
- return NO_ERROR;
-}
-
-
-inline int CalIntLen(int content)
-{
- int len = 1;
- while( (content = content/10) > 0 ) len++;
- return len;
-}
-
-int extraSmallImg(unsigned char* SrcImg,int SrcW,int SrcH,unsigned char* DstImg,int DstW,int DstH)
-{
- int skipW = SrcW/DstW;
- int skipH = SrcH/DstH;
-
- //LOGD("skipw = %d, skipH=%d",skipW,skipH);
-
- unsigned char* dst = DstImg;
-
- unsigned char* srcrow = SrcImg;
- unsigned char* srcrowidx = srcrow;
-
- int i = 0,j = 0;
- for(;i<DstH;i++)
- {
- //LOGD("srcrow = %d,dst = %d",srcrow-SrcImg,dst-DstImg);
- for(j = 0;j<DstW;j++)
- {
- dst[0] = srcrowidx[0];
- dst[1] = srcrowidx[1];
- dst[2] = srcrowidx[2];
- dst+=3;
- srcrowidx+=3*skipW;
- }
- // LOGD("srcrowidx end = %d",srcrowidx-SrcImg);
-
- srcrow += skipH*SrcW*3;
- srcrowidx = srcrow;
- }
-
- return 1;
-}
-
-int V4L2Camera::GenExif(unsigned char** pExif,int* exifLen,uint8_t* framebuf)
-{
- #define MAX_EXIF_COUNT (20)
- char* exiflist[MAX_EXIF_COUNT]={0},CameraMake[20],CameraModel[20];
- int i = 0,curend = 0;
-
- //Make
- exiflist[i] = new char[64];
- property_get("ro.camera.exif.make",CameraMake,"Amlogic");
- sprintf(exiflist[i],"Make=%d %s",strlen(CameraMake),CameraMake);
- i++;
-
- //Model
- exiflist[i] = new char[64];
-	property_get("ro.camera.exif.model",CameraModel,"DEFAULT");
- sprintf(exiflist[i],"Model=%d %s",strlen(CameraModel),CameraModel);
- i++;
-
- //Image width,height
- int width,height;
- m_pSetting->m_hParameter.getPictureSize(&width,&height);
-
- exiflist[i] = new char[64];
- sprintf(exiflist[i],"ImageWidth=%d %d",CalIntLen(width),width);
- i++;
-
- exiflist[i] = new char[64];
- sprintf(exiflist[i],"ImageLength=%d %d",CalIntLen(height),height);
- i++;
-
- //Image orientation
- int orientation = m_pSetting->m_hParameter.getInt(CameraParameters::KEY_ROTATION);
- //covert 0 90 180 270 to 0 1 2 3
- LOGE("get orientaion %d",orientation);
-#ifdef AMLOGIC_FRONT_CAMERA_SUPPORT
-#ifndef AMLOGIC_BACK_CAMERA_SUPPORT //only front facing
- if(m_curCameraInfo.facing == CAMERA_FACING_BACK){
- orientation = (m_curCameraInfo.orientation*2-orientation+360)%360;
- }
-#endif
-#endif
- if(orientation == 0)
- orientation = 1;
- else if(orientation == 90)
- orientation = 6;
- else if(orientation == 180)
- orientation = 3;
- else if(orientation == 270)
- orientation = 8;
- exiflist[i] = new char[64];
- sprintf(exiflist[i],"Orientation=%d %d",CalIntLen(orientation),orientation);
- i++;
-
- //focal length RATIONAL
- float focallen = m_pSetting->m_hParameter.getFloat(CameraParameters::KEY_FOCAL_LENGTH);
- int focalNum = focallen*1000;
- int focalDen = 1000;
- exiflist[i] = new char[64];
- sprintf(exiflist[i],"FocalLength=%d %d/%d",CalIntLen(focalNum)+CalIntLen(focalDen)+1,focalNum,focalDen);
- i++;
-
- //add gps information
- //latitude info
- char* latitudestr = (char*)m_pSetting->m_hParameter.get(CameraParameters::KEY_GPS_LATITUDE);
- if(latitudestr!=NULL)
- {
- int offset = 0;
- float latitude = m_pSetting->m_hParameter.getFloat(CameraParameters::KEY_GPS_LATITUDE);
- if(latitude < 0.0)
- {
- offset = 1;
- latitude*= (float)(-1);
- }
-
- int latitudedegree = latitude;
- float latitudeminuts = (latitude-(float)latitudedegree)*60;
- int latitudeminuts_int = latitudeminuts;
- float latituseconds = (latitudeminuts-(float)latitudeminuts_int)*60;
- int latituseconds_int = latituseconds;
- exiflist[i] = new char[256];
- sprintf(exiflist[i],"GPSLatitude=%d %d/%d,%d/%d,%d/%d",CalIntLen(latitudedegree)+CalIntLen(latitudeminuts_int)+CalIntLen(latituseconds_int)+8,latitudedegree,1,latitudeminuts_int,1,latituseconds_int,1);
- i++;
-
- exiflist[i] = new char[64];
- if(offset == 1)
- sprintf(exiflist[i],"GPSLatitudeRef=1 S");
- else
- sprintf(exiflist[i],"GPSLatitudeRef=1 N ");
- i++;
- }
-
- //Longitude info
- char* longitudestr = (char*)m_pSetting->m_hParameter.get(CameraParameters::KEY_GPS_LONGITUDE);
- if(longitudestr!=NULL)
- {
- int offset = 0;
- float longitude = m_pSetting->m_hParameter.getFloat(CameraParameters::KEY_GPS_LONGITUDE);
- if(longitude < 0.0)
- {
- offset = 1;
- longitude*= (float)(-1);
- }
-
- int longitudedegree = longitude;
- float longitudeminuts = (longitude-(float)longitudedegree)*60;
- int longitudeminuts_int = longitudeminuts;
- float longitudeseconds = (longitudeminuts-(float)longitudeminuts_int)*60;
- int longitudeseconds_int = longitudeseconds;
- exiflist[i] = new char[256];
- sprintf(exiflist[i],"GPSLongitude=%d %d/%d,%d/%d,%d/%d",CalIntLen(longitudedegree)+CalIntLen(longitudeminuts_int)+CalIntLen(longitudeseconds_int)+8,longitudedegree,1,longitudeminuts_int,1,longitudeseconds_int,1);
- i++;
-
- exiflist[i] = new char[64];
- if(offset == 1)
- sprintf(exiflist[i],"GPSLongitudeRef=1 W");
- else
- sprintf(exiflist[i],"GPSLongitudeRef=1 E");
- i++;
- }
-
- //Altitude info
- char* altitudestr = (char*)m_pSetting->m_hParameter.get(CameraParameters::KEY_GPS_ALTITUDE);
- if(altitudestr!=NULL)
- {
- int offset = 0;
- float altitude = m_pSetting->m_hParameter.getFloat(CameraParameters::KEY_GPS_ALTITUDE);
- if(altitude < 0.0)
- {
- offset = 1;
- altitude*= (float)(-1);
- }
-
- int altitudenum = altitude*1000;
- int altitudedec= 1000;
- exiflist[i] = new char[256];
- sprintf(exiflist[i],"GPSAltitude=%d %d/%d",CalIntLen(altitudenum)+CalIntLen(altitudedec)+1,altitudenum,altitudedec);
- i++;
-
- exiflist[i] = new char[64];
- sprintf(exiflist[i],"GPSAltitudeRef=1 %d",offset);
- i++;
- }
-
- //date stamp & time stamp
- time_t times = m_pSetting->m_hParameter.getInt(CameraParameters::KEY_GPS_TIMESTAMP);
- if(times != -1)
- {
- struct tm tmstruct;
- tmstruct = *(gmtime(&times));//convert to standard time
-
- //date
- exiflist[i] = new char[128];
- char timestr[30];
- strftime(timestr, 20, "%Y:%m:%d", &tmstruct);
- sprintf(exiflist[i],"GPSDateStamp=%d %s",strlen(timestr),timestr);
- i++;
-
- //time
- exiflist[i] = new char[128];
- sprintf(exiflist[i],"GPSTimeStamp=%d %d/%d,%d/%d,%d/%d",CalIntLen(tmstruct.tm_hour)+CalIntLen(tmstruct.tm_min)+CalIntLen(tmstruct.tm_sec)+8,tmstruct.tm_hour,1,tmstruct.tm_min,1,tmstruct.tm_sec,1);
- i++;
- }
-
- //datetime of photo
- {
- time_t curtime = 0;
- time(&curtime);
- struct tm tmstruct;
-		tmstruct = *(localtime(&curtime)); //convert to local time
-
- //date&time
- exiflist[i] = new char[64];
- char timestr[30];
- strftime(timestr, 30, "%Y:%m:%d %H:%M:%S", &tmstruct);
- sprintf(exiflist[i],"DateTime=%d %s",strlen(timestr),timestr);
- i++;
- }
-
- //processing method
- char* processmethod = (char*)m_pSetting->m_hParameter.get(CameraParameters::KEY_GPS_PROCESSING_METHOD);
- if(processmethod!=NULL)
- {
- char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };//asicii
-
- exiflist[i] = new char[128];
- int len = sizeof(ExifAsciiPrefix)+strlen(processmethod);
- sprintf(exiflist[i],"GPSProcessingMethod=%d ",len);
- int curend = strlen(exiflist[i]);
- memcpy(exiflist[i]+curend,ExifAsciiPrefix,8);
- memcpy(exiflist[i]+curend+8,processmethod,strlen(processmethod));
- i++;
- }
-
- //print exif
- int j = 0;
- for(;j<MAX_EXIF_COUNT;j++)
- {
- if(exiflist[j]!=NULL)
- LOGE("EXIF %s",exiflist[j]);
- }
-
- //thumbnail
- int thumbnailsize = 0;
- char* thumbnaildata = NULL;
- int thumbnailwidth = m_pSetting->m_hParameter.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
- int thumbnailheight = m_pSetting->m_hParameter.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
- if(thumbnailwidth > 0 )
- {
- // LOGE("creat thumbnail data");
- //create thumbnail data
- unsigned char* rgbdata = (unsigned char*)new char[thumbnailwidth*thumbnailheight*3];
- extraSmallImg(framebuf,width,height,rgbdata,thumbnailwidth,thumbnailheight);
-
- //code the thumbnail to jpeg
- thumbnaildata = new char[thumbnailwidth*thumbnailheight*3];
- jpeg_enc_t enc;
- enc.width = thumbnailwidth;
- enc.height = thumbnailheight;
- enc.quality = 90;
- enc.idata = (unsigned char*)rgbdata;
- enc.odata = (unsigned char*)thumbnaildata;
- enc.ibuff_size = thumbnailwidth*thumbnailheight*3;
- enc.obuff_size = thumbnailwidth*thumbnailheight*3;
- enc.data_in_app1 = 0;
- enc.app1_data_size = 0;
- thumbnailsize = encode_jpeg2(&enc);
-
- delete rgbdata;
- // LOGD("after add thumbnail %d,%d len %d",thumbnailwidth,thumbnailheight,thumbnailsize);
- }
-
- *pExif = getExifBuf(exiflist,i,exifLen,thumbnailsize,thumbnaildata);
-
- //release exif
- for(i=0;i<MAX_EXIF_COUNT;i++)
- {
- if(exiflist[i]!=NULL)
- delete exiflist[i];
- exiflist[i] = NULL;
- }
- //release thumbnaildata
- if(thumbnaildata)
- delete thumbnaildata;
-
- return 1;
-}
-
-status_t V4L2Camera::GetJpegFrame(uint8_t* framebuf,int* jpegsize)
-{
- *jpegsize = 0;
- if(m_iPicIdx!=-1)
- {
- unsigned char* exifcontent = NULL;
- jpeg_enc_t enc;
- m_pSetting->m_hParameter.getPictureSize(&enc.width,&enc.height);
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- sp<MemoryHeapBase> rgbheap = new MemoryHeapBase( enc.width * 3 * enc.height);
- yuyv422_to_rgb24((unsigned char*)pV4L2Frames[m_iPicIdx],(unsigned char *)rgbheap->getBase(),enc.width,enc.height);
-#endif
- enc.quality= m_pSetting->m_hParameter.getInt(CameraParameters::KEY_JPEG_QUALITY);
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- enc.idata = (unsigned char *)rgbheap->getBase();
-#else
- enc.idata = (unsigned char*)pV4L2Frames[m_iPicIdx];
-#endif
- enc.odata = (unsigned char*)framebuf;
- enc.ibuff_size = pV4L2FrameSize[m_iPicIdx];
- enc.obuff_size = pV4L2FrameSize[m_iPicIdx];
-#ifdef AMLOGIC_USB_CAMERA_SUPPORT
- GenExif(&(exifcontent),&(enc.app1_data_size),(unsigned char*)rgbheap->getBase());
-#else
- GenExif(&(exifcontent),&(enc.app1_data_size),(unsigned char*)pV4L2Frames[m_iPicIdx]);
-#endif
- enc.data_in_app1=exifcontent+2;
- *jpegsize = encode_jpeg2(&enc);
- if(exifcontent!=0)
- free(exifcontent);
- }
- else
- {
-		LOGE("GetJpegFrame index -1");
- return UNKNOWN_ERROR;
- }
- return NO_ERROR;
-}
-
-
-//=======================================================================================
-//functions for set V4L2
-
-status_t V4L2Camera::V4L2_GetValidFrameSize(unsigned int pixel_format,String8 &framesize)
-{
- struct v4l2_frmsizeenum frmsize;
- int i=0;
- char tempwidth[5],tempheight[5];
- memset(&frmsize,0,sizeof(v4l2_frmsizeenum));
- for(i=0;;i++){
- frmsize.index = i;
- frmsize.pixel_format = pixel_format;
- if(ioctl(m_pSetting->m_iDevFd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0){
- if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
- sprintf(tempwidth,"%d",frmsize.discrete.width);
- sprintf(tempheight,"%d",frmsize.discrete.height);
- framesize.append(tempwidth);
- framesize.append("x");
- framesize.append(tempheight);
- framesize.append(",");
- }
- else
- break;
- }
- else
- break;
- }
- if(framesize.length() == 0)
- return UNKNOWN_ERROR;
- else
- return NO_ERROR;
-}
-
-status_t V4L2Camera::V4L2_BufferInit(int Buf_W,int Buf_H,int Buf_Num,int colorfmt)
-{
- struct v4l2_format hformat;
- memset(&hformat,0,sizeof(v4l2_format));
- hformat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- hformat.fmt.pix.width = Buf_W;
- hformat.fmt.pix.height = Buf_H;
- hformat.fmt.pix.pixelformat = colorfmt;
- if (ioctl(m_pSetting->m_iDevFd, VIDIOC_S_FMT, &hformat) == -1)
- {
- LOGE("V4L2_BufferInit VIDIOC_S_FMT fail m_pSetting->m_iDevFd=%d width=%d height=%d pixelformat=(%c%c%c%c),errno:%d",m_pSetting->m_iDevFd,hformat.fmt.pix.width,hformat.fmt.pix.height,colorfmt&0xff, (colorfmt>>8)&0xff, (colorfmt>>16)&0xff, (colorfmt>>24)&0xff,errno);
- return UNKNOWN_ERROR;
- }
-
- //requeset buffers in V4L2
- v4l2_requestbuffers hbuf_req;
- memset(&hbuf_req,0,sizeof(v4l2_requestbuffers));
- hbuf_req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- hbuf_req.memory = V4L2_MEMORY_MMAP;
-	hbuf_req.count = Buf_Num; //request Buf_Num buffers so the HAL has spare frames cached
- if (ioctl(m_pSetting->m_iDevFd, VIDIOC_REQBUFS, &hbuf_req) == -1)
- {
- LOGE("V4L2_BufferInit VIDIOC_REQBUFS fail");
- return UNKNOWN_ERROR;
- }
- else
- {
- if (hbuf_req.count < Buf_Num)
- {
- LOGE("V4L2_BufferInit hbuf_req.count < Buf_Num");
- return UNKNOWN_ERROR;
- }
- else//memmap these buffer to user space
- {
- pV4L2Frames = (void**)new int[Buf_Num];
- pV4L2FrameSize = new int[Buf_Num];
- int i = 0;
- v4l2_buffer hbuf_query;
- memset(&hbuf_query,0,sizeof(v4l2_buffer));
-
- hbuf_query.type = hbuf_req.type;
- hbuf_query.memory = V4L2_MEMORY_MMAP;
- for(;i<Buf_Num;i++)
- {
- hbuf_query.index = i;
- if (ioctl(m_pSetting->m_iDevFd, VIDIOC_QUERYBUF, &hbuf_query) == -1)
- {
- LOGE("Memap V4L2 buffer Fail");
- return UNKNOWN_ERROR;
- }
-
- pV4L2FrameSize[i] = hbuf_query.length;
- LOGD("V4L2_BufferInit::Get Buffer Idx %d Len %d",i,pV4L2FrameSize[i]);
- pV4L2Frames[i] = mmap(NULL,pV4L2FrameSize[i],PROT_READ | PROT_WRITE,MAP_SHARED,m_pSetting->m_iDevFd,hbuf_query.m.offset);
- if(pV4L2Frames[i] == MAP_FAILED)
- {
- LOGE("Memap V4L2 buffer Fail");
- return UNKNOWN_ERROR;
- }
- /*
- //enqueue buffer
- if (ioctl(m_pSetting->m_iDevFd, VIDIOC_QBUF, &hbuf_query) == -1)
- {
- LOGE("GetPreviewFrame nque buffer fail");
- return UNKNOWN_ERROR;
- }
- */
- }
- m_V4L2BufNum = Buf_Num;
- }
- }
- return NO_ERROR;
-}
-
-status_t V4L2Camera::V4L2_BufferUnInit()
-{
- if(m_V4L2BufNum > 0)
- {
- //un-memmap
- int i = 0;
- for (; i < m_V4L2BufNum; i++)
- {
- munmap(pV4L2Frames[i], pV4L2FrameSize[i]);
- pV4L2Frames[i] = NULL;
- pV4L2FrameSize[i] = 0;
- LOGD("unint buffer %d",i);
- }
- m_V4L2BufNum = 0;
- delete pV4L2Frames;
- delete pV4L2FrameSize;
- pV4L2FrameSize = NULL;
- pV4L2Frames = NULL;
- }
- return NO_ERROR;
-}
-
-status_t V4L2Camera::V4L2_BufferEnQue(int idx)
-{
- v4l2_buffer hbuf_query;
- memset(&hbuf_query,0,sizeof(v4l2_buffer));
- hbuf_query.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-	hbuf_query.memory = V4L2_MEMORY_MMAP;//what difference does setting the index make here?
- hbuf_query.index = idx;
- if (ioctl(m_pSetting->m_iDevFd, VIDIOC_QBUF, &hbuf_query) == -1)
- {
- LOGE("V4L2_BufferEnQue fail %d",errno);
- return UNKNOWN_ERROR;
- }
-
- return NO_ERROR;
-}
-int V4L2Camera::V4L2_BufferDeQue()
-{
- v4l2_buffer hbuf_query;
- memset(&hbuf_query,0,sizeof(v4l2_buffer));
- hbuf_query.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-	hbuf_query.memory = V4L2_MEMORY_MMAP;//what difference does setting the index make here?
- if (ioctl(m_pSetting->m_iDevFd, VIDIOC_DQBUF, &hbuf_query) == -1)
- {
- LOGE("V4L2_StreamGet Deque buffer fail %d",errno);
- return -1;
- }
-
- assert (hbuf_query.index < m_V4L2BufNum);
- pV4L2FrameSize[hbuf_query.index] = hbuf_query.bytesused;
- return hbuf_query.index;
-}
-
-status_t V4L2Camera::V4L2_StreamOn()
-{
- //LOGD("V4L2_StreamOn");
- int stream_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (ioctl(m_pSetting->m_iDevFd, VIDIOC_STREAMON, &stream_type) == -1)
- LOGE("V4L2_StreamOn Fail %d",errno);
- return NO_ERROR;
-}
-
-status_t V4L2Camera::V4L2_StreamOff()
-{
- int stream_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (ioctl(m_pSetting->m_iDevFd, VIDIOC_STREAMOFF, &stream_type) == -1)
- LOGE("V4L2_StreamOff Fail %d",errno);
- return NO_ERROR;
-}
-
-//extern CameraInterface* HAL_GetFakeCamera();
-extern CameraInterface* HAL_GetCameraInterface(int Id)
-{
- LOGD("HAL_GetCameraInterface return V4L2 interface");
- if(Id == 0)
- return new V4L2Camera("/dev/video0",0);
- else
- return new V4L2Camera("/dev/video1",1);
-}
-
-};
diff --git a/V4L2/V4L2Camera.h b/V4L2/V4L2Camera.h
deleted file mode 100755
index 934d6b1..0000000
--- a/V4L2/V4L2Camera.h
+++ b/dev/null
@@ -1,56 +0,0 @@
-#include "../AmlogicCameraHardware.h"
-#include <camera/CameraHardwareInterface.h>
-#include "CameraSetting.h"
-
-
-namespace android {
-
-class V4L2Camera : public CameraInterface
-{
-public:
- V4L2Camera(char* devname,int camid);
- ~V4L2Camera();
- status_t Open() ;
- status_t Close();
- status_t StartPreview();
- status_t StopPreview();
- status_t StartRecord() {return NO_ERROR;}
- status_t StopRecord() {return NO_ERROR;}
- status_t TakePicture();
- status_t TakePictureEnd();
- status_t StartFocus() {return NO_ERROR;}
- status_t StopFocus() {return NO_ERROR;}
- status_t InitParameters(CameraParameters& pParameters);
- status_t SetParameters(CameraParameters& pParameters) ;
- status_t GetPreviewFrame(uint8_t* framebuf) ;
- status_t GetRawFrame(uint8_t* framebuf) ;
- status_t GetJpegFrame(uint8_t* framebuf,int* jpegsize) ;
- int GetCamId() {return m_pSetting->m_iCamId;}
-
-protected:
- //internal used for controling V4L2
- status_t V4L2_GetValidFrameSize(unsigned int pixel_format,String8 &framesize);
- status_t V4L2_BufferInit(int Buf_W,int Buf_H,int Buf_Num,int colorfmt);
- status_t V4L2_BufferUnInit();
- status_t V4L2_BufferEnQue(int idx);
- int V4L2_BufferDeQue();//return the buffer index
-
-
- status_t V4L2_StreamOn();
- status_t V4L2_StreamOff();
-
- int GenExif(unsigned char** pExif,int* exifLen,uint8_t* framebuf);
-
- void** pV4L2Frames;
- int* pV4L2FrameSize;
- int m_V4L2BufNum;
- int m_iPicIdx;
- bool m_bFirstFrame;
-
-//manager the paramerter for different camera devices
- CameraSetting* m_pSetting;
- struct CameraInfo m_curCameraInfo;
-};
-
-
-};
diff --git a/V4L2/amlogic_camera_para.h b/V4L2/amlogic_camera_para.h
deleted file mode 100755
index 07ed2b9..0000000
--- a/V4L2/amlogic_camera_para.h
+++ b/dev/null
@@ -1,148 +0,0 @@
-/*
- * TVIN Modules Exported Header File
- *
- * Author: Lin Xu <lin.xu@amlogic.com>
- * Bobby Yang <bo.yang@amlogic.com>
- *
- * Copyright (C) 2010 Amlogic Inc.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License version 2 as
- * published by the Free Software Foundation.
- */
-
-
-#ifndef _AMLOGIC_CAMERA_PARA_H
-#define _AMLOGIC_CAMERA_PARA_H
-
-/*
- below macro defined applied to camera driver
-*/
-typedef enum camera_light_mode_e {
- ADVANCED_AWB = 0,
- SIMPLE_AWB,
- MANUAL_DAY,
- MANUAL_A,
- MANUAL_CWF,
- MANUAL_CLOUDY,
-}camera_light_mode_t;
-
-typedef enum camera_saturation_e {
- SATURATION_N4_STEP = 0,
- SATURATION_N3_STEP,
- SATURATION_N2_STEP,
- SATURATION_N1_STEP,
- SATURATION_0_STEP,
- SATURATION_P1_STEP,
- SATURATION_P2_STEP,
- SATURATION_P3_STEP,
- SATURATION_P4_STEP,
-}camera_saturation_t;
-
-
-typedef enum camera_brightness_e {
- BRIGHTNESS_N4_STEP = 0,
- BRIGHTNESS_N3_STEP,
- BRIGHTNESS_N2_STEP,
- BRIGHTNESS_N1_STEP,
- BRIGHTNESS_0_STEP,
- BRIGHTNESS_P1_STEP,
- BRIGHTNESS_P2_STEP,
- BRIGHTNESS_P3_STEP,
- BRIGHTNESS_P4_STEP,
-}camera_brightness_t;
-
-typedef enum camera_contrast_e {
- CONTRAST_N4_STEP = 0,
- CONTRAST_N3_STEP,
- CONTRAST_N2_STEP,
- CONTRAST_N1_STEP,
- CONTRAST_0_STEP,
- CONTRAST_P1_STEP,
- CONTRAST_P2_STEP,
- CONTRAST_P3_STEP,
- CONTRAST_P4_STEP,
-}camera_contrast_t;
-
-typedef enum camera_hue_e {
- HUE_N180_DEGREE = 0,
- HUE_N150_DEGREE,
- HUE_N120_DEGREE,
- HUE_N90_DEGREE,
- HUE_N60_DEGREE,
- HUE_N30_DEGREE,
- HUE_0_DEGREE,
- HUE_P30_DEGREE,
- HUE_P60_DEGREE,
- HUE_P90_DEGREE,
- HUE_P120_DEGREE,
- HUE_P150_DEGREE,
-}camera_hue_t;
-
-typedef enum camera_special_effect_e {
- SPECIAL_EFFECT_NORMAL = 0,
- SPECIAL_EFFECT_BW,
- SPECIAL_EFFECT_BLUISH,
- SPECIAL_EFFECT_SEPIA,
- SPECIAL_EFFECT_REDDISH,
- SPECIAL_EFFECT_GREENISH,
- SPECIAL_EFFECT_NEGATIVE,
-}camera_special_effect_t;
-
-typedef enum camera_exposure_e {
- EXPOSURE_N4_STEP = 0,
- EXPOSURE_N3_STEP,
- EXPOSURE_N2_STEP,
- EXPOSURE_N1_STEP,
- EXPOSURE_0_STEP,
- EXPOSURE_P1_STEP,
- EXPOSURE_P2_STEP,
- EXPOSURE_P3_STEP,
- EXPOSURE_P4_STEP,
-}camera_exposure_t;
-
-
-typedef enum camera_sharpness_e {
- SHARPNESS_1_STEP = 0,
- SHARPNESS_2_STEP,
- SHARPNESS_3_STEP,
- SHARPNESS_4_STEP,
- SHARPNESS_5_STEP,
- SHARPNESS_6_STEP,
- SHARPNESS_7_STEP,
- SHARPNESS_8_STEP,
- SHARPNESS_AUTO_STEP,
-}camera_sharpness_t;
-
-typedef enum camera_mirror_flip_e {
- MF_NORMAL = 0,
- MF_MIRROR,
- MF_FLIP,
- MF_MIRROR_FLIP,
-}camera_mirror_flip_t;
-
-
-typedef enum camera_wb_flip_e {
- CAM_WB_AUTO = 0,
- CAM_WB_CLOUD,
- CAM_WB_DAYLIGHT,
- CAM_WB_INCANDESCENCE,
- CAM_WB_TUNGSTEN,
- CAM_WB_FLUORESCENT,
- CAM_WB_MANUAL,
-}camera_wb_flip_t;
-typedef enum camera_night_mode_flip_e {
- CAM_NM_AUTO = 0,
- CAM_NM_ENABLE,
-}camera_night_mode_flip_t;
-typedef enum camera_effect_flip_e {
- CAM_EFFECT_ENC_NORMAL = 0,
- CAM_EFFECT_ENC_GRAYSCALE,
- CAM_EFFECT_ENC_SEPIA,
- CAM_EFFECT_ENC_SEPIAGREEN,
- CAM_EFFECT_ENC_SEPIABLUE,
- CAM_EFFECT_ENC_COLORINV,
-}camera_effect_flip_t;
-#endif
-
-
diff --git a/V4LCameraAdapter/V4LCameraAdapter.cpp b/V4LCameraAdapter/V4LCameraAdapter.cpp
new file mode 100644
index 0000000..11b376a
--- a/dev/null
+++ b/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -0,0 +1,796 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file V4LCameraAdapter.cpp
+*
+* This file maps the Camera Hardware Interface to V4L2.
+*
+*/
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "V4LCameraAdapter"
+//reinclude because of a bug with the log macros
+#include <utils/Log.h>
+#include "DebugUtils.h"
+
+#include "V4LCameraAdapter.h"
+#include "CameraHal.h"
+#include "TICameraParameters.h"
+#include <signal.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/select.h>
+#include <linux/videodev.h>
+
+#include <cutils/properties.h>
+
+//for private_handle_t TODO move out of private header
+#include <gralloc_priv.h>
+
+#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
+static int mDebugFps = 0;
+
+#define Q16_OFFSET 16
+
+#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
+
+namespace android {
+
+#undef LOG_TAG
+///Maintain a separate tag for V4LCameraAdapter logs so adapter-specific issues are easy to isolate
+#define LOG_TAG "V4LCameraAdapter"
+
+//redefine because of a bug with the log macros
+#undef LOG_FUNCTION_NAME
+#undef LOG_FUNCTION_NAME_EXIT
+#define LOG_FUNCTION_NAME LOGV("%d: %s() ENTER", __LINE__, __FUNCTION__);
+#define LOG_FUNCTION_NAME_EXIT LOGV("%d: %s() EXIT", __LINE__, __FUNCTION__);
+
+//frames skipped before recalculating the framerate
+#define FPS_PERIOD 30
+
+Mutex gAdapterLock;
+const char *device = DEVICE;
+
+/*--------------------junk STARTS here-----------------------------*/
+#define SYSFILE_CAMERA_SET_PARA "/sys/class/vm/attr2"
+#define SYSFILE_CAMERA_SET_MIRROR "/sys/class/vm/mirror"
+static int writefile(char* path,char* content)
+{
+ FILE* fp = fopen(path, "w+");
+
+ LOGD("Write file %s(%p) content %s", path, fp, content);
+
+ if (fp) {
+ while( ((*content) != '\0') ) {
+ if (EOF == fputc(*content,fp))
+ LOGD("write char fail");
+ content++;
+ }
+
+ fclose(fp);
+ }
+ else
+ LOGD("open file fail\n");
+ return 1;
+}
+
+/*--------------------Camera Adapter Class STARTS here-----------------------------*/
+
+status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
+{
+ LOG_FUNCTION_NAME;
+
+ char value[PROPERTY_VALUE_MAX];
+ property_get("debug.camera.showfps", value, "0");
+ mDebugFps = atoi(value);
+
+ int ret = NO_ERROR;
+
+ // Allocate memory for video info structure
+ mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo));
+ if(!mVideoInfo)
+ {
+ return NO_MEMORY;
+ }
+
+ if ((mCameraHandle = open(device, O_RDWR)) == -1)
+ {
+ CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
+ return -EINVAL;
+ }
+
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
+ if (ret < 0)
+ {
+ CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
+ return -EINVAL;
+ }
+
+ if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0)
+ {
+ CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
+ return -EINVAL;
+ }
+
+ if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING))
+ {
+ CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
+ return -EINVAL;
+ }
+
+ // Initialize flags
+ mPreviewing = false;
+ mVideoInfo->isStreaming = false;
+ mRecording = false;
+
+ // ---------
+ writefile(SYSFILE_CAMERA_SET_PARA, "1");
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType)
+{
+
+ status_t ret = NO_ERROR;
+
+ if ( !mVideoInfo->isStreaming )
+ {
+ return NO_ERROR;
+ }
+
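+    // Look up the V4L2 buffer index that UseBuffersPreview() associated with this
+    // gralloc buffer, then queue that buffer back to the capture driver.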
+ int i = mPreviewBufs.valueFor(( unsigned int )frameBuf);
+ if(i<0)
+ {
+ return BAD_VALUE;
+ }
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("Init: VIDIOC_QBUF Failed");
+ return -1;
+ }
+
+ nQueued++;
+
+ return ret;
+
+}
+
+status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
+{
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+
+ int width, height;
+
+ params.getPreviewSize(&width, &height);
+
+ CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
+
+ mVideoInfo->width = width;
+ mVideoInfo->height = height;
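+    // The input frame size assumes a packed 16-bit-per-pixel format (e.g. YUYV),
+    // i.e. width * height * 2 bytes.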
+ mVideoInfo->framesizeIn = (width * height << 1);
+ mVideoInfo->formatIn = DEFAULT_PIXEL_FORMAT;
+
+ mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->format.fmt.pix.width = width;
+ mVideoInfo->format.fmt.pix.height = height;
+ mVideoInfo->format.fmt.pix.pixelformat = DEFAULT_PIXEL_FORMAT;
+
+ ret = ioctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Open: VIDIOC_S_FMT Failed: %s", strerror(errno));
+ return ret;
+ }
+
+    // Update the current parameter set
+ mParams = params;
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+
+void V4LCameraAdapter::getParameters(CameraParameters& params)
+{
+ LOG_FUNCTION_NAME;
+
+ // Return the current parameter set
+ //params = mParams;
+ //that won't work. we might wipe out the existing params
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+///API to give the buffers to Adapter
+status_t V4LCameraAdapter::useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable)
+{
+ status_t ret = NO_ERROR;
+
+ LOG_FUNCTION_NAME;
+
+ Mutex::Autolock lock(mLock);
+
+ switch(mode)
+ {
+ case CAMERA_PREVIEW:
+ ret = UseBuffersPreview(bufArr, num);
+ break;
+
+ //@todo Insert Image capture case here
+
+ case CAMERA_VIDEO:
+ //@warn Video capture is not fully supported yet
+ ret = UseBuffersPreview(bufArr, num);
+ break;
+
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::UseBuffersPreview(void* bufArr, int num)
+{
+ int ret = NO_ERROR;
+
+ if(NULL == bufArr)
+ {
+ return BAD_VALUE;
+ }
+
+ //First allocate adapter internal buffers at V4L level for USB Cam
+ //These are the buffers from which we will copy the data into overlay buffers
+ /* Check if camera can handle NB_BUFFER buffers */
+ mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
+ mVideoInfo->rb.count = num;
+
+ ret = ioctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
+ if (ret < 0) {
+ CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
+ return ret;
+ }
+
+ for (int i = 0; i < num; i++) {
+
+ memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->mem[i] = mmap (0,
+ mVideoInfo->buf.length,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ mCameraHandle,
+ mVideoInfo->buf.m.offset);
+
+ if (mVideoInfo->mem[i] == MAP_FAILED) {
+ CAMHAL_LOGEB("Unable to map buffer (%s)", strerror(errno));
+ return -1;
+ }
+
+ uint32_t *ptr = (uint32_t*) bufArr;
+
+ //Associate each Camera internal buffer with the one from Overlay
+ LOGD("mPreviewBufs.add %#x, %d", ptr[i], i);
+ mPreviewBufs.add((int)ptr[i], i);
+
+ }
+
+ // Update the preview buffer count
+ mPreviewBufferCount = num;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::startPreview()
+{
+ status_t ret = NO_ERROR;
+
+ Mutex::Autolock lock(mPreviewBufsLock);
+
+ if(mPreviewing)
+ {
+ return BAD_VALUE;
+ }
+
+ for (int i = 0; i < mPreviewBufferCount; i++) {
+
+ mVideoInfo->buf.index = i;
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = ioctl(mCameraHandle, VIDIOC_QBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ return -EINVAL;
+ }
+
+ nQueued++;
+ }
+
+ enum v4l2_buf_type bufType;
+ if (!mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = ioctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->isStreaming = true;
+ }
+
+ // Create and start preview thread for receiving buffers from V4L Camera
+ mPreviewThread = new PreviewThread(this);
+
+ CAMHAL_LOGDA("Created preview thread");
+
+
+ //Update the flag to indicate we are previewing
+ mPreviewing = true;
+
+ return ret;
+
+}
+
+status_t V4LCameraAdapter::stopPreview()
+{
+ enum v4l2_buf_type bufType;
+ int ret = NO_ERROR;
+
+ Mutex::Autolock lock(mPreviewBufsLock);
+
+ if(!mPreviewing)
+ {
+ return NO_INIT;
+ }
+
+ if (mVideoInfo->isStreaming) {
+ bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ ret = ioctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
+ if (ret < 0) {
+ CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
+ return ret;
+ }
+
+ mVideoInfo->isStreaming = false;
+ }
+
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ nQueued = 0;
+ nDequeued = 0;
+
+ /* Unmap buffers */
+ for (int i = 0; i < mPreviewBufferCount; i++)
+ if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0)
+ CAMHAL_LOGEA("Unmap failed");
+
+ mPreviewBufs.clear();
+
+ mPreviewThread->requestExitAndWait();
+ mPreviewThread.clear();
+
+ return ret;
+
+}
+
+char * V4LCameraAdapter::GetFrame(int &index)
+{
+ int ret;
+
+ mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
+
+ /* DQ */
+ ret = ioctl(mCameraHandle, VIDIOC_DQBUF, &mVideoInfo->buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
+ return NULL;
+ }
+ nDequeued++;
+
+ index = mVideoInfo->buf.index;
+
+ return (char *)mVideoInfo->mem[mVideoInfo->buf.index];
+}
+
+//API to get the frame size required to be allocated. This size is used to override the size passed
+//by camera service when VSTAB/VNF is turned ON for example
+status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
+{
+ status_t ret = NO_ERROR;
+
+ // Just return the current preview size, nothing more to do here.
+ mParams.getPreviewSize(( int * ) &width,
+ ( int * ) &height);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+}
+
+status_t V4LCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
+{
+ // We don't support meta data, so simply return
+ return NO_ERROR;
+}
+
+status_t V4LCameraAdapter::getPictureBufferSize(size_t &length, size_t bufferCount)
+{
+ // We don't support image capture yet, safely return from here without messing up
+ return NO_ERROR;
+}
+
+static void debugShowFPS()
+{
+ static int mFrameCount = 0;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ mFrameCount++;
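+    // Recompute and log the frame rate once every 32 frames (0x1F mask) to keep
+    // the logging overhead low.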
+ if (!(mFrameCount & 0x1F)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ LOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
+ // XXX: mFPS has the value we want
+}
+
+status_t V4LCameraAdapter::recalculateFPS()
+{
+ float currentFPS;
+
+ mFrameCount++;
+
+ if ( ( mFrameCount % FPS_PERIOD ) == 0 )
+ {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFPSTime;
+ currentFPS = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFPSTime = now;
+ mLastFrameCount = mFrameCount;
+
+ if ( 1 == mIter )
+ {
+ mFPS = currentFPS;
+ }
+ else
+ {
+ //cumulative moving average
+ mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter;
+ }
+
+ mLastFPS = mFPS;
+ mIter++;
+ }
+
+ return NO_ERROR;
+}
+
+void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
+{
+ //LOG_FUNCTION_NAME;
+
+ //LOG_FUNCTION_NAME_EXIT;
+}
+
+
+V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index)
+{
+ LOG_FUNCTION_NAME;
+
+ // Nothing useful to do in the constructor
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+V4LCameraAdapter::~V4LCameraAdapter()
+{
+ LOG_FUNCTION_NAME;
+
+ // Close the camera handle and free the video info structure
+ close(mCameraHandle);
+
+ if (mVideoInfo)
+ {
+ free(mVideoInfo);
+ mVideoInfo = NULL;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/* Preview Thread */
+// ---------------------------------------------------------------------------
+
+int V4LCameraAdapter::previewThread()
+{
+ status_t ret = NO_ERROR;
+ int width, height;
+ CameraFrame frame;
+
+ if (mPreviewing)
+ {
+ int index = 0;
+ char *fp = this->GetFrame(index);
+ if(!fp)
+ {
+ return BAD_VALUE;
+ }
+
+ uint8_t* ptr = (uint8_t*) mPreviewBufs.keyAt(index);
+ private_handle_t* gralloc_hnd = (private_handle_t*)ptr;
+ if (!ptr)
+ {
+ return BAD_VALUE;
+ }
+
+ int width, height;
+ uint16_t* dest = (uint16_t*)gralloc_hnd->base; //ptr;
+ uint16_t* src = (uint16_t*) fp;
+ mParams.getPreviewSize(&width, &height);
+ //LOGD("previewThread ptr=%#x dest=%#x fp=%#x width=%d height=%d", ptr, dest, fp, width, height);
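+        // Copy the frame row by row; the destination gralloc buffer is assumed to
+        // have a fixed row stride of 4096 bytes (2048 16-bit pixels), hence the
+        // dest pointer adjustment after each row.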
+ for(int i=0;i<height;i++)
+ {
+ for(int j=0;j<width;j++)
+ {
+ *dest = *src;
+ //convert from YUYV to UYVY supported in Camera service
+ /*
+ *dest = (((*src & 0xFF000000)>>24)<<16)|(((*src & 0x00FF0000)>>16)<<24) |
+ (((*src & 0xFF00)>>8)<<0)|(((*src & 0x00FF)>>0)<<8);
+ */
+ src++;
+ dest++;
+ }
+ dest += 4096/2-width;
+ }
+
+ mParams.getPreviewSize(&width, &height);
+ frame.mFrameMask = CameraFrame::PREVIEW_FRAME_SYNC;
+ //frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
+ frame.mBuffer = ptr; //dest
+ frame.mLength = width*height*2;
+ frame.mAlignment = width*2;
+ frame.mOffset = 0;
+ frame.mYuv[0] = NULL;
+ frame.mYuv[1] = NULL;
+ frame.mWidth = width;
+ frame.mHeight = height;
+        frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
+ ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret)
+ LOGE("setInitFrameRefCount err=%d", ret);
+ else
+ ret = sendFrameToSubscribers(&frame);
+ //LOGD("previewThread /sendFrameToSubscribers ret=%d", ret);
+
+ }
+
+ return ret;
+}
+
+extern "C" CameraAdapter* CameraAdapter_Factory()
+{
+ CameraAdapter *adapter = NULL;
+ Mutex::Autolock lock(gAdapterLock);
+
+ LOG_FUNCTION_NAME;
+
+ adapter = new V4LCameraAdapter(0/*sensor_index*/);
+ if ( adapter ) {
+ CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d",0/*sensor_index*/);
+ } else {
+ CAMHAL_LOGEA("Camera adapter create failed!");
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return adapter;
+}
+
+extern "C" int CameraAdapter_Capabilities(CameraProperties::Properties* properties_array,
+ const unsigned int starting_camera,
+ const unsigned int max_camera) {
+ int num_cameras_supported = 0;
+ CameraProperties::Properties* properties = NULL;
+
+ LOG_FUNCTION_NAME;
+
+ if(!properties_array)
+ {
+ return -EINVAL;
+ }
+
+ // TODO: Need to tell camera properties what other cameras we can support
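+    // For now only a single (USB) camera is reported; its properties are filled
+    // into the slot at starting_camera.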
+ if (starting_camera + num_cameras_supported < max_camera) {
+ num_cameras_supported++;
+ properties = properties_array + starting_camera;
+ properties->set(CameraProperties::CAMERA_NAME, "USBCamera");
+ }
+ //TODO move
+ extern void loadCaps(CameraProperties::Properties* params);
+ loadCaps(properties);
+
+
+ //------------------------
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return num_cameras_supported;
+}
+
+//TODO move
+extern "C" void loadCaps(CameraProperties::Properties* params) {
+ const char DEFAULT_ANTIBANDING[] = "auto";
+ const char DEFAULT_BRIGHTNESS[] = "50";
+ const char DEFAULT_CONTRAST[] = "100";
+ const char DEFAULT_EFFECT[] = "none";
+ const char DEFAULT_EV_COMPENSATION[] = "0";
+ const char DEFAULT_EV_STEP[] = "0.1";
+ const char DEFAULT_EXPOSURE_MODE[] = "auto";
+ const char DEFAULT_FLASH_MODE[] = "off";
+ const char DEFAULT_FOCUS_MODE_PREFERRED[] = "auto";
+ const char DEFAULT_FOCUS_MODE[] = "infinity";
+ const char DEFAULT_FRAMERATE_RANGE_IMAGE[] = "15000,30000";
+ const char DEFAULT_FRAMERATE_RANGE_VIDEO[]="24000,30000";
+ const char DEFAULT_IPP[] = "ldc-nsf";
+ const char DEFAULT_GBCE[] = "disable";
+ const char DEFAULT_ISO_MODE[] = "auto";
+ const char DEFAULT_JPEG_QUALITY[] = "95";
+ const char DEFAULT_THUMBNAIL_QUALITY[] = "60";
+ const char DEFAULT_THUMBNAIL_SIZE[] = "160x120";
+ const char DEFAULT_PICTURE_FORMAT[] = "jpeg";
+ const char DEFAULT_PICTURE_SIZE[] = "320x240";
+ const char DEFAULT_PREVIEW_FORMAT[] = "yuv420sp";
+ const char DEFAULT_FRAMERATE[] = "30";
+ const char DEFAULT_PREVIEW_SIZE[] = "640x480";
+ const char DEFAULT_NUM_PREV_BUFS[] = "6";
+ const char DEFAULT_NUM_PIC_BUFS[] = "1";
+ const char DEFAULT_MAX_FOCUS_AREAS[] = "1";
+ const char DEFAULT_SATURATION[] = "100";
+ const char DEFAULT_SCENE_MODE[] = "auto";
+ const char DEFAULT_SHARPNESS[] = "100";
+ const char DEFAULT_VSTAB[] = "false";
+ const char DEFAULT_VSTAB_SUPPORTED[] = "true";
+ const char DEFAULT_WB[] = "auto";
+ const char DEFAULT_ZOOM[] = "0";
+ const char DEFAULT_MAX_FD_HW_FACES[] = "0";
+ const char DEFAULT_MAX_FD_SW_FACES[] = "0";
+ const char DEFAULT_FOCAL_LENGTH_PRIMARY[] = "3.43";
+ const char DEFAULT_FOCAL_LENGTH_SECONDARY[] = "1.95";
+ const char DEFAULT_HOR_ANGLE[] = "54.8";
+ const char DEFAULT_VER_ANGLE[] = "42.5";
+ const char DEFAULT_AE_LOCK[] = "false";
+ const char DEFAULT_AWB_LOCK[] = "false";
+ const char DEFAULT_MAX_NUM_METERING_AREAS[] = "0";
+ const char DEFAULT_LOCK_SUPPORTED[] = "true";
+ const char DEFAULT_LOCK_UNSUPPORTED[] = "false";
+ const char DEFAULT_VIDEO_SNAPSHOT_SUPPORTED[] = "true";
+ const char DEFAULT_VIDEO_SIZE[] = "1920x1080";
+ const char DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "1920x1080";
+
+ params->set(CameraProperties::FACING_INDEX, TICameraParameters::FACING_FRONT);
+ params->set(CameraProperties::ANTIBANDING, DEFAULT_ANTIBANDING);
+ params->set(CameraProperties::BRIGHTNESS, DEFAULT_BRIGHTNESS);
+ params->set(CameraProperties::CONTRAST, DEFAULT_CONTRAST);
+ params->set(CameraProperties::EFFECT, DEFAULT_EFFECT);
+ params->set(CameraProperties::EV_COMPENSATION, DEFAULT_EV_COMPENSATION);
+ params->set(CameraProperties::SUPPORTED_EV_STEP, DEFAULT_EV_STEP);
+ params->set(CameraProperties::EXPOSURE_MODE, DEFAULT_EXPOSURE_MODE);
+ params->set(CameraProperties::FLASH_MODE, DEFAULT_FLASH_MODE);
+ char *pos = strstr(params->get(CameraProperties::SUPPORTED_FOCUS_MODES), DEFAULT_FOCUS_MODE_PREFERRED);
+ if ( NULL != pos )
+ {
+ params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE_PREFERRED);
+ }
+ else
+ {
+ params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
+ }
+ params->set(CameraProperties::IPP, DEFAULT_IPP);
+ params->set(CameraProperties::GBCE, DEFAULT_GBCE);
+ params->set(CameraProperties::ISO_MODE, DEFAULT_ISO_MODE);
+ params->set(CameraProperties::JPEG_QUALITY, DEFAULT_JPEG_QUALITY);
+ params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, DEFAULT_THUMBNAIL_QUALITY);
+ params->set(CameraProperties::JPEG_THUMBNAIL_SIZE, DEFAULT_THUMBNAIL_SIZE);
+ params->set(CameraProperties::PICTURE_FORMAT, DEFAULT_PICTURE_FORMAT);
+ params->set(CameraProperties::PICTURE_SIZE, DEFAULT_PICTURE_SIZE);
+ params->set(CameraProperties::PREVIEW_FORMAT, DEFAULT_PREVIEW_FORMAT);
+ params->set(CameraProperties::PREVIEW_FRAME_RATE, DEFAULT_FRAMERATE);
+ params->set(CameraProperties::PREVIEW_SIZE, DEFAULT_PREVIEW_SIZE);
+ params->set(CameraProperties::REQUIRED_PREVIEW_BUFS, DEFAULT_NUM_PREV_BUFS);
+ params->set(CameraProperties::REQUIRED_IMAGE_BUFS, DEFAULT_NUM_PIC_BUFS);
+ params->set(CameraProperties::SATURATION, DEFAULT_SATURATION);
+ params->set(CameraProperties::SCENE_MODE, DEFAULT_SCENE_MODE);
+ params->set(CameraProperties::SHARPNESS, DEFAULT_SHARPNESS);
+ params->set(CameraProperties::VSTAB, DEFAULT_VSTAB);
+ params->set(CameraProperties::VSTAB_SUPPORTED, DEFAULT_VSTAB_SUPPORTED);
+ params->set(CameraProperties::WHITEBALANCE, DEFAULT_WB);
+ params->set(CameraProperties::ZOOM, DEFAULT_ZOOM);
+ params->set(CameraProperties::MAX_FD_HW_FACES, DEFAULT_MAX_FD_HW_FACES);
+ params->set(CameraProperties::MAX_FD_SW_FACES, DEFAULT_MAX_FD_SW_FACES);
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK, DEFAULT_AE_LOCK);
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK, DEFAULT_AWB_LOCK);
+ params->set(CameraProperties::FOCAL_LENGTH, DEFAULT_FOCAL_LENGTH_PRIMARY);
+ params->set(CameraProperties::HOR_ANGLE, DEFAULT_HOR_ANGLE);
+ params->set(CameraProperties::VER_ANGLE, DEFAULT_VER_ANGLE);
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, DEFAULT_VIDEO_SNAPSHOT_SUPPORTED);
+ params->set(CameraProperties::VIDEO_SIZE, DEFAULT_VIDEO_SIZE);
+ params->set(CameraProperties::PREFERRED_PREVIEW_SIZE_FOR_VIDEO, DEFAULT_PREFERRED_PREVIEW_SIZE_FOR_VIDEO);
+
+ params->set(CameraProperties::SUPPORTED_PICTURE_SIZES, "320x240");
+ params->set(CameraProperties::SUPPORTED_PREVIEW_SIZES, "640x480");
+ params->set(CameraProperties::SUPPORTED_THUMBNAIL_SIZES, "0x0");
+
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, CameraParameters::PIXEL_FORMAT_YUV420SP);
+ params->set(CameraProperties::SUPPORTED_IPP_MODES, DEFAULT_IPP);
+ params->set(CameraProperties::SUPPORTED_PICTURE_FORMATS, CameraParameters::PIXEL_FORMAT_JPEG);
+ params->set(CameraProperties::FRAMERATE_RANGE_SUPPORTED, 30);
+ params->set(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES, "30,4294967");
+ //params->set(CameraProperties::SUPPORTED_FOCUS_MODES, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE);
+ params->set(CameraProperties::SUPPORTED_FOCUS_MODES, DEFAULT_FOCUS_MODE);
+
+ params->set(CameraProperties::SUPPORTED_EXPOSURE_MODES, "auto");
+ params->set(CameraProperties::SUPPORTED_WHITE_BALANCE, "auto");
+ params->set(CameraProperties::SUPPORTED_ANTIBANDING, "auto");
+ params->set(CameraProperties::SUPPORTED_ISO_VALUES, "auto");
+ params->set(CameraProperties::SUPPORTED_SCENE_MODES, "auto");
+ params->set(CameraProperties::SUPPORTED_FLASH_MODES, "off");
+ params->set(CameraProperties::SUPPORTED_EFFECTS, "none");
+
+}
+
+};
+
+
+/*--------------------Camera Adapter Class ENDS here-----------------------------*/
+
diff --git a/inc/ANativeWindowDisplayAdapter.h b/inc/ANativeWindowDisplayAdapter.h
new file mode 100644
index 0000000..1d1118b
--- a/dev/null
+++ b/inc/ANativeWindowDisplayAdapter.h
@@ -0,0 +1,189 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#include "CameraHal.h"
+#include <ui/egl/android_natives.h>
+#include <ui/GraphicBufferMapper.h>
+//#include <hal_public.h>
+
+//temporarily define format here
+//#define HAL_PIXEL_FORMAT_TI_NV12 0x100
+
+namespace android {
+
+/**
+ * Display handler class - This class basically handles the buffer posting to display
+ */
+
+class ANativeWindowDisplayAdapter : public DisplayAdapter
+{
+public:
+
+ typedef struct
+ {
+ void *mBuffer;
+ void *mUser;
+ int mOffset;
+ int mWidth;
+ int mHeight;
+ int mWidthStride;
+ int mHeightStride;
+ int mLength;
+ CameraFrame::FrameType mType;
+ } DisplayFrame;
+
+ enum DisplayStates
+ {
+ DISPLAY_INIT = 0,
+ DISPLAY_STARTED,
+ DISPLAY_STOPPED,
+ DISPLAY_EXITED
+ };
+
+public:
+
+ ANativeWindowDisplayAdapter();
+ virtual ~ANativeWindowDisplayAdapter();
+
+    ///Initializes the display adapter and creates any resources required
+ virtual status_t initialize();
+
+ virtual int setPreviewWindow(struct preview_stream_ops *window);
+ virtual int setFrameProvider(FrameNotifier *frameProvider);
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier);
+ virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL, S3DParameters *s3dParams = NULL);
+ virtual int disableDisplay(bool cancel_buffer = true);
+ virtual status_t pauseDisplay(bool pause);
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //Used for shot to snapshot measurement
+ virtual status_t setSnapshotTimeRef(struct timeval *refTime = NULL);
+
+#endif
+
+ virtual int useBuffers(void* bufArr, int num);
+ virtual bool supportsExternalBuffering();
+
+ //Implementation of inherited interfaces
+ virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual uint32_t * getOffsets() ;
+ virtual int getFd() ;
+ virtual int freeBuffer(void* buf);
+
+ virtual int maxQueueableBuffers(unsigned int& queueable);
+
+ ///Class specific functions
+ static void frameCallbackRelay(CameraFrame* caFrame);
+ void frameCallback(CameraFrame* caFrame);
+
+ void displayThread();
+
+ private:
+ void destroy();
+ bool processHalMsg();
+ status_t PostFrame(ANativeWindowDisplayAdapter::DisplayFrame &dispFrame);
+ bool handleFrameReturn();
+ status_t returnBuffersToWindow();
+
+public:
+
+ static const int DISPLAY_TIMEOUT;
+ static const int FAILED_DQS_TO_SUSPEND;
+
+ class DisplayThread : public Thread
+ {
+ ANativeWindowDisplayAdapter* mDisplayAdapter;
+ TIUTILS::MessageQueue mDisplayThreadQ;
+
+ public:
+ DisplayThread(ANativeWindowDisplayAdapter* da)
+ : Thread(false), mDisplayAdapter(da) { }
+
+ ///Returns a reference to the display message Q for display adapter to post messages
+ TIUTILS::MessageQueue& msgQ()
+ {
+ return mDisplayThreadQ;
+ }
+
+ virtual bool threadLoop()
+ {
+ mDisplayAdapter->displayThread();
+ return false;
+ }
+
+ enum DisplayThreadCommands
+ {
+ DISPLAY_START,
+ DISPLAY_STOP,
+ DISPLAY_FRAME,
+ DISPLAY_EXIT
+ };
+ };
+
+ //friend declarations
+friend class DisplayThread;
+
+private:
+ int postBuffer(void* displayBuf);
+
+private:
+ bool mFirstInit;
+ bool mSuspend;
+ int mFailedDQs;
+ bool mPaused; //Pause state
+ preview_stream_ops_t* mANativeWindow;
+ sp<DisplayThread> mDisplayThread;
+ FrameProvider *mFrameProvider; ///Pointer to the frame provider interface
+ TIUTILS::MessageQueue mDisplayQ;
+ unsigned int mDisplayState;
+ ///@todo Have a common class for these members
+ mutable Mutex mLock;
+ bool mDisplayEnabled;
+ int mBufferCount;
+ buffer_handle_t** mBufferHandleMap;
+ native_handle_t** mGrallocHandleMap;//IMG_native_handle_t** mGrallocHandleMap;//TODO
+ uint32_t* mOffsetsMap;
+ int mFD;
+ KeyedVector<int, int> mFramesWithCameraAdapterMap;
+ sp<ErrorNotifier> mErrorNotifier;
+
+ uint32_t mFrameWidth;
+ uint32_t mFrameHeight;
+ uint32_t mPreviewWidth;
+ uint32_t mPreviewHeight;
+
+ uint32_t mXOff;
+ uint32_t mYOff;
+
+ const char *mPixelFormat;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ //Used for calculating standby to first shot
+ struct timeval mStandbyToShot;
+ bool mMeasureStandby;
+ //Used for shot to snapshot/shot calculation
+ struct timeval mStartCapture;
+ bool mShotToShot;
+
+#endif
+
+};
+
+};
+
diff --git a/inc/BaseCameraAdapter.h b/inc/BaseCameraAdapter.h
new file mode 100644
index 0000000..d778491
--- a/dev/null
+++ b/inc/BaseCameraAdapter.h
@@ -0,0 +1,272 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef BASE_CAMERA_ADAPTER_H
+#define BASE_CAMERA_ADAPTER_H
+
+#include "CameraHal.h"
+
+namespace android {
+
+class BaseCameraAdapter : public CameraAdapter
+{
+
+public:
+
+ BaseCameraAdapter();
+ virtual ~BaseCameraAdapter();
+
+    ///Initializes the camera adapter and creates any resources required
+ virtual status_t initialize(CameraProperties::Properties*) = 0;
+
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier);
+
+ //Message/Frame notification APIs
+ virtual void enableMsgType(int32_t msgs, frame_callback callback=NULL, event_callback eventCb=NULL, void* cookie=NULL);
+ virtual void disableMsgType(int32_t msgs, void* cookie);
+ virtual void returnFrame(void * frameBuf, CameraFrame::FrameType frameType);
+ virtual void addFramePointers(void *frameBuf, void *y_uv);
+ virtual void removeFramePointers();
+
+ //APIs to configure Camera adapter and get the current parameter set
+ virtual status_t setParameters(const CameraParameters& params) = 0;
+ virtual void getParameters(CameraParameters& params) = 0;
+
+ //API to send a command to the camera
+ virtual status_t sendCommand(CameraCommands operation, int value1 = 0, int value2 = 0, int value3 = 0 );
+
+ virtual status_t registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data);
+
+ virtual status_t registerEndCaptureCallback(end_image_capture_callback callback, void *user_data);
+
+ //Retrieves the current Adapter state
+ virtual AdapterState getState();
+ //Retrieves the next Adapter state
+ virtual AdapterState getNextState();
+
+ // Rolls the state machine back to INTIALIZED_STATE from the current state
+ virtual status_t rollbackToInitializedState();
+
+protected:
+ //The first two methods will try to switch the adapter state.
+ //Every call to setState() should be followed by a corresponding
+ //call to commitState(). If the state switch fails, then it will
+ //get reset to the previous state via rollbackState().
+ virtual status_t setState(CameraCommands operation);
+ virtual status_t commitState();
+ virtual status_t rollbackState();
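+
+    //Illustrative sketch of the protocol above (not taken from any specific adapter in this
+    //patch; CAMERA_START_PREVIEW and startPreview() are declared elsewhere in these headers):
+    //    status_t ret = setState(CameraAdapter::CAMERA_START_PREVIEW);
+    //    if ( NO_ERROR == ret ) { ret = startPreview(); }
+    //    if ( NO_ERROR == ret ) { commitState(); } else { rollbackState(); }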
+
+ // Retrieves the current Adapter state - for internal use (not locked)
+ virtual status_t getState(AdapterState &state);
+ // Retrieves the next Adapter state - for internal use (not locked)
+ virtual status_t getNextState(AdapterState &state);
+
+ //-----------Interface that needs to be implemented by deriving classes --------------------
+
+    //Should be implemented by deriving classes in order to start image capture
+ virtual status_t takePicture();
+
+    //Should be implemented by deriving classes in order to stop image capture
+ virtual status_t stopImageCapture();
+
+    //Should be implemented by deriving classes in order to start temporal bracketing
+ virtual status_t startBracketing(int range);
+
+ //Should be implemented by deriving classes in order to stop temporal bracketing
+ virtual status_t stopBracketing();
+
+    //Should be implemented by deriving classes in order to initiate autoFocus
+ virtual status_t autoFocus();
+
+    //Should be implemented by deriving classes in order to cancel autoFocus
+ virtual status_t cancelAutoFocus();
+
+ //Should be called by deriving classes in order to do some bookkeeping
+ virtual status_t startVideoCapture();
+
+ //Should be called by deriving classes in order to do some bookkeeping
+ virtual status_t stopVideoCapture();
+
+ //Should be implemented by deriving classes in order to start camera preview
+ virtual status_t startPreview();
+
+ //Should be implemented by deriving classes in order to stop camera preview
+ virtual status_t stopPreview();
+
+ //Should be implemented by deriving classes in order to start smooth zoom
+ virtual status_t startSmoothZoom(int targetIdx);
+
+ //Should be implemented by deriving classes in order to stop smooth zoom
+ virtual status_t stopSmoothZoom();
+
+    //Should be implemented by deriving classes in order to use the buffers allocated for a given mode
+ virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
+
+    //Should be implemented by deriving classes in order to queue a released buffer in CameraAdapter
+ virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+
+ //API to get the frame size required to be allocated. This size is used to override the size passed
+ //by camera service when VSTAB/VNF is turned ON for example
+ virtual status_t getFrameSize(size_t &width, size_t &height);
+
+ //API to get required data frame size
+ virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
+
+ //API to get required picture buffers size with the current configuration in CameraParameters
+ virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+
+ // Should be implemented by deriving classes in order to start face detection
+ // ( if supported )
+ virtual status_t startFaceDetection();
+
+ // Should be implemented by deriving classes in order to stop face detection
+ // ( if supported )
+ virtual status_t stopFaceDetection();
+
+ virtual status_t switchToExecuting();
+
+ // Receive orientation events from CameraHal
+ virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+
+ // ---------------------Interface ends-----------------------------------
+
+ status_t notifyFocusSubscribers(bool status);
+ status_t notifyShutterSubscribers();
+ status_t notifyZoomSubscribers(int zoomIdx, bool targetReached);
+ status_t notifyFaceSubscribers(sp<CameraFDResult> &faces);
+
+ //Send the frame to subscribers
+ status_t sendFrameToSubscribers(CameraFrame *frame);
+
+ //Resets the refCount for this particular frame
+ status_t resetFrameRefCount(CameraFrame &frame);
+
+ //A couple of helper functions
+ void setFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType, int refCount);
+ int getFrameRefCount(void* frameBuf, CameraFrame::FrameType frameType);
+ int setInitFrameRefCount(void* buf, unsigned int mask);
+
+// private member functions
+private:
+ status_t __sendFrameToSubscribers(CameraFrame* frame,
+ KeyedVector<int, frame_callback> *subscribers,
+ CameraFrame::FrameType frameType);
+ status_t rollbackToPreviousState();
+
+// protected data types and variables
+protected:
+ enum FrameState {
+ STOPPED = 0,
+ RUNNING
+ };
+
+ enum FrameCommands {
+ START_PREVIEW = 0,
+ START_RECORDING,
+ RETURN_FRAME,
+ STOP_PREVIEW,
+ STOP_RECORDING,
+ DO_AUTOFOCUS,
+ TAKE_PICTURE,
+ FRAME_EXIT
+ };
+
+ enum AdapterCommands {
+ ACK = 0,
+ ERROR
+ };
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ struct timeval mStartFocus;
+ struct timeval mStartCapture;
+
+#endif
+
+ mutable Mutex mReturnFrameLock;
+
+ //Lock protecting the Adapter state
+ mutable Mutex mLock;
+ AdapterState mAdapterState;
+ AdapterState mNextState;
+
+ //Different frame subscribers get stored using these
+ KeyedVector<int, frame_callback> mFrameSubscribers;
+ KeyedVector<int, frame_callback> mFrameDataSubscribers;
+ KeyedVector<int, frame_callback> mVideoSubscribers;
+ KeyedVector<int, frame_callback> mImageSubscribers;
+ KeyedVector<int, frame_callback> mRawSubscribers;
+ KeyedVector<int, event_callback> mFocusSubscribers;
+ KeyedVector<int, event_callback> mZoomSubscribers;
+ KeyedVector<int, event_callback> mShutterSubscribers;
+ KeyedVector<int, event_callback> mFaceSubscribers;
+
+ //Preview buffer management data
+ int *mPreviewBuffers;
+ int mPreviewBufferCount;
+ size_t mPreviewBuffersLength;
+ KeyedVector<int, int> mPreviewBuffersAvailable;
+ mutable Mutex mPreviewBufferLock;
+
+ //Video buffer management data
+ int *mVideoBuffers;
+ KeyedVector<int, int> mVideoBuffersAvailable;
+ int mVideoBuffersCount;
+ size_t mVideoBuffersLength;
+ mutable Mutex mVideoBufferLock;
+
+ //Image buffer management data
+ int *mCaptureBuffers;
+ KeyedVector<int, bool> mCaptureBuffersAvailable;
+ int mCaptureBuffersCount;
+ size_t mCaptureBuffersLength;
+ mutable Mutex mCaptureBufferLock;
+
+    //Metadata buffer management
+ int *mPreviewDataBuffers;
+ KeyedVector<int, bool> mPreviewDataBuffersAvailable;
+ int mPreviewDataBuffersCount;
+ size_t mPreviewDataBuffersLength;
+ mutable Mutex mPreviewDataBufferLock;
+
+ TIUTILS::MessageQueue mFrameQ;
+ TIUTILS::MessageQueue mAdapterQ;
+ mutable Mutex mSubscriberLock;
+ ErrorNotifier *mErrorNotifier;
+ release_image_buffers_callback mReleaseImageBuffersCallback;
+ end_image_capture_callback mEndImageCaptureCallback;
+ void *mReleaseData;
+ void *mEndCaptureData;
+ bool mRecording;
+
+ uint32_t mFramesWithDucati;
+ uint32_t mFramesWithDisplay;
+ uint32_t mFramesWithEncoder;
+
+#ifdef DEBUG_LOG
+ KeyedVector<int, bool> mBuffersWithDucati;
+#endif
+
+ KeyedVector<void *, CameraFrame *> mFrameQueue;
+};
+
+};
+
+#endif //BASE_CAMERA_ADAPTER_H
+
+
diff --git a/inc/CameraHal.h b/inc/CameraHal.h
new file mode 100644
index 0000000..a3fcf85
--- a/dev/null
+++ b/inc/CameraHal.h
@@ -0,0 +1,1264 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef ANDROID_HARDWARE_CAMERA_HARDWARE_H
+#define ANDROID_HARDWARE_CAMERA_HARDWARE_H
+
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <time.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <sys/stat.h>
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <linux/videodev2.h>
+#include "binder/MemoryBase.h"
+#include "binder/MemoryHeapBase.h"
+#include <utils/threads.h>
+#include <camera/CameraParameters.h>
+#include <hardware/camera.h>
+#include "MessageQueue.h"
+#include "Semaphore.h"
+#include "CameraProperties.h"
+#include "DebugUtils.h"
+#include "SensorListener.h"
+
+#include <ui/GraphicBufferAllocator.h>
+#include <ui/GraphicBuffer.h>
+
+#define MIN_WIDTH 640
+#define MIN_HEIGHT 480
+#define PICTURE_WIDTH 3264 /* 5mp - 2560. 8mp - 3280 */ /* Make sure it is a multiple of 16. */
+#define PICTURE_HEIGHT 2448 /* 5mp - 2048. 8mp - 2464 */ /* Make sure it is a multiple of 16. */
+#define PREVIEW_WIDTH 176
+#define PREVIEW_HEIGHT 144
+//#define PIXEL_FORMAT V4L2_PIX_FMT_UYVY
+
+#define VIDEO_FRAME_COUNT_MAX 8 //NUM_OVERLAY_BUFFERS_REQUESTED
+#define MAX_CAMERA_BUFFERS 8 //NUM_OVERLAY_BUFFERS_REQUESTED
+#define MAX_ZOOM 3
+#define THUMB_WIDTH 80
+#define THUMB_HEIGHT 60
+#define PIX_YUV422I 0
+#define PIX_YUV420P 1
+
+#define SATURATION_OFFSET 100
+#define SHARPNESS_OFFSET 100
+#define CONTRAST_OFFSET 100
+
+#define CAMHAL_GRALLOC_USAGE GRALLOC_USAGE_HW_TEXTURE | \
+ GRALLOC_USAGE_HW_RENDER | \
+ GRALLOC_USAGE_SW_READ_RARELY | \
+ GRALLOC_USAGE_SW_WRITE_NEVER
+
+//Enables Absolute PPM measurements in logcat
+#define PPM_INSTRUMENTATION_ABS 1
+
+#define LOCK_BUFFER_TRIES 5
+//TODO this is wrong. fix this:
+#define HAL_PIXEL_FORMAT_NV12 HAL_PIXEL_FORMAT_YCrCb_420_SP
+
+//Uncomment to enable more verbose/debug logs
+#define DEBUG_LOG
+
+///Camera HAL Logging Functions
+#ifndef DEBUG_LOG
+
+#define CAMHAL_LOGDA(str)
+#define CAMHAL_LOGDB(str, ...)
+#define CAMHAL_LOGVA(str)
+#define CAMHAL_LOGVB(str, ...)
+
+#define CAMHAL_LOGEA LOGE
+#define CAMHAL_LOGEB LOGE
+
+#undef LOG_FUNCTION_NAME
+#undef LOG_FUNCTION_NAME_EXIT
+#define LOG_FUNCTION_NAME
+#define LOG_FUNCTION_NAME_EXIT
+
+#else
+
+#define CAMHAL_LOGDA DBGUTILS_LOGDA
+#define CAMHAL_LOGDB DBGUTILS_LOGDB
+#define CAMHAL_LOGVA DBGUTILS_LOGVA
+#define CAMHAL_LOGVB DBGUTILS_LOGVB
+
+#define CAMHAL_LOGEA DBGUTILS_LOGEA
+#define CAMHAL_LOGEB DBGUTILS_LOGEB
+
+#endif
+
+
+
+#define NONNEG_ASSIGN(x,y) \
+ if(x > -1) \
+ y = x
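+//NONNEG_ASSIGN expands to a bare if-statement, so it is intended to be used as a standalone
+//statement. Illustrative example (hypothetical variables, not from this patch):
+//    NONNEG_ASSIGN(params.getInt(CameraParameters::KEY_ZOOM), zoomIdx);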
+
+namespace android {
+
+#define PARAM_BUFFER 6000
+
+///Forward declarations
+class CameraHal;
+class CameraFrame;
+class CameraHalEvent;
+class DisplayFrame;
+
+class CameraArea : public RefBase
+{
+public:
+
+ CameraArea(ssize_t top,
+ ssize_t left,
+ ssize_t bottom,
+ ssize_t right,
+ size_t weight) : mTop(top),
+ mLeft(left),
+ mBottom(bottom),
+ mRight(right),
+ mWeight(weight) {}
+
+ status_t transfrom(size_t width,
+ size_t height,
+ size_t &top,
+ size_t &left,
+ size_t &areaWidth,
+ size_t &areaHeight);
+
+ bool isValid()
+ {
+ return ( ( 0 != mTop ) || ( 0 != mLeft ) || ( 0 != mBottom ) || ( 0 != mRight) );
+ }
+
+ bool isZeroArea()
+ {
+ return ( (0 == mTop ) && ( 0 == mLeft ) && ( 0 == mBottom )
+ && ( 0 == mRight ) && ( 0 == mWeight ));
+ }
+
+ size_t getWeight()
+ {
+ return mWeight;
+ }
+
+ bool compare(const sp<CameraArea> &area);
+
+ static status_t parseAreas(const char *area,
+ size_t areaLength,
+ Vector< sp<CameraArea> > &areas);
+
+ static status_t checkArea(ssize_t top,
+ ssize_t left,
+ ssize_t bottom,
+ ssize_t right,
+ ssize_t weight);
+
+ static bool areAreasDifferent(Vector< sp<CameraArea> > &, Vector< sp<CameraArea> > &);
+
+protected:
+ static const ssize_t TOP = -1000;
+ static const ssize_t LEFT = -1000;
+ static const ssize_t BOTTOM = 1000;
+ static const ssize_t RIGHT = 1000;
+ static const ssize_t WEIGHT_MIN = 1;
+ static const ssize_t WEIGHT_MAX = 1000;
+
+ ssize_t mTop;
+ ssize_t mLeft;
+ ssize_t mBottom;
+ ssize_t mRight;
+ size_t mWeight;
+};
+
+class CameraFDResult : public RefBase
+{
+public:
+
+ CameraFDResult() : mFaceData(NULL) {};
+ CameraFDResult(camera_frame_metadata_t *faces) : mFaceData(faces) {};
+
+ virtual ~CameraFDResult() {
+ if ( ( NULL != mFaceData ) && ( NULL != mFaceData->faces ) ) {
+ free(mFaceData->faces);
+ free(mFaceData);
+ mFaceData=NULL;
+ }
+
+ if(( NULL != mFaceData ))
+ {
+ free(mFaceData);
+ mFaceData = NULL;
+ }
+ }
+
+ camera_frame_metadata_t *getFaceResult() { return mFaceData; };
+
+ static const ssize_t TOP = -1000;
+ static const ssize_t LEFT = -1000;
+ static const ssize_t BOTTOM = 1000;
+ static const ssize_t RIGHT = 1000;
+ static const ssize_t INVALID_DATA = -2000;
+
+private:
+
+ camera_frame_metadata_t *mFaceData;
+};
+
+class CameraFrame
+{
+ public:
+
+ enum FrameType
+ {
+ PREVIEW_FRAME_SYNC = 0x1, ///SYNC implies that the frame needs to be explicitly returned after consuming in order to be filled by camera again
+ PREVIEW_FRAME = 0x2 , ///Preview frame includes viewfinder and snapshot frames
+ IMAGE_FRAME_SYNC = 0x4, ///Image Frame is the image capture output frame
+ IMAGE_FRAME = 0x8,
+ VIDEO_FRAME_SYNC = 0x10, ///Timestamp will be updated for these frames
+ VIDEO_FRAME = 0x20,
+        FRAME_DATA_SYNC = 0x40, ///Any extra data associated with the frame. Always synced with the frame
+ FRAME_DATA= 0x80,
+ RAW_FRAME = 0x100,
+ SNAPSHOT_FRAME = 0x200,
+ ALL_FRAMES = 0xFFFF ///Maximum of 16 frame types supported
+ };
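+
+    ///Frame types are bit flags, so mFrameMask can carry a combination of them,
+    ///e.g. ( PREVIEW_FRAME_SYNC | VIDEO_FRAME_SYNC ) while recording (illustrative example).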
+
+ enum FrameQuirks
+ {
+ ENCODE_RAW_YUV422I_TO_JPEG = 0x1 << 0,
+ HAS_EXIF_DATA = 0x1 << 1,
+ };
+
+    //default constructor
+ CameraFrame():
+ mCookie(NULL),
+ mCookie2(NULL),
+ mBuffer(NULL),
+ mFrameType(0),
+ mTimestamp(0),
+ mWidth(0),
+ mHeight(0),
+ mOffset(0),
+ mAlignment(0),
+ mFd(0),
+ mLength(0),
+ mFrameMask(0),
+ mQuirks(0) {
+
+ mYuv[0] = NULL;
+ mYuv[1] = NULL;
+ }
+
+ //copy constructor
+ CameraFrame(const CameraFrame &frame) :
+ mCookie(frame.mCookie),
+ mCookie2(frame.mCookie2),
+ mBuffer(frame.mBuffer),
+ mFrameType(frame.mFrameType),
+ mTimestamp(frame.mTimestamp),
+ mWidth(frame.mWidth),
+ mHeight(frame.mHeight),
+ mOffset(frame.mOffset),
+ mAlignment(frame.mAlignment),
+ mFd(frame.mFd),
+ mLength(frame.mLength),
+ mFrameMask(frame.mFrameMask),
+ mQuirks(frame.mQuirks) {
+
+ mYuv[0] = frame.mYuv[0];
+ mYuv[1] = frame.mYuv[1];
+ }
+
+ void *mCookie;
+ void *mCookie2;
+ void *mBuffer;
+ int mFrameType;
+ nsecs_t mTimestamp;
+ unsigned int mWidth, mHeight;
+ uint32_t mOffset;
+ unsigned int mAlignment;
+ int mFd;
+ size_t mLength;
+ unsigned mFrameMask;
+ unsigned int mQuirks;
+ unsigned int mYuv[2];
+ ///@todo add other member vars like stride etc
+};
+
+enum CameraHalError
+{
+ CAMERA_ERROR_FATAL = 0x1, //Fatal errors can only be recovered by restarting media server
+ CAMERA_ERROR_HARD = 0x2, // Hard errors are hardware hangs that may be recoverable by resetting the hardware internally within the adapter
+ CAMERA_ERROR_SOFT = 0x4, // Soft errors are non fatal errors that can be recovered from without needing to stop use-case
+};
+
+///Common Camera Hal Event class which is visible to CameraAdapter,DisplayAdapter and AppCallbackNotifier
+///@todo Rename this class to CameraEvent
+class CameraHalEvent
+{
+public:
+ //Enums
+ enum CameraHalEventType {
+ NO_EVENTS = 0x0,
+ EVENT_FOCUS_LOCKED = 0x1,
+ EVENT_FOCUS_ERROR = 0x2,
+ EVENT_ZOOM_INDEX_REACHED = 0x4,
+ EVENT_SHUTTER = 0x8,
+ EVENT_FACE = 0x10,
+ ///@remarks Future enum related to display, like frame displayed event, could be added here
+ ALL_EVENTS = 0xFFFF ///Maximum of 16 event types supported
+ };
+
+ ///Class declarations
+ ///@remarks Add a new class for a new event type added above
+
+ //Shutter event specific data
+ typedef struct ShutterEventData_t {
+ bool shutterClosed;
+ }ShutterEventData;
+
+ ///Focus event specific data
+ typedef struct FocusEventData_t {
+ bool focusLocked;
+ bool focusError;
+ int currentFocusValue;
+ } FocusEventData;
+
+ ///Zoom specific event data
+ typedef struct ZoomEventData_t {
+ int currentZoomIndex;
+ bool targetZoomIndexReached;
+ } ZoomEventData;
+
+ typedef struct FaceData_t {
+ ssize_t top;
+ ssize_t left;
+ ssize_t bottom;
+ ssize_t right;
+ size_t score;
+ } FaceData;
+
+ typedef sp<CameraFDResult> FaceEventData;
+
+ class CameraHalEventData : public RefBase{
+
+ public:
+
+ CameraHalEvent::FocusEventData focusEvent;
+ CameraHalEvent::ZoomEventData zoomEvent;
+ CameraHalEvent::ShutterEventData shutterEvent;
+ CameraHalEvent::FaceEventData faceEvent;
+ };
+
+    //default constructor
+ CameraHalEvent():
+ mCookie(NULL),
+ mEventType(NO_EVENTS) {}
+
+ //copy constructor
+ CameraHalEvent(const CameraHalEvent &event) :
+ mCookie(event.mCookie),
+ mEventType(event.mEventType),
+ mEventData(event.mEventData) {};
+
+ void* mCookie;
+ CameraHalEventType mEventType;
+ sp<CameraHalEventData> mEventData;
+
+};
+
+/// Have a generic callback class based on template - to adapt CameraFrame and Event
+typedef void (*frame_callback) (CameraFrame *cameraFrame);
+typedef void (*event_callback) (CameraHalEvent *event);
+
+//signals CameraHAL to release image buffers
+typedef void (*release_image_buffers_callback) (void *userData);
+typedef void (*end_image_capture_callback) (void *userData);
+
+/**
+ * Interface class implemented by classes that have some events to communicate to dependent classes
+ * Dependent classes use this interface for registering for events
+ */
+class MessageNotifier
+{
+public:
+ static const uint32_t EVENT_BIT_FIELD_POSITION;
+ static const uint32_t FRAME_BIT_FIELD_POSITION;
+
+ ///@remarks Msg type comes from CameraFrame and CameraHalEvent classes
+ /// MSB 16 bits is for events and LSB 16 bits is for frame notifications
+ /// FrameProvider and EventProvider classes act as helpers to event/frame
+ /// consumers to call this api
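+    ///@remarks Illustrative example (not a normative part of this interface): a preview consumer
+    /// would set CameraFrame::PREVIEW_FRAME_SYNC in the lower 16 bits of msgs, while event
+    /// consumers use CameraHalEvent bits placed in the upper 16 bits.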
+ virtual void enableMsgType(int32_t msgs, frame_callback frameCb=NULL, event_callback eventCb=NULL, void* cookie=NULL) = 0;
+ virtual void disableMsgType(int32_t msgs, void* cookie) = 0;
+
+ virtual ~MessageNotifier() {};
+};
+
+class ErrorNotifier : public virtual RefBase
+{
+public:
+ virtual void errorNotify(int error) = 0;
+
+ virtual ~ErrorNotifier() {};
+};
+
+
+/**
+ * Interface class abstraction for Camera Adapter to act as a frame provider
+ * This interface is fully implemented by Camera Adapter
+ */
+class FrameNotifier : public MessageNotifier
+{
+public:
+ virtual void returnFrame(void* frameBuf, CameraFrame::FrameType frameType) = 0;
+ virtual void addFramePointers(void *frameBuf, void *buf) = 0;
+ virtual void removeFramePointers() = 0;
+
+ virtual ~FrameNotifier() {};
+};
+
+/**
+ * Wrapper class around Frame Notifier, which is used by display and notification classes for interacting with Camera Adapter
+ */
+class FrameProvider
+{
+ FrameNotifier* mFrameNotifier;
+ void* mCookie;
+ frame_callback mFrameCallback;
+
+public:
+ FrameProvider(FrameNotifier *fn, void* cookie, frame_callback frameCallback)
+ :mFrameNotifier(fn), mCookie(cookie),mFrameCallback(frameCallback) { }
+
+ int enableFrameNotification(int32_t frameTypes);
+ int disableFrameNotification(int32_t frameTypes);
+ int returnFrame(void *frameBuf, CameraFrame::FrameType frameType);
+ void addFramePointers(void *frameBuf, void *buf);
+ void removeFramePointers();
+};
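+
+/* Illustrative wiring sketch (assumed usage, not mandated by this header): a frame consumer such
+ * as a display adapter typically subscribes to preview frames like this:
+ *     mFrameProvider = new FrameProvider(frameNotifier, this, frameCallbackRelay);
+ *     mFrameProvider->enableFrameNotification(CameraFrame::PREVIEW_FRAME_SYNC);
+ */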
+
+/** Wrapper class around MessageNotifier, which is used by display and notification classes for interacting with
+ * Camera Adapter
+ */
+class EventProvider
+{
+public:
+ MessageNotifier* mEventNotifier;
+ void* mCookie;
+ event_callback mEventCallback;
+
+public:
+ EventProvider(MessageNotifier *mn, void* cookie, event_callback eventCallback)
+ :mEventNotifier(mn), mCookie(cookie), mEventCallback(eventCallback) {}
+
+ int enableEventNotification(int32_t eventTypes);
+ int disableEventNotification(int32_t eventTypes);
+};
+
+/*
+ * Interface for providing buffers
+ */
+class BufferProvider
+{
+public:
+ virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs) = 0;
+
+ //additional methods used for memory mapping
+ virtual uint32_t * getOffsets() = 0;
+ virtual int getFd() = 0;
+
+ virtual int freeBuffer(void* buf) = 0;
+
+ virtual ~BufferProvider() {}
+};
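+
+/* Illustrative use of the BufferProvider contract (a sketch; the variable names are made up):
+ *     int bytes = 0;
+ *     void* bufs = provider->allocateBuffer(width, height, CameraParameters::PIXEL_FORMAT_YUV420SP, bytes, count);
+ *     ...
+ *     provider->freeBuffer(bufs);
+ */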
+
+/**
+ * Class for handling data and notify callbacks to application
+ */
+class AppCallbackNotifier: public ErrorNotifier , public virtual RefBase
+{
+
+public:
+
+ ///Constants
+ static const int NOTIFIER_TIMEOUT;
+ static const int32_t MAX_BUFFERS = 8;
+
+ enum NotifierCommands
+ {
+ NOTIFIER_CMD_PROCESS_EVENT,
+ NOTIFIER_CMD_PROCESS_FRAME,
+ NOTIFIER_CMD_PROCESS_ERROR
+ };
+
+ enum NotifierState
+ {
+ NOTIFIER_STOPPED,
+ NOTIFIER_STARTED,
+ NOTIFIER_EXITED
+ };
+
+public:
+
+ ~AppCallbackNotifier();
+
+    ///Initializes the callback notifier and creates any resources required
+ status_t initialize();
+
+ ///Starts the callbacks to application
+ status_t start();
+
+ ///Stops the callbacks from going to application
+ status_t stop();
+
+ void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider);
+ void setFrameProvider(FrameNotifier *frameProvider);
+
+ //All sub-components of Camera HAL call this whenever any error happens
+ virtual void errorNotify(int error);
+
+ status_t startPreviewCallbacks(CameraParameters &params, void *buffers, uint32_t *offsets, int fd, size_t length, size_t count);
+ status_t stopPreviewCallbacks();
+
+ status_t enableMsgType(int32_t msgType);
+ status_t disableMsgType(int32_t msgType);
+
+ //API for enabling/disabling measurement data
+ void setMeasurements(bool enable);
+
+ //thread loops
+ bool notificationThread();
+
+ ///Notification callback functions
+ static void frameCallbackRelay(CameraFrame* caFrame);
+ static void eventCallbackRelay(CameraHalEvent* chEvt);
+ void frameCallback(CameraFrame* caFrame);
+ void eventCallback(CameraHalEvent* chEvt);
+ void flushAndReturnFrames();
+
+ void setCallbacks(CameraHal *cameraHal,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ //Set Burst mode
+ void setBurst(bool burst);
+
+ //Notifications from CameraHal for video recording case
+ status_t startRecording();
+ status_t stopRecording();
+ status_t initSharedVideoBuffers(void *buffers, uint32_t *offsets, int fd, size_t length, size_t count, void *vidBufs);
+ status_t releaseRecordingFrame(const void *opaque);
+
+ status_t useMetaDataBufferMode(bool enable);
+
+ void EncoderDoneCb(void*, void*, CameraFrame::FrameType type, void* cookie1, void* cookie2);
+
+ void useVideoBuffers(bool useVideoBuffers);
+
+ bool getUesVideoBuffers();
+ void setVideoRes(int width, int height);
+
+ void flushEventQueue();
+
+ //Internal class definitions
+ class NotificationThread : public Thread {
+ AppCallbackNotifier* mAppCallbackNotifier;
+ TIUTILS::MessageQueue mNotificationThreadQ;
+ public:
+ enum NotificationThreadCommands
+ {
+ NOTIFIER_START,
+ NOTIFIER_STOP,
+ NOTIFIER_EXIT,
+ };
+ public:
+ NotificationThread(AppCallbackNotifier* nh)
+ : Thread(false), mAppCallbackNotifier(nh) { }
+ virtual bool threadLoop() {
+ return mAppCallbackNotifier->notificationThread();
+ }
+
+ TIUTILS::MessageQueue &msgQ() { return mNotificationThreadQ;}
+ };
+
+ //Friend declarations
+ friend class NotificationThread;
+
+private:
+ void notifyEvent();
+ void notifyFrame();
+ bool processMessage();
+ void releaseSharedVideoBuffers();
+ status_t dummyRaw();
+ void copyAndSendPictureFrame(CameraFrame* frame, int32_t msgType);
+ void copyAndSendPreviewFrame(CameraFrame* frame, int32_t msgType);
+
+private:
+ mutable Mutex mLock;
+ mutable Mutex mBurstLock;
+ CameraHal* mCameraHal;
+ camera_notify_callback mNotifyCb;
+ camera_data_callback mDataCb;
+ camera_data_timestamp_callback mDataCbTimestamp;
+ camera_request_memory mRequestMemory;
+ void *mCallbackCookie;
+
+ //Keeps Video MemoryHeaps and Buffers within
+ //these objects
+ KeyedVector<unsigned int, unsigned int> mVideoHeaps;
+ KeyedVector<unsigned int, unsigned int> mVideoBuffers;
+ KeyedVector<unsigned int, unsigned int> mVideoMap;
+
+ //Keeps list of Gralloc handles and associated Video Metadata Buffers
+ KeyedVector<uint32_t, uint32_t> mVideoMetadataBufferMemoryMap;
+ KeyedVector<uint32_t, uint32_t> mVideoMetadataBufferReverseMap;
+
+ bool mBufferReleased;
+
+ sp< NotificationThread> mNotificationThread;
+ EventProvider *mEventProvider;
+ FrameProvider *mFrameProvider;
+ TIUTILS::MessageQueue mEventQ;
+ TIUTILS::MessageQueue mFrameQ;
+ NotifierState mNotifierState;
+
+ bool mPreviewing;
+ camera_memory_t* mPreviewMemory;
+ unsigned char* mPreviewBufs[MAX_BUFFERS];
+ int mPreviewBufCount;
+ const char *mPreviewPixelFormat;
+ KeyedVector<unsigned int, sp<MemoryHeapBase> > mSharedPreviewHeaps;
+ KeyedVector<unsigned int, sp<MemoryBase> > mSharedPreviewBuffers;
+
+ //Burst mode active
+ bool mBurst;
+ mutable Mutex mRecordingLock;
+ bool mRecording;
+ bool mMeasurementEnabled;
+
+ bool mUseMetaDataBufferMode;
+ bool mRawAvailable;
+
+ bool mUseVideoBuffers;
+
+ int mVideoWidth;
+ int mVideoHeight;
+
+};
+
+
+/**
+ * Class used for allocating memory for JPEG bit stream buffers and for camera output buffers in the no-overlay case
+ */
+class MemoryManager : public BufferProvider, public virtual RefBase
+{
+public:
+ MemoryManager():mIonFd(0){ }
+
+    ///Initializes the memory manager and creates any resources required
+ status_t initialize() { return NO_ERROR; }
+
+ int setErrorHandler(ErrorNotifier *errorNotifier);
+ virtual void* allocateBuffer(int width, int height, const char* format, int &bytes, int numBufs);
+ virtual uint32_t * getOffsets();
+ virtual int getFd() ;
+ virtual int freeBuffer(void* buf);
+
+private:
+
+ sp<ErrorNotifier> mErrorNotifier;
+ int mIonFd;
+ KeyedVector<unsigned int, unsigned int> mIonHandleMap;
+ KeyedVector<unsigned int, unsigned int> mIonFdMap;
+ KeyedVector<unsigned int, unsigned int> mIonBufLength;
+};
+
+
+
+
+/**
+ * CameraAdapter interface class
+ * Concrete classes derive from this class and provide implementations based on the specific camera h/w interface
+ */
+
+class CameraAdapter: public FrameNotifier, public virtual RefBase
+{
+protected:
+ enum AdapterActiveStates {
+ INTIALIZED_ACTIVE = 1 << 0,
+ LOADED_PREVIEW_ACTIVE = 1 << 1,
+ PREVIEW_ACTIVE = 1 << 2,
+ LOADED_CAPTURE_ACTIVE = 1 << 3,
+ CAPTURE_ACTIVE = 1 << 4,
+ BRACKETING_ACTIVE = 1 << 5,
+ AF_ACTIVE = 1 << 6,
+ ZOOM_ACTIVE = 1 << 7,
+ VIDEO_ACTIVE = 1 << 8,
+ };
+public:
+ typedef struct
+ {
+ void *mBuffers;
+ uint32_t *mOffsets;
+ int mFd;
+ size_t mLength;
+ size_t mCount;
+ size_t mMaxQueueable;
+ } BuffersDescriptor;
+
+ enum CameraCommands
+ {
+ CAMERA_START_PREVIEW = 0,
+ CAMERA_STOP_PREVIEW = 1,
+ CAMERA_START_VIDEO = 2,
+ CAMERA_STOP_VIDEO = 3,
+ CAMERA_START_IMAGE_CAPTURE = 4,
+ CAMERA_STOP_IMAGE_CAPTURE = 5,
+ CAMERA_PERFORM_AUTOFOCUS = 6,
+ CAMERA_CANCEL_AUTOFOCUS = 7,
+ CAMERA_PREVIEW_FLUSH_BUFFERS = 8,
+ CAMERA_START_SMOOTH_ZOOM = 9,
+ CAMERA_STOP_SMOOTH_ZOOM = 10,
+ CAMERA_USE_BUFFERS_PREVIEW = 11,
+ CAMERA_SET_TIMEOUT = 12,
+ CAMERA_CANCEL_TIMEOUT = 13,
+ CAMERA_START_BRACKET_CAPTURE = 14,
+ CAMERA_STOP_BRACKET_CAPTURE = 15,
+ CAMERA_QUERY_RESOLUTION_PREVIEW = 16,
+ CAMERA_QUERY_BUFFER_SIZE_IMAGE_CAPTURE = 17,
+ CAMERA_QUERY_BUFFER_SIZE_PREVIEW_DATA = 18,
+ CAMERA_USE_BUFFERS_IMAGE_CAPTURE = 19,
+ CAMERA_USE_BUFFERS_PREVIEW_DATA = 20,
+ CAMERA_TIMEOUT_EXPIRED = 21,
+ CAMERA_START_FD = 22,
+ CAMERA_STOP_FD = 23,
+ CAMERA_SWITCH_TO_EXECUTING = 24,
+ };
+
+ enum CameraMode
+ {
+ CAMERA_PREVIEW,
+ CAMERA_IMAGE_CAPTURE,
+ CAMERA_VIDEO,
+ CAMERA_MEASUREMENT
+ };
+
+ enum AdapterState {
+ INTIALIZED_STATE = INTIALIZED_ACTIVE,
+ LOADED_PREVIEW_STATE = LOADED_PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ PREVIEW_STATE = PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ LOADED_CAPTURE_STATE = LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ CAPTURE_STATE = CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ BRACKETING_STATE = BRACKETING_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE ,
+ AF_STATE = AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ ZOOM_STATE = ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_STATE = VIDEO_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_AF_STATE = VIDEO_ACTIVE | AF_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_ZOOM_STATE = VIDEO_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_LOADED_CAPTURE_STATE = VIDEO_ACTIVE | LOADED_CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ VIDEO_CAPTURE_STATE = VIDEO_ACTIVE | CAPTURE_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ AF_ZOOM_STATE = AF_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ BRACKETING_ZOOM_STATE = BRACKETING_ACTIVE | ZOOM_ACTIVE | PREVIEW_ACTIVE | INTIALIZED_ACTIVE,
+ };
+
+public:
+
+    ///Initializes the camera adapter and creates any resources required
+ virtual int initialize(CameraProperties::Properties*) = 0;
+
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier) = 0;
+
+ //Message/Frame notification APIs
+ virtual void enableMsgType(int32_t msgs,
+ frame_callback callback = NULL,
+ event_callback eventCb = NULL,
+ void *cookie = NULL) = 0;
+ virtual void disableMsgType(int32_t msgs, void* cookie) = 0;
+ virtual void returnFrame(void* frameBuf, CameraFrame::FrameType frameType) = 0;
+ virtual void addFramePointers(void *frameBuf, void *buf) = 0;
+ virtual void removeFramePointers() = 0;
+
+ //APIs to configure Camera adapter and get the current parameter set
+ virtual int setParameters(const CameraParameters& params) = 0;
+ virtual void getParameters(CameraParameters& params) = 0;
+
+ //API to flush the buffers from Camera
+ status_t flushBuffers()
+ {
+ return sendCommand(CameraAdapter::CAMERA_PREVIEW_FLUSH_BUFFERS);
+ }
+
+ //Registers callback for returning image buffers back to CameraHAL
+ virtual int registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data) = 0;
+
+ //Registers callback, which signals a completed image capture
+ virtual int registerEndCaptureCallback(end_image_capture_callback callback, void *user_data) = 0;
+
+ //API to send a command to the camera
+ virtual status_t sendCommand(CameraCommands operation, int value1=0, int value2=0, int value3=0) = 0;
+
+ virtual ~CameraAdapter() {};
+
+ //Retrieves the current Adapter state
+ virtual AdapterState getState() = 0;
+
+ //Retrieves the next Adapter state
+ virtual AdapterState getNextState() = 0;
+
+ // Receive orientation events from CameraHal
+ virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt) = 0;
+
+ // Rolls the state machine back to INTIALIZED_STATE from the current state
+ virtual status_t rollbackToInitializedState() = 0;
+protected:
+ //The first two methods will try to switch the adapter state.
+ //Every call to setState() should be followed by a corresponding
+ //call to commitState(). If the state switch fails, then it will
+ //get reset to the previous state via rollbackState().
+ virtual status_t setState(CameraCommands operation) = 0;
+ virtual status_t commitState() = 0;
+ virtual status_t rollbackState() = 0;
+
+ // Retrieves the current Adapter state - for internal use (not locked)
+ virtual status_t getState(AdapterState &state) = 0;
+ // Retrieves the next Adapter state - for internal use (not locked)
+ virtual status_t getNextState(AdapterState &state) = 0;
+};
+
+class DisplayAdapter : public BufferProvider, public virtual RefBase
+{
+public:
+ typedef struct S3DParameters_t
+ {
+ int mode;
+ int framePacking;
+ int order;
+ int subSampling;
+ } S3DParameters;
+
+    ///Initializes the display adapter and creates any resources required
+ virtual int initialize() = 0;
+
+ virtual int setPreviewWindow(struct preview_stream_ops *window) = 0;
+ virtual int setFrameProvider(FrameNotifier *frameProvider) = 0;
+ virtual int setErrorHandler(ErrorNotifier *errorNotifier) = 0;
+ virtual int enableDisplay(int width, int height, struct timeval *refTime = NULL, S3DParameters *s3dParams = NULL) = 0;
+ virtual int disableDisplay(bool cancel_buffer = true) = 0;
+ //Used for Snapshot review temp. pause
+ virtual int pauseDisplay(bool pause) = 0;
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+ //Used for shot to snapshot measurement
+ virtual int setSnapshotTimeRef(struct timeval *refTime = NULL) = 0;
+#endif
+
+ virtual int useBuffers(void *bufArr, int num) = 0;
+ virtual bool supportsExternalBuffering() = 0;
+
+ // Get max queueable buffers display supports
+ // This function should only be called after
+ // allocateBuffer
+ virtual int maxQueueableBuffers(unsigned int& queueable) = 0;
+};
+
+static void releaseImageBuffers(void *userData);
+
+static void endImageCapture(void *userData);
+
+ /**
+ Implementation of the Android Camera hardware abstraction layer
+
+ This class implements the interface methods defined in CameraHardwareInterface
+ for the OMAP4 platform
+
+*/
+class CameraHal
+
+{
+
+public:
+ ///Constants
+ static const int NO_BUFFERS_PREVIEW;
+ static const int NO_BUFFERS_IMAGE_CAPTURE;
+ static const uint32_t VFR_SCALE = 1000;
+
+
+ /*--------------------Interface Methods---------------------------------*/
+
+ //@{
+public:
+
+ /** Set the notification and data callbacks */
+ void setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ /** Receives orientation events from SensorListener **/
+ void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+
+ /**
+ * The following three functions all take a msgtype,
+ * which is a bitmask of the messages defined in
+ * include/ui/Camera.h
+ */
+
+ /**
+ * Enable a message, or set of messages.
+ */
+ void enableMsgType(int32_t msgType);
+
+ /**
+ * Disable a message, or a set of messages.
+ */
+ void disableMsgType(int32_t msgType);
+
+ /**
+ * Query whether a message, or a set of messages, is enabled.
+     * Note that this operates as an AND; if any of the messages
+ * queried are off, this will return false.
+ */
+ int msgTypeEnabled(int32_t msgType);
+
+ /**
+ * Start preview mode.
+ */
+ int startPreview();
+
+ /**
+ * Only used if overlays are used for camera preview.
+ */
+ int setPreviewWindow(struct preview_stream_ops *window);
+
+ /**
+ * Stop a previously started preview.
+ */
+ void stopPreview();
+
+ /**
+ * Returns true if preview is enabled.
+ */
+ bool previewEnabled();
+
+ /**
+ * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
+ * message is sent with the corresponding frame. Every record frame must be released
+ * by calling releaseRecordingFrame().
+ */
+ int startRecording();
+
+ /**
+ * Stop a previously started recording.
+ */
+ void stopRecording();
+
+ /**
+ * Returns true if recording is enabled.
+ */
+ int recordingEnabled();
+
+ /**
+ * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+ */
+ void releaseRecordingFrame(const void *opaque);
+
+ /**
+ * Start auto focus, the notification callback routine is called
+ * with CAMERA_MSG_FOCUS once when focusing is complete. autoFocus()
+ * will be called again if another auto focus is needed.
+ */
+ int autoFocus();
+
+ /**
+ * Cancels auto-focus function. If the auto-focus is still in progress,
+ * this function will cancel it. Whether the auto-focus is in progress
+ * or not, this function will return the focus position to the default.
+ * If the camera does not support auto-focus, this is a no-op.
+ */
+ int cancelAutoFocus();
+
+ /**
+ * Take a picture.
+ */
+ int takePicture();
+
+ /**
+ * Cancel a picture that was started with takePicture. Calling this
+ * method when no picture is being taken is a no-op.
+ */
+ int cancelPicture();
+
+ /** Set the camera parameters. */
+ int setParameters(const char* params);
+ int setParameters(const CameraParameters& params);
+
+ /** Return the camera parameters. */
+ char* getParameters();
+ void putParameters(char *);
+
+ /**
+ * Send command to camera driver.
+ */
+ int sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+
+ /**
+ * Release the hardware resources owned by this object. Note that this is
+ * *not* done in the destructor.
+ */
+ void release();
+
+ /**
+ * Dump state of the camera hardware
+ */
+ int dump(int fd) const;
+
+
+ status_t storeMetaDataInBuffers(bool enable);
+
+ //@}
+
+/*--------------------Internal Member functions - Public---------------------------------*/
+
+public:
+ /** @name internalFunctionsPublic */
+ //@{
+
+ /** Constructor of CameraHal */
+ CameraHal(int cameraId);
+
+ // Destructor of CameraHal
+ ~CameraHal();
+
+ /** Initialize CameraHal */
+ status_t initialize(CameraProperties::Properties*);
+
+ /** Deinitialize CameraHal */
+ void deinitialize();
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+    //Uses the constructor timestamp as a reference to calculate the
+ // elapsed time
+ static void PPM(const char *);
+    //Uses a user-provided timestamp as a reference to calculate the
+ // elapsed time
+ static void PPM(const char *, struct timeval*, ...);
+
+#endif
+
+ /** Free image bufs */
+ status_t freeImageBufs();
+
+ //Signals the end of image capture
+ status_t signalEndImageCapture();
+
+ //Events
+ static void eventCallbackRelay(CameraHalEvent* event);
+ void eventCallback(CameraHalEvent* event);
+ void setEventProvider(int32_t eventMask, MessageNotifier * eventProvider);
+
+/*--------------------Internal Member functions - Private---------------------------------*/
+private:
+
+ /** @name internalFunctionsPrivate */
+ //@{
+
+ /** Set the camera parameters specific to Video Recording. */
+ bool setVideoModeParameters(const CameraParameters&);
+
+ /** Reset the camera parameters specific to Video Recording. */
+ bool resetVideoModeParameters();
+
+ /** Restart the preview with setParameter. */
+ status_t restartPreview();
+
+ status_t parseResolution(const char *resStr, int &width, int &height);
+
+ void insertSupportedParams();
+
+ /** Allocate preview data buffers */
+ status_t allocPreviewDataBufs(size_t size, size_t bufferCount);
+
+ /** Free preview data buffers */
+ status_t freePreviewDataBufs();
+
+ /** Allocate preview buffers */
+ status_t allocPreviewBufs(int width, int height, const char* previewFormat, unsigned int bufferCount, unsigned int &max_queueable);
+
+ /** Allocate video buffers */
+ status_t allocVideoBufs(uint32_t width, uint32_t height, uint32_t bufferCount);
+
+ /** Allocate image capture buffers */
+ status_t allocImageBufs(unsigned int width, unsigned int height, size_t length, const char* previewFormat, unsigned int bufferCount);
+
+ /** Free preview buffers */
+ status_t freePreviewBufs();
+
+ /** Free video bufs */
+ status_t freeVideoBufs(void *bufs);
+
+ //Check if a given resolution is supported by the current camera
+ //instance
+ bool isResolutionValid(unsigned int width, unsigned int height, const char *supportedResolutions);
+
+ //Check if a given parameter is supported by the current camera
+ // instance
+ bool isParameterValid(const char *param, const char *supportedParams);
+ bool isParameterValid(int param, const char *supportedParams);
+ status_t doesSetParameterNeedUpdate(const char *new_param, const char *old_params, bool &update);
+
+ /** Initialize default parameters */
+ void initDefaultParameters();
+
+ void dumpProperties(CameraProperties::Properties& cameraProps);
+
+ status_t startImageBracketing();
+
+ status_t stopImageBracketing();
+
+ void setShutter(bool enable);
+
+ void forceStopPreview();
+
+ void selectFPSRange(int framerate, int *min_fps, int *max_fps);
+
+ void setPreferredPreviewRes(int width, int height);
+ void resetPreviewRes(CameraParameters *mParams, int width, int height);
+
+ //@}
+
+
+/*----------Member variables - Public ---------------------*/
+public:
+ int32_t mMsgEnabled;
+ bool mRecordEnabled;
+ nsecs_t mCurrentTime;
+ bool mFalsePreview;
+ bool mPreviewEnabled;
+ uint32_t mTakePictureQueue;
+ bool mBracketingEnabled;
+ bool mBracketingRunning;
+ //User shutter override
+ bool mShutterEnabled;
+ bool mMeasurementEnabled;
+ //Google's parameter delimiter
+ static const char PARAMS_DELIMITER[];
+
+ CameraAdapter *mCameraAdapter;
+ sp<AppCallbackNotifier> mAppCallbackNotifier;
+ sp<DisplayAdapter> mDisplayAdapter;
+ sp<MemoryManager> mMemoryManager;
+
+ sp<IMemoryHeap> mPictureHeap;
+
+ int* mGrallocHandles;
+ bool mFpsRangeChangedByApp;
+
+
+
+
+
+///static member vars
+
+#if PPM_INSTRUMENTATION || PPM_INSTRUMENTATION_ABS
+
+ //Timestamp from the CameraHal constructor
+ static struct timeval ppm_start;
+ //Timestamp of the autoFocus command
+ static struct timeval mStartFocus;
+ //Timestamp of the startPreview command
+ static struct timeval mStartPreview;
+ //Timestamp of the takePicture command
+ static struct timeval mStartCapture;
+
+#endif
+
+/*----------Member variables - Private ---------------------*/
+private:
+ bool mDynamicPreviewSwitch;
+ //keeps paused state of display
+ bool mDisplayPaused;
+ //Index of current camera adapter
+ int mCameraIndex;
+
+ mutable Mutex mLock;
+
+ sp<SensorListener> mSensorListener;
+
+ void* mCameraAdapterHandle;
+
+ CameraParameters mParameters;
+ bool mPreviewRunning;
+ bool mPreviewStateOld;
+ bool mRecordingEnabled;
+ EventProvider *mEventProvider;
+
+ int32_t *mPreviewDataBufs;
+ uint32_t *mPreviewDataOffsets;
+ int mPreviewDataFd;
+ int mPreviewDataLength;
+ int32_t *mImageBufs;
+ uint32_t *mImageOffsets;
+ int mImageFd;
+ int mImageLength;
+ int32_t *mPreviewBufs;
+ uint32_t *mPreviewOffsets;
+ int mPreviewLength;
+ int mPreviewFd;
+ int32_t *mVideoBufs;
+ uint32_t *mVideoOffsets;
+ int mVideoFd;
+ int mVideoLength;
+
+ int mBracketRangePositive;
+ int mBracketRangeNegative;
+
+ ///@todo Rename this as preview buffer provider
+ BufferProvider *mBufProvider;
+ BufferProvider *mVideoBufProvider;
+
+
+ CameraProperties::Properties* mCameraProperties;
+
+ bool mPreviewStartInProgress;
+
+ bool mSetPreviewWindowCalled;
+
+ uint32_t mPreviewWidth;
+ uint32_t mPreviewHeight;
+ int32_t mMaxZoomSupported;
+
+ int mVideoWidth;
+ int mVideoHeight;
+
+};
+
+
+}; // namespace android
+
+#endif
diff --git a/inc/CameraProperties.h b/inc/CameraProperties.h
new file mode 100644
index 0000000..c13edf7
--- a/dev/null
+++ b/inc/CameraProperties.h
@@ -0,0 +1,199 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#ifndef CAMERA_PROPERTIES_H
+#define CAMERA_PROPERTIES_H
+
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <stdio.h>
+#include <dirent.h>
+#include <errno.h>
+#include <stdio.h>
+#include <string.h>
+#include <ctype.h>
+#include "cutils/properties.h"
+
+namespace android {
+
+//TODO
+#define MAX_CAMERAS_SUPPORTED 1
+#define MAX_SIMUL_CAMERAS_SUPPORTED 1
+#define MAX_PROP_NAME_LENGTH 50
+#define MAX_PROP_VALUE_LENGTH 2048
+
+#define EXIF_MAKE_DEFAULT "default_make"
+#define EXIF_MODEL_DEFAULT "default_model"
+
+// Class that handles the Camera Properties
+class CameraProperties
+{
+public:
+ static const char INVALID[];
+ static const char CAMERA_NAME[];
+ static const char CAMERA_SENSOR_INDEX[];
+ static const char ORIENTATION_INDEX[];
+ static const char FACING_INDEX[];
+ static const char S3D_SUPPORTED[];
+ static const char SUPPORTED_PREVIEW_SIZES[];
+ static const char SUPPORTED_PREVIEW_FORMATS[];
+ static const char SUPPORTED_PREVIEW_FRAME_RATES[];
+ static const char SUPPORTED_PICTURE_SIZES[];
+ static const char SUPPORTED_PICTURE_FORMATS[];
+ static const char SUPPORTED_THUMBNAIL_SIZES[];
+ static const char SUPPORTED_WHITE_BALANCE[];
+ static const char SUPPORTED_EFFECTS[];
+ static const char SUPPORTED_ANTIBANDING[];
+ static const char SUPPORTED_EXPOSURE_MODES[];
+ static const char SUPPORTED_EV_MIN[];
+ static const char SUPPORTED_EV_MAX[];
+ static const char SUPPORTED_EV_STEP[];
+ static const char SUPPORTED_ISO_VALUES[];
+ static const char SUPPORTED_SCENE_MODES[];
+ static const char SUPPORTED_FLASH_MODES[];
+ static const char SUPPORTED_FOCUS_MODES[];
+ static const char REQUIRED_PREVIEW_BUFS[];
+ static const char REQUIRED_IMAGE_BUFS[];
+ static const char SUPPORTED_ZOOM_RATIOS[];
+ static const char SUPPORTED_ZOOM_STAGES[];
+ static const char SUPPORTED_IPP_MODES[];
+ static const char SMOOTH_ZOOM_SUPPORTED[];
+ static const char ZOOM_SUPPORTED[];
+ static const char PREVIEW_SIZE[];
+ static const char PREVIEW_FORMAT[];
+ static const char PREVIEW_FRAME_RATE[];
+ static const char ZOOM[];
+ static const char PICTURE_SIZE[];
+ static const char PICTURE_FORMAT[];
+ static const char JPEG_THUMBNAIL_SIZE[];
+ static const char WHITEBALANCE[];
+ static const char EFFECT[];
+ static const char ANTIBANDING[];
+ static const char EXPOSURE_MODE[];
+ static const char EV_COMPENSATION[];
+ static const char ISO_MODE[];
+ static const char FOCUS_MODE[];
+ static const char SCENE_MODE[];
+ static const char FLASH_MODE[];
+ static const char JPEG_QUALITY[];
+ static const char BRIGHTNESS[];
+ static const char SATURATION[];
+ static const char SHARPNESS[];
+ static const char CONTRAST[];
+ static const char IPP[];
+ static const char GBCE[];
+ static const char AUTOCONVERGENCE[];
+ static const char AUTOCONVERGENCE_MODE[];
+ static const char MANUALCONVERGENCE_VALUES[];
+ static const char SENSOR_ORIENTATION[];
+ static const char SENSOR_ORIENTATION_VALUES[];
+ static const char REVISION[];
+ static const char FOCAL_LENGTH[];
+ static const char HOR_ANGLE[];
+ static const char VER_ANGLE[];
+ static const char EXIF_MAKE[];
+ static const char EXIF_MODEL[];
+ static const char JPEG_THUMBNAIL_QUALITY[];
+ static const char MAX_FOCUS_AREAS[];
+ static const char MAX_FD_HW_FACES[];
+ static const char MAX_FD_SW_FACES[];
+
+ static const char PARAMS_DELIMITER [];
+
+ static const char S3D2D_PREVIEW[];
+ static const char S3D2D_PREVIEW_MODES[];
+ static const char VSTAB[];
+ static const char VSTAB_SUPPORTED[];
+ static const char FRAMERATE_RANGE[];
+ static const char FRAMERATE_RANGE_IMAGE[];
+ static const char FRAMERATE_RANGE_VIDEO[];
+ static const char FRAMERATE_RANGE_SUPPORTED[];
+
+ static const char DEFAULT_VALUE[];
+
+ static const char AUTO_EXPOSURE_LOCK[];
+ static const char AUTO_EXPOSURE_LOCK_SUPPORTED[];
+ static const char AUTO_WHITEBALANCE_LOCK[];
+ static const char AUTO_WHITEBALANCE_LOCK_SUPPORTED[];
+ static const char MAX_NUM_METERING_AREAS[];
+ static const char METERING_AREAS[];
+ static const char MAX_NUM_FOCUS_AREAS[];
+
+ static const char VIDEO_SNAPSHOT_SUPPORTED[];
+
+ static const char VIDEO_SIZE[];
+ static const char SUPPORTED_VIDEO_SIZES[];
+ static const char PREFERRED_PREVIEW_SIZE_FOR_VIDEO[];
+
+ CameraProperties();
+ ~CameraProperties();
+
+ // container class passed around for accessing properties
+ class Properties
+ {
+ public:
+ Properties()
+ {
+ mProperties = new DefaultKeyedVector<String8, String8>(String8(DEFAULT_VALUE));
+ char property[PROPERTY_VALUE_MAX];
+ property_get("ro.product.manufacturer", property, EXIF_MAKE_DEFAULT);
+ property[0] = toupper(property[0]);
+ set(EXIF_MAKE, property);
+ property_get("ro.product.model", property, EXIF_MODEL_DEFAULT);
+ property[0] = toupper(property[0]);
+ set(EXIF_MODEL, property);
+ }
+ ~Properties()
+ {
+ delete mProperties;
+ }
+ ssize_t set(const char *prop, const char *value);
+ ssize_t set(const char *prop, int value);
+ const char* get(const char * prop);
+ void dump();
+
+ protected:
+ const char* keyAt(unsigned int);
+ const char* valueAt(unsigned int);
+
+ private:
+ DefaultKeyedVector<String8, String8>* mProperties;
+
+ };
+
+ ///Initializes the CameraProperties class
+ status_t initialize();
+ status_t loadProperties();
+ int camerasSupported();
+ int getProperties(int cameraIndex, Properties** properties);
+
+private:
+
+ uint32_t mCamerasSupported;
+ int mInitialized;
+ mutable Mutex mLock;
+
+ Properties mCameraProps[MAX_CAMERAS_SUPPORTED];
+
+};
+
+};
+
+#endif //CAMERA_PROPERTIES_H
+
diff --git a/inc/Encoder_libjpeg.h b/inc/Encoder_libjpeg.h
new file mode 100755
index 0000000..e3e9ac5
--- a/dev/null
+++ b/inc/Encoder_libjpeg.h
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file Encoder_libjpeg.h
+*
+* This defines API for camerahal to encode YUV using libjpeg
+*
+*/
+
+#ifndef ANDROID_CAMERA_HARDWARE_ENCODER_LIBJPEG_H
+#define ANDROID_CAMERA_HARDWARE_ENCODER_LIBJPEG_H
+
+#include <utils/threads.h>
+#include <utils/RefBase.h>
+
+extern "C" {
+#include "jhead.h"
+}
+namespace android {
+/**
+ * libjpeg encoder class - uses libjpeg to encode yuv
+ */
+
+#define MAX_EXIF_TAGS_SUPPORTED 30
+typedef void (*encoder_libjpeg_callback_t) (void* main_jpeg,
+ void* thumb_jpeg,
+ CameraFrame::FrameType type,
+ void* cookie1,
+ void* cookie2,
+ void* cookie3);
+
+static const char TAG_MODEL[] = "Model";
+static const char TAG_MAKE[] = "Make";
+static const char TAG_FOCALLENGTH[] = "FocalLength";
+static const char TAG_DATETIME[] = "DateTime";
+static const char TAG_IMAGE_WIDTH[] = "ImageWidth";
+static const char TAG_IMAGE_LENGTH[] = "ImageLength";
+static const char TAG_GPS_LAT[] = "GPSLatitude";
+static const char TAG_GPS_LAT_REF[] = "GPSLatitudeRef";
+static const char TAG_GPS_LONG[] = "GPSLongitude";
+static const char TAG_GPS_LONG_REF[] = "GPSLongitudeRef";
+static const char TAG_GPS_ALT[] = "GPSAltitude";
+static const char TAG_GPS_ALT_REF[] = "GPSAltitudeRef";
+static const char TAG_GPS_MAP_DATUM[] = "GPSMapDatum";
+static const char TAG_GPS_PROCESSING_METHOD[] = "GPSProcessingMethod";
+static const char TAG_GPS_VERSION_ID[] = "GPSVersionID";
+static const char TAG_GPS_TIMESTAMP[] = "GPSTimeStamp";
+static const char TAG_GPS_DATESTAMP[] = "GPSDateStamp";
+static const char TAG_ORIENTATION[] = "Orientation";
+
+class ExifElementsTable {
+ public:
+ ExifElementsTable() :
+ gps_tag_count(0), exif_tag_count(0), position(0),
+ jpeg_opened(false) { }
+ ~ExifElementsTable();
+
+ status_t insertElement(const char* tag, const char* value);
+ void insertExifToJpeg(unsigned char* jpeg, size_t jpeg_size);
+ status_t insertExifThumbnailImage(const char*, int);
+ void saveJpeg(unsigned char* picture, size_t jpeg_size);
+ static const char* degreesToExifOrientation(const char*);
+ static void stringToRational(const char*, unsigned int*, unsigned int*);
+ static bool isAsciiTag(const char* tag);
+ private:
+ ExifElement_t table[MAX_EXIF_TAGS_SUPPORTED];
+ unsigned int gps_tag_count;
+ unsigned int exif_tag_count;
+ unsigned int position;
+ bool jpeg_opened;
+};
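+
+/* Illustrative usage sketch (the tag values below are made up):
+ *     ExifElementsTable* exif = new ExifElementsTable();
+ *     exif->insertElement(TAG_MAKE, "Amlogic");
+ *     exif->insertElement(TAG_ORIENTATION, "90");
+ *     // once the main JPEG is encoded: exif->insertExifToJpeg(jpegBuf, jpegSize);
+ */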
+
+class Encoder_libjpeg : public Thread {
+ /* public member types and variables */
+ public:
+ struct params {
+ uint8_t* src;
+ int src_size;
+ uint8_t* dst;
+ int dst_size;
+ int quality;
+ int in_width;
+ int in_height;
+ int out_width;
+ int out_height;
+ const char* format;
+ size_t jpeg_size;
+ };
+ /* public member functions */
+ public:
+ Encoder_libjpeg(params* main_jpeg,
+ params* tn_jpeg,
+ encoder_libjpeg_callback_t cb,
+ CameraFrame::FrameType type,
+ void* cookie1,
+ void* cookie2,
+ void* cookie3)
+ : Thread(false), mMainInput(main_jpeg), mThumbnailInput(tn_jpeg), mCb(cb),
+ mCancelEncoding(false), mCookie1(cookie1), mCookie2(cookie2), mCookie3(cookie3),
+ mType(type), mThumb(NULL) {
+ this->incStrong(this);
+ }
+
+ ~Encoder_libjpeg() {
+ CAMHAL_LOGVB("~Encoder_libjpeg(%p)", this);
+ }
+
+ virtual bool threadLoop() {
+ size_t size = 0;
+ sp<Encoder_libjpeg> tn = NULL;
+ if (mThumbnailInput) {
+ // start thread to encode thumbnail
+ mThumb = new Encoder_libjpeg(mThumbnailInput, NULL, NULL, mType, NULL, NULL, NULL);
+ mThumb->run();
+ }
+
+ // encode our main image
+ size = encode(mMainInput);
+
+ // check if it is main jpeg thread
+ if(mThumb.get()) {
+ // wait until tn jpeg thread exits.
+ mThumb->join();
+ mThumb.clear();
+ mThumb = NULL;
+ }
+
+ if(mCb) {
+ mCb(mMainInput, mThumbnailInput, mType, mCookie1, mCookie2, mCookie3);
+ }
+
+ // encoder thread runs, self-destructs, and then exits
+ this->decStrong(this);
+ return false;
+ }
+
+ void cancel() {
+ if (mThumb.get()) {
+ mThumb->cancel();
+ }
+ mCancelEncoding = true;
+ }
+
+ private:
+ params* mMainInput;
+ params* mThumbnailInput;
+ encoder_libjpeg_callback_t mCb;
+ bool mCancelEncoding;
+ void* mCookie1;
+ void* mCookie2;
+ void* mCookie3;
+ CameraFrame::FrameType mType;
+ sp<Encoder_libjpeg> mThumb;
+
+ size_t encode(params*);
+};
+
+}
+
+#endif
diff --git a/inc/General3A_Settings.h b/inc/General3A_Settings.h
new file mode 100644
index 0000000..dab1f8e
--- a/dev/null
+++ b/inc/General3A_Settings.h
@@ -0,0 +1,280 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file General3A_Settings.h
+*
+* This file maps the Camera Hardware Interface to OMX.
+*
+*/
+
+#include "OMX_TI_IVCommon.h"
+#include "OMX_TI_Common.h"
+#include "OMX_TI_Index.h"
+#include "TICameraParameters.h"
+
+#ifndef GENERAL_3A_SETTINGS_H
+#define GENERAL_3A_SETTINGS_H
+
+#define FOCUS_FACE_PRIORITY OMX_IMAGE_FocusControlMax -1
+#define FOCUS_REGION_PRIORITY OMX_IMAGE_FocusControlMax -2
+#define WB_FACE_PRIORITY OMX_WhiteBalControlMax -1
+#define EXPOSURE_FACE_PRIORITY OMX_ExposureControlMax - 1
+
+namespace android {
+
+struct userToOMX_LUT{
+ const char * userDefinition;
+ int omxDefinition;
+};
+
+struct LUTtype{
+ int size;
+ const userToOMX_LUT *Table;
+};
+
+const userToOMX_LUT isoUserToOMX[] = {
+ { TICameraParameters::ISO_MODE_AUTO, 0 },
+ { TICameraParameters::ISO_MODE_100, 100 },
+ { TICameraParameters::ISO_MODE_200, 200 },
+ { TICameraParameters::ISO_MODE_400, 400 },
+ { TICameraParameters::ISO_MODE_800, 800 },
+ { TICameraParameters::ISO_MODE_1000, 1000 },
+ { TICameraParameters::ISO_MODE_1200, 1200 },
+ { TICameraParameters::ISO_MODE_1600, 1600 },
+};
+
+const userToOMX_LUT effects_UserToOMX [] = {
+ { CameraParameters::EFFECT_NONE, OMX_ImageFilterNone },
+ { CameraParameters::EFFECT_NEGATIVE, OMX_ImageFilterNegative },
+ { CameraParameters::EFFECT_SOLARIZE, OMX_ImageFilterSolarize },
+ { CameraParameters::EFFECT_SEPIA, OMX_ImageFilterSepia },
+ { CameraParameters::EFFECT_MONO, OMX_ImageFilterGrayScale },
+ { CameraParameters::EFFECT_BLACKBOARD, OMX_TI_ImageFilterBlackBoard },
+ { CameraParameters::EFFECT_WHITEBOARD, OMX_TI_ImageFilterWhiteBoard },
+ { CameraParameters::EFFECT_AQUA, OMX_TI_ImageFilterAqua },
+ { CameraParameters::EFFECT_POSTERIZE, OMX_TI_ImageFilterPosterize },
+#ifdef OMAP_ENHANCEMENT
+ { TICameraParameters::EFFECT_NATURAL, OMX_ImageFilterNatural },
+ { TICameraParameters::EFFECT_VIVID, OMX_ImageFilterVivid },
+ { TICameraParameters::EFFECT_COLOR_SWAP, OMX_ImageFilterColourSwap },
+ { TICameraParameters::EFFECT_BLACKWHITE, OMX_TI_ImageFilterBlackWhite }
+#endif
+};
+
+const userToOMX_LUT scene_UserToOMX [] = {
+ { CameraParameters::SCENE_MODE_AUTO, OMX_Manual },
+ { CameraParameters::SCENE_MODE_ACTION, OMX_TI_Action },
+ { CameraParameters::SCENE_MODE_NIGHT, OMX_TI_Night },
+ { CameraParameters::SCENE_MODE_PARTY, OMX_TI_Party },
+ { CameraParameters::SCENE_MODE_SUNSET, OMX_TI_Sunset },
+/*********** TODO: These scene modes are not verified. ************
+ ***************** Have to verify and re-enable later. **************
+ { CameraParameters::SCENE_MODE_THEATRE, OMX_TI_Theatre },
+ { CameraParameters::SCENE_MODE_LANDSCAPE, OMX_Landscape },
+ { CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, OMX_NightPortrait },
+ { CameraParameters::SCENE_MODE_FIREWORKS, OMX_Fireworks },
+ { CameraParameters::SCENE_MODE_BEACH, OMX_TI_Beach },
+ { CameraParameters::SCENE_MODE_CANDLELIGHT, OMX_TI_Candlelight },
+ { CameraParameters::SCENE_MODE_PORTRAIT, OMX_TI_Portrait },
+ { CameraParameters::SCENE_MODE_SNOW, OMX_TI_Snow },
+ { CameraParameters::SCENE_MODE_STEADYPHOTO, OMX_TI_Steadyphoto },
+*********************************************************************/
+#ifdef OMAP_ENHANCEMENT
+ { TICameraParameters::SCENE_MODE_CLOSEUP, OMX_Closeup },
+ { TICameraParameters::SCENE_MODE_AQUA, OMX_Underwater },
+ { TICameraParameters::SCENE_MODE_SPORT, OMX_Sport },
+ { TICameraParameters::SCENE_MODE_MOOD, OMX_Mood },
+ { TICameraParameters::SCENE_MODE_NIGHT_INDOOR, OMX_NightIndoor },
+ { TICameraParameters::SCENE_MODE_DOCUMENT, OMX_Document },
+ { TICameraParameters::SCENE_MODE_BARCODE, OMX_Barcode },
+ { TICameraParameters::SCENE_MODE_VIDEO_SUPER_NIGHT, OMX_SuperNight },
+ { TICameraParameters::SCENE_MODE_VIDEO_CINE, OMX_Cine },
+ { TICameraParameters::SCENE_MODE_VIDEO_OLD_FILM, OMX_OldFilm },
+#endif
+};
+
+const userToOMX_LUT whiteBal_UserToOMX [] = {
+ { CameraParameters::WHITE_BALANCE_AUTO, OMX_WhiteBalControlAuto },
+ { CameraParameters::WHITE_BALANCE_DAYLIGHT, OMX_WhiteBalControlSunLight },
+ { CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, OMX_WhiteBalControlCloudy },
+ { CameraParameters::WHITE_BALANCE_FLUORESCENT, OMX_WhiteBalControlFluorescent },
+ { CameraParameters::WHITE_BALANCE_INCANDESCENT, OMX_WhiteBalControlIncandescent },
+/********************** THESE ARE CURRENTLY NOT TUNED PROPERLY *************************
+ { CameraParameters::WHITE_BALANCE_SHADE, OMX_TI_WhiteBalControlShade },
+ { CameraParameters::WHITE_BALANCE_TWILIGHT, OMX_TI_WhiteBalControlTwilight },
+ { CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT, OMX_TI_WhiteBalControlWarmFluorescent },
+**************************************************************************************/
+#ifdef OMAP_ENHANCEMENT
+ { TICameraParameters::WHITE_BALANCE_TUNGSTEN, OMX_WhiteBalControlTungsten },
+ { TICameraParameters::WHITE_BALANCE_HORIZON, OMX_WhiteBalControlHorizon },
+ { TICameraParameters::WHITE_BALANCE_FACE, WB_FACE_PRIORITY },
+ { TICameraParameters::WHITE_BALANCE_SUNSET, OMX_TI_WhiteBalControlSunset }
+#endif
+};
+
+const userToOMX_LUT antibanding_UserToOMX [] = {
+ { CameraParameters::ANTIBANDING_OFF, OMX_FlickerCancelOff },
+ { CameraParameters::ANTIBANDING_AUTO, OMX_FlickerCancelAuto },
+ { CameraParameters::ANTIBANDING_50HZ, OMX_FlickerCancel50 },
+ { CameraParameters::ANTIBANDING_60HZ, OMX_FlickerCancel60 }
+};
+
+const userToOMX_LUT focus_UserToOMX [] = {
+ { CameraParameters::FOCUS_MODE_AUTO, OMX_IMAGE_FocusControlAutoLock },
+ { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlAutoInfinity },
+ { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlHyperfocal },
+ { CameraParameters::FOCUS_MODE_MACRO, OMX_IMAGE_FocusControlAutoMacro },
+ { CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, OMX_IMAGE_FocusControlAuto },
+ { CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, OMX_IMAGE_FocusControlAuto },
+#ifdef OMAP_ENHANCEMENT
+ { TICameraParameters::FOCUS_MODE_FACE , FOCUS_FACE_PRIORITY },
+ { TICameraParameters::FOCUS_MODE_PORTRAIT, OMX_IMAGE_FocusControlPortrait },
+ { TICameraParameters::FOCUS_MODE_EXTENDED, OMX_IMAGE_FocusControlExtended },
+#endif
+};
+
+const userToOMX_LUT exposure_UserToOMX [] = {
+ { TICameraParameters::EXPOSURE_MODE_OFF, OMX_ExposureControlOff },
+ { TICameraParameters::EXPOSURE_MODE_AUTO, OMX_ExposureControlAuto },
+ { TICameraParameters::EXPOSURE_MODE_NIGHT, OMX_ExposureControlNight },
+ { TICameraParameters::EXPOSURE_MODE_BACKLIGHT, OMX_ExposureControlBackLight },
+ { TICameraParameters::EXPOSURE_MODE_SPOTLIGHT, OMX_ExposureControlSpotLight},
+ { TICameraParameters::EXPOSURE_MODE_SPORTS, OMX_ExposureControlSports },
+ { TICameraParameters::EXPOSURE_MODE_SNOW, OMX_ExposureControlSnow },
+ { TICameraParameters::EXPOSURE_MODE_BEACH, OMX_ExposureControlBeach },
+ { TICameraParameters::EXPOSURE_MODE_APERTURE, OMX_ExposureControlLargeAperture },
+ { TICameraParameters::EXPOSURE_MODE_SMALL_APERTURE, OMX_ExposureControlSmallApperture },
+ { TICameraParameters::EXPOSURE_MODE_FACE, EXPOSURE_FACE_PRIORITY },
+};
+
+const userToOMX_LUT flash_UserToOMX [] = {
+ { CameraParameters::FLASH_MODE_OFF ,OMX_IMAGE_FlashControlOff },
+ { CameraParameters::FLASH_MODE_ON ,OMX_IMAGE_FlashControlOn },
+ { CameraParameters::FLASH_MODE_AUTO ,OMX_IMAGE_FlashControlAuto },
+ { CameraParameters::FLASH_MODE_TORCH ,OMX_IMAGE_FlashControlTorch },
+ { CameraParameters::FLASH_MODE_RED_EYE ,OMX_IMAGE_FlashControlRedEyeReduction },
+#ifdef OMAP_ENHANCEMENT
+ { TICameraParameters::FLASH_MODE_FILL_IN ,OMX_IMAGE_FlashControlFillin }
+#endif
+};
+
+const LUTtype ExpLUT =
+ {
+ sizeof(exposure_UserToOMX)/sizeof(exposure_UserToOMX[0]),
+ exposure_UserToOMX
+ };
+
+const LUTtype WBalLUT =
+ {
+ sizeof(whiteBal_UserToOMX)/sizeof(whiteBal_UserToOMX[0]),
+ whiteBal_UserToOMX
+ };
+
+const LUTtype FlickerLUT =
+ {
+ sizeof(antibanding_UserToOMX)/sizeof(antibanding_UserToOMX[0]),
+ antibanding_UserToOMX
+ };
+
+const LUTtype SceneLUT =
+ {
+ sizeof(scene_UserToOMX)/sizeof(scene_UserToOMX[0]),
+ scene_UserToOMX
+ };
+
+const LUTtype FlashLUT =
+ {
+ sizeof(flash_UserToOMX)/sizeof(flash_UserToOMX[0]),
+ flash_UserToOMX
+ };
+
+const LUTtype EffLUT =
+ {
+ sizeof(effects_UserToOMX)/sizeof(effects_UserToOMX[0]),
+ effects_UserToOMX
+ };
+
+const LUTtype FocusLUT =
+ {
+ sizeof(focus_UserToOMX)/sizeof(focus_UserToOMX[0]),
+ focus_UserToOMX
+ };
+
+const LUTtype IsoLUT =
+ {
+ sizeof(isoUserToOMX)/sizeof(isoUserToOMX[0]),
+ isoUserToOMX
+ };
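Each table above pairs a HAL parameter string with its OMX counterpart, and LUTtype wraps a table together with its entry count so the mapping can be table-driven. A hedged sketch of such a lookup (the helper name is illustrative, not part of this header, and needs <string.h> for strcmp):

    // Hypothetical helper: resolve a HAL string to its OMX value, -1 if not found.
    static int lookupHALtoOMX(const char* halValue, const LUTtype& lut)
    {
        if (halValue == NULL)
            return -1;
        for (int i = 0; i < lut.size; i++) {
            if (strcmp(lut.Table[i].userDefinition, halValue) == 0)
                return lut.Table[i].omxDefinition;
        }
        return -1;
    }

    // e.g. lookupHALtoOMX(CameraParameters::WHITE_BALANCE_DAYLIGHT, WBalLUT)
    //      resolves to OMX_WhiteBalControlSunLight via the table above.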
+
+/*
+* class Gen3A_settings
+* Stores the current 3A settings.
+* The lookup tables above map these settings
+* from the HAL definitions to their OMX equivalents.
+*/
+class Gen3A_settings{
+ public:
+
+ int Exposure;
+ int WhiteBallance;
+ int Flicker;
+ int SceneMode;
+ int Effect;
+ int Focus;
+ int EVCompensation;
+ int Contrast;
+ int Saturation;
+ int Sharpness;
+ int ISO;
+ int FlashMode;
+
+ unsigned int Brightness;
+ OMX_BOOL ExposureLock;
+ OMX_BOOL FocusLock;
+ OMX_BOOL WhiteBalanceLock;
+};
+
+/*
+* Flags raised when a setting is changed
+*/
+enum E3ASettingsFlags
+{
+ SetSceneMode = 1 << 0,
+ SetEVCompensation = 1 << 1,
+ SetWhiteBallance = 1 << 2,
+ SetFlicker = 1 << 3,
+ SetExposure = 1 << 4,
+ SetSharpness = 1 << 5,
+ SetBrightness = 1 << 6,
+ SetContrast = 1 << 7,
+ SetISO = 1 << 8,
+ SetSaturation = 1 << 9,
+ SetEffect = 1 << 10,
+ SetFocus = 1 << 11,
+ SetExpMode = 1 << 14,
+ SetFlash = 1 << 15,
+ SetExpLock = 1 << 16,
+ SetWBLock = 1 << 17,
+ SetMeteringAreas = 1 << 18,
+
+ E3aSettingMax,
+ E3AsettingsAll = ( ((E3aSettingMax -1 ) << 1) -1 ) /// all possible flags raised
+};
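The E3AsettingsAll expression works because SetMeteringAreas = 1 << 18 is the highest individual flag, so the auto-assigned E3aSettingMax equals (1 << 18) + 1; ((E3aSettingMax - 1) << 1) - 1 then evaluates to (1 << 19) - 1 = 0x7FFFF, i.e. bits 0 through 18 all set, which covers every flag above (including the currently unused bit positions 12 and 13). An illustrative check, not part of the header:

    // Sanity check only.
    enum { kAll3A = ((E3aSettingMax - 1) << 1) - 1 };   // 0x7FFFF
    // (kAll3A & SetSceneMode) and (kAll3A & SetMeteringAreas) are both non-zero.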
+
+};
+
+#endif //GENERAL_3A_SETTINGS_H
diff --git a/inc/NV12_resize.h b/inc/NV12_resize.h
new file mode 100644
index 0000000..927faf8
--- a/dev/null
+++ b/inc/NV12_resize.h
@@ -0,0 +1,148 @@
+#ifndef NV12_RESIZE_H_
+#define NV12_RESIZE_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef unsigned char mmBool;
+typedef unsigned char mmUchar;
+typedef unsigned char mmUint8;
+typedef unsigned char mmByte;
+typedef unsigned short mmUint16;
+typedef unsigned int mmUint32;
+typedef unsigned long mmUint64;
+typedef signed char mmInt8;
+typedef char mmChar;
+typedef signed short mmInt16;
+typedef signed int mmInt32;
+typedef signed long mmLong;
+typedef signed int mmHandle;
+typedef float mmFloat;
+typedef double mmDouble;
+typedef int HObj;
+typedef HObj HFile;
+typedef int HDir;
+typedef void* mmMutexHandle;
+typedef struct _fstat
+{
+ mmInt32 fileSize;
+}VE_FileAttribute;
+
+typedef struct
+{
+ mmInt32 second;
+ mmInt32 millisecond;
+}tsVE_Time;
+
+typedef struct
+{
+ mmInt32 year;
+ mmInt32 month;
+ mmInt32 day;
+ mmInt32 hour;
+ mmInt32 minute;
+ mmInt32 second;
+} TmDateTime;
+
+/*----------------------------------------------------------------------------
+ Define : TRUE/FALSE for boolean operations
+----------------------------------------------------------------------------*/
+
+#ifndef TRUE
+ #define TRUE 1
+#endif
+
+#ifndef FALSE
+ #define FALSE 0
+#endif
+
+#ifndef NULL
+ #define NULL 0
+#endif
+
+const mmUint8 bWeights[8][8][4] = {
+ {{64, 0, 0, 0}, {56, 0, 0, 8}, {48, 0, 0,16}, {40, 0, 0,24},
+ {32, 0, 0,32}, {24, 0, 0,40}, {16, 0, 0,48}, { 8, 0, 0,56}},
+
+ {{56, 8, 0, 0}, {49, 7, 1, 7}, {42, 6, 2,14}, {35, 5, 3,21},
+ {28, 4, 4,28}, {21, 3, 5,35}, {14, 2, 6,42}, { 7, 1, 7,49}},
+
+ {{48,16, 0, 0}, {42,14, 2, 6}, {36,12,4 ,12}, {30,10,6 ,18},
+ {24, 8, 8,24}, {18, 6,10,30}, {12,4 ,12,36}, { 6, 2,14,42}},
+
+ {{40,24,0 ,0 }, {35,21, 3, 5}, {30,18, 6,10}, {25,15, 9,15},
+ {20,12,12,20}, {15, 9,15,25}, {10, 6,18,30}, { 5, 3,21,35}},
+
+ {{32,32, 0,0 }, {28,28, 4, 4}, {24,24, 8, 8}, {20,20,12,12},
+ {16,16,16,16}, {12,12,20,20}, { 8, 8,24,24}, { 4, 4,28,28}},
+
+ {{24,40,0 ,0 }, {21,35, 5, 3}, {18,30,10, 6}, {15,25,15, 9},
+ {12,20,20,12}, { 9,15,25,15}, { 6,10,30,18}, { 3, 5,35,21}},
+
+ {{16,48, 0,0 }, {14,42, 6, 2}, {12,36,12, 4}, {10,30,18, 6},
+ {8 ,24,24,8 }, { 6,18,30,10}, { 4,12,36,12}, { 2, 6,42,14}},
+
+ {{ 8,56, 0,0 }, { 7,49, 7, 1}, { 6,42,14, 2}, { 5,35,21, 3},
+ { 4,28,28,4 }, { 3,21,35, 5}, { 2,14,42, 6}, { 1,7 ,49, 7}}
+};
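bWeights holds bilinear interpolation weights for sub-pixel offsets in steps of 1/8: each 4-tuple is the weight set for the four neighbouring source pixels, pre-scaled so the entries sum to 64, which lets the weighted sum be normalized with a shift instead of a divide. For the offset pair (1/8, 1/8), for example, the entry {49, 7, 1, 7} is {7*7, 7*1, 1*1, 1*7}. A quick illustrative check (needs <assert.h>, not part of the header):

    // Every 4-tuple should sum to 64 (= 8 * 8).
    for (int i = 0; i < 8; i++)
        for (int j = 0; j < 8; j++)
            assert(bWeights[i][j][0] + bWeights[i][j][1] +
                   bWeights[i][j][2] + bWeights[i][j][3] == 64);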
+
+typedef enum
+{
+ IC_FORMAT_NONE,
+ IC_FORMAT_RGB565,
+ IC_FORMAT_RGB888,
+ IC_FORMAT_YCbCr420_lp,
+ IC_FORMAT_YCbCr,
+ IC_FORMAT_YCbCr420_FRAME_PK,
+ IC_FORMAT_MAX
+}enumImageFormat;
+
+/* This structure defines the format of an image */
+typedef struct
+{
+ mmInt32 uWidth;
+ mmInt32 uHeight;
+ mmInt32 uStride;
+ enumImageFormat eFormat;
+ mmByte *imgPtr;
+ mmByte *clrPtr;
+ mmInt32 uOffset;
+} structConvImage;
+
+typedef struct IC_crop_struct
+{
+ mmUint32 x; /* x pos of rectangle */
+ mmUint32 y; /* y pos of rectangle */
+ mmUint32 uWidth; /* dx of rectangle */
+ mmUint32 uHeight; /* dy of rectangle */
+} IC_rect_type;
+
+/*==========================================================================
+* Function Name : VT_resizeFrame_Video_opt2_lp
+*
+* Description : Resize a yuv frame.
+*
+* Input(s) : input_img_ptr -> Input Image Structure
+* : output_img_ptr -> Output Image Structure
+* : cropout -> crop structure
+*
+* Value Returned : mmBool -> FALSE on error, TRUE on success
+* NOTE:
+* Not tested for crop functionality.
+* This is the faster version.
+============================================================================*/
+mmBool
+VT_resizeFrame_Video_opt2_lp
+(
+ structConvImage* i_img_ptr, /* Points to the input image */
+ structConvImage* o_img_ptr, /* Points to the output image */
+ IC_rect_type* cropout, /* how much to resize to in final image */
+ mmUint16 dummy /* Transparent pixel value */
+ );
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif //#define NV12_RESIZE_H_
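A hedged sketch of driving VT_resizeFrame_Video_opt2_lp for a YCbCr420 semi-planar downscale; the plane pointers and dimensions are placeholders and error handling is minimal:

    structConvImage in, out;
    IC_rect_type crop;

    in.uWidth  = 1280;  in.uHeight = 720;  in.uStride = 1280;
    in.eFormat = IC_FORMAT_YCbCr420_lp;
    in.imgPtr  = srcY;        // luma plane (placeholder)
    in.clrPtr  = srcUV;       // interleaved chroma plane (placeholder)
    in.uOffset = 0;

    out.uWidth  = 640;  out.uHeight = 360;  out.uStride = 640;
    out.eFormat = IC_FORMAT_YCbCr420_lp;
    out.imgPtr  = dstY;
    out.clrPtr  = dstUV;
    out.uOffset = 0;

    crop.x = 0;  crop.y = 0;
    crop.uWidth = 640;  crop.uHeight = 360;    // resize into the full output rect

    if (!VT_resizeFrame_Video_opt2_lp(&in, &out, &crop, 0))
        LOGE("resize failed");                 // returns FALSE on error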
diff --git a/inc/SensorListener.h b/inc/SensorListener.h
new file mode 100644
index 0000000..913eb95
--- a/dev/null
+++ b/inc/SensorListener.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+* @file SensorListener.h
+*
+* This defines the API used by the camera HAL to receive sensor events.
+*
+*/
+
+#ifndef ANDROID_CAMERA_HARDWARE_SENSOR_LISTENER_H
+#define ANDROID_CAMERA_HARDWARE_SENSOR_LISTENER_H
+
+#include <android/sensor.h>
+#include <gui/Sensor.h>
+#include <gui/SensorManager.h>
+#include <gui/SensorEventQueue.h>
+#include <utils/Looper.h>
+
+namespace android {
+
+/**
+ * SensorListener class - registers with the sensor manager to receive sensor events
+ */
+
+typedef void (*orientation_callback_t) (uint32_t orientation, uint32_t tilt, void* cookie);
+
+class SensorLooperThread : public Thread {
+ public:
+ SensorLooperThread(Looper* looper)
+ : Thread(false) {
+ mLooper = sp<Looper>(looper);
+ }
+ ~SensorLooperThread() {
+ mLooper.clear();
+ }
+
+ virtual bool threadLoop() {
+ int32_t ret = mLooper->pollOnce(-1);
+ return true;
+ }
+
+ // force looper wake up
+ void wake() {
+ mLooper->wake();
+ }
+ private:
+ sp<Looper> mLooper;
+};
+
+
+class SensorListener : public RefBase
+{
+/* public - types */
+public:
+ typedef enum {
+ SENSOR_ACCELEROMETER = 1 << 0,
+ SENSOR_MAGNETIC_FIELD = 1 << 1,
+ SENSOR_GYROSCOPE = 1 << 2,
+ SENSOR_LIGHT = 1 << 3,
+ SENSOR_PROXIMITY = 1 << 4,
+ SENSOR_ORIENTATION = 1 << 5,
+ } sensor_type_t;
+/* public - functions */
+public:
+ SensorListener();
+ ~SensorListener();
+ status_t initialize();
+ void setCallbacks(orientation_callback_t orientation_cb, void *cookie);
+ void enableSensor(sensor_type_t type);
+ void disableSensor(sensor_type_t type);
+ void handleOrientation(uint32_t orientation, uint32_t tilt);
+/* public - member variables */
+public:
+ sp<SensorEventQueue> mSensorEventQueue;
+/* private - member variables */
+private:
+ int sensorsEnabled;
+ orientation_callback_t mOrientationCb;
+ void *mCbCookie;
+ sp<Looper> mLooper;
+ sp<SensorLooperThread> mSensorLooperThread;
+ Mutex mLock;
+};
+
+}
+
+#endif
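A minimal sketch of how the camera HAL side might use this listener to receive device orientation updates; the callback name and cookie are placeholders:

    // Hypothetical callback matching orientation_callback_t.
    static void onOrientation(uint32_t orientation, uint32_t tilt, void* cookie) {
        // e.g. forward to the adapter's onOrientationEvent(orientation, tilt)
    }

    sp<SensorListener> listener = new SensorListener();
    if (listener->initialize() == NO_ERROR) {
        listener->setCallbacks(onOrientation, adapterCookie /* placeholder */);
        listener->enableSensor(SensorListener::SENSOR_ORIENTATION);
        // ... when preview stops:
        listener->disableSensor(SensorListener::SENSOR_ORIENTATION);
    }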
diff --git a/inc/TICameraParameters.h b/inc/TICameraParameters.h
new file mode 100644
index 0000000..633e5b6
--- a/dev/null
+++ b/inc/TICameraParameters.h
@@ -0,0 +1,242 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+
+#ifndef TI_CAMERA_PARAMETERS_H
+#define TI_CAMERA_PARAMETERS_H
+
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+
+namespace android {
+
+///TI Specific Camera Parameters
+class TICameraParameters
+{
+public:
+
+// Supported Camera indexes
+// Example value: "0,1,2,3", where 0-primary, 1-secondary1, 2-secondary2, 3-stereocamera
+static const char KEY_SUPPORTED_CAMERAS[];
+// Select logical Camera index
+static const char KEY_CAMERA[];
+static const char KEY_CAMERA_NAME[];
+static const char KEY_S3D_SUPPORTED[];
+static const char KEY_BURST[];
+static const char KEY_CAP_MODE[];
+static const char KEY_VNF[];
+static const char KEY_SATURATION[];
+static const char KEY_BRIGHTNESS[];
+static const char KEY_EXPOSURE_MODE[];
+static const char KEY_SUPPORTED_EXPOSURE[];
+static const char KEY_CONTRAST[];
+static const char KEY_SHARPNESS[];
+static const char KEY_ISO[];
+static const char KEY_SUPPORTED_ISO_VALUES[];
+static const char KEY_SUPPORTED_IPP[];
+static const char KEY_IPP[];
+static const char KEY_MAN_EXPOSURE[];
+static const char KEY_METERING_MODE[];
+static const char KEY_PADDED_WIDTH[];
+static const char KEY_PADDED_HEIGHT[];
+static const char KEY_EXP_BRACKETING_RANGE[];
+static const char KEY_TEMP_BRACKETING[];
+static const char KEY_TEMP_BRACKETING_RANGE_POS[];
+static const char KEY_TEMP_BRACKETING_RANGE_NEG[];
+static const char KEY_SHUTTER_ENABLE[];
+static const char KEY_MEASUREMENT_ENABLE[];
+static const char KEY_INITIAL_VALUES[];
+static const char KEY_GBCE[];
+static const char KEY_GLBCE[];
+static const char KEY_MINFRAMERATE[];
+static const char KEY_MAXFRAMERATE[];
+
+// TI recording hint to notify camera adapters of possible recording
+static const char KEY_RECORDING_HINT[];
+static const char KEY_AUTO_FOCUS_LOCK[];
+static const char KEY_CURRENT_ISO[];
+
+static const char KEY_SENSOR_ORIENTATION[];
+static const char KEY_SENSOR_ORIENTATION_VALUES[];
+
+//TI extensions for zoom
+static const char ZOOM_SUPPORTED[];
+static const char ZOOM_UNSUPPORTED[];
+
+//TI extensions for camera capabilities
+static const char INITIAL_VALUES_TRUE[];
+static const char INITIAL_VALUES_FALSE[];
+
+//TI extensions for enabling/disabling measurements
+static const char MEASUREMENT_ENABLE[];
+static const char MEASUREMENT_DISABLE[];
+
+// TI extensions to add values for ManualConvergence and AutoConvergence mode
+static const char KEY_AUTOCONVERGENCE[];
+static const char KEY_AUTOCONVERGENCE_MODE[];
+static const char KEY_MANUALCONVERGENCE_VALUES[];
+
+//TI extensions for enabling/disabling GLBCE
+static const char GLBCE_ENABLE[];
+static const char GLBCE_DISABLE[];
+
+//TI extensions for enabling/disabling GBCE
+static const char GBCE_ENABLE[];
+static const char GBCE_DISABLE[];
+
+// TI extensions to add Min frame rate Values
+static const char VIDEO_MINFRAMERATE_5[];
+static const char VIDEO_MINFRAMERATE_10[];
+static const char VIDEO_MINFRAMERATE_15[];
+static const char VIDEO_MINFRAMERATE_20[];
+static const char VIDEO_MINFRAMERATE_24[];
+static const char VIDEO_MINFRAMERATE_25[];
+static const char VIDEO_MINFRAMERATE_30[];
+static const char VIDEO_MINFRAMERATE_33[];
+
+// TI extensions for Manual Gain and Manual Exposure
+static const char KEY_MANUAL_EXPOSURE_LEFT[];
+static const char KEY_MANUAL_EXPOSURE_RIGHT[];
+static const char KEY_MANUAL_EXPOSURE_MODES[];
+static const char KEY_MANUAL_GAIN_EV_RIGHT[];
+static const char KEY_MANUAL_GAIN_EV_LEFT[];
+static const char KEY_MANUAL_GAIN_ISO_RIGHT[];
+static const char KEY_MANUAL_GAIN_ISO_LEFT[];
+static const char KEY_MANUAL_GAIN_MODES[];
+
+//TI extensions for setting EXIF tags
+static const char KEY_EXIF_MODEL[];
+static const char KEY_EXIF_MAKE[];
+
+//TI extensions for additional GPS data
+static const char KEY_GPS_MAPDATUM[];
+static const char KEY_GPS_VERSION[];
+static const char KEY_GPS_DATESTAMP[];
+
+//TI extensions for enabling/disabling shutter sound
+static const char SHUTTER_ENABLE[];
+static const char SHUTTER_DISABLE[];
+
+//TI extensions for Temporal bracketing
+static const char BRACKET_ENABLE[];
+static const char BRACKET_DISABLE[];
+
+//TI extensions to Image post-processing
+static const char IPP_LDCNSF[];
+static const char IPP_LDC[];
+static const char IPP_NSF[];
+static const char IPP_NONE[];
+
+//TI extensions to camera mode
+static const char HIGH_PERFORMANCE_MODE[];
+static const char HIGH_QUALITY_MODE[];
+static const char HIGH_QUALITY_ZSL_MODE[];
+static const char VIDEO_MODE[];
+
+
+// TI extensions to standard android pixel formats
+static const char PIXEL_FORMAT_RAW[];
+static const char PIXEL_FORMAT_JPS[];
+static const char PIXEL_FORMAT_MPO[];
+static const char PIXEL_FORMAT_RAW_JPEG[];
+static const char PIXEL_FORMAT_RAW_MPO[];
+
+// TI extensions to standard android scene mode settings
+static const char SCENE_MODE_SPORT[];
+static const char SCENE_MODE_CLOSEUP[];
+static const char SCENE_MODE_AQUA[];
+static const char SCENE_MODE_SNOWBEACH[];
+static const char SCENE_MODE_MOOD[];
+static const char SCENE_MODE_NIGHT_INDOOR[];
+static const char SCENE_MODE_DOCUMENT[];
+static const char SCENE_MODE_BARCODE[];
+static const char SCENE_MODE_VIDEO_SUPER_NIGHT[];
+static const char SCENE_MODE_VIDEO_CINE[];
+static const char SCENE_MODE_VIDEO_OLD_FILM[];
+
+// TI extensions to standard android white balance settings.
+static const char WHITE_BALANCE_TUNGSTEN[];
+static const char WHITE_BALANCE_HORIZON[];
+static const char WHITE_BALANCE_SUNSET[];
+static const char WHITE_BALANCE_FACE[];
+
+// TI extensions to add exposure preset modes to android api
+static const char EXPOSURE_MODE_OFF[];
+static const char EXPOSURE_MODE_AUTO[];
+static const char EXPOSURE_MODE_NIGHT[];
+static const char EXPOSURE_MODE_BACKLIGHT[];
+static const char EXPOSURE_MODE_SPOTLIGHT[];
+static const char EXPOSURE_MODE_SPORTS[];
+static const char EXPOSURE_MODE_SNOW[];
+static const char EXPOSURE_MODE_BEACH[];
+static const char EXPOSURE_MODE_APERTURE[];
+static const char EXPOSURE_MODE_SMALL_APERTURE[];
+static const char EXPOSURE_MODE_FACE[];
+
+// TI extensions to standard android focus presets.
+static const char FOCUS_MODE_PORTRAIT[];
+static const char FOCUS_MODE_EXTENDED[];
+static const char FOCUS_MODE_FACE[];
+
+// TI extensions to add iso values
+static const char ISO_MODE_AUTO[];
+static const char ISO_MODE_100[];
+static const char ISO_MODE_200[];
+static const char ISO_MODE_400[];
+static const char ISO_MODE_800[];
+static const char ISO_MODE_1000[];
+static const char ISO_MODE_1200[];
+static const char ISO_MODE_1600[];
+
+// TI extensions to add values for effect settings.
+static const char EFFECT_NATURAL[];
+static const char EFFECT_VIVID[];
+static const char EFFECT_COLOR_SWAP[];
+static const char EFFECT_BLACKWHITE[];
+
+static const char KEY_S3D2D_PREVIEW[];
+static const char KEY_S3D2D_PREVIEW_MODE[];
+
+// TI extensions to add values for AutoConvergence settings.
+static const char AUTOCONVERGENCE_MODE_DISABLE[];
+static const char AUTOCONVERGENCE_MODE_FRAME[];
+static const char AUTOCONVERGENCE_MODE_CENTER[];
+static const char AUTOCONVERGENCE_MODE_FFT[];
+static const char AUTOCONVERGENCE_MODE_MANUAL[];
+
+
+//TI extensions for flash mode settings
+static const char FLASH_MODE_FILL_IN[];
+
+//TI extensions to add sensor orientation parameters
+static const char ORIENTATION_SENSOR_NONE[];
+static const char ORIENTATION_SENSOR_90[];
+static const char ORIENTATION_SENSOR_180[];
+static const char ORIENTATION_SENSOR_270[];
+
+
+//TI values for camera direction
+static const char FACING_FRONT[];
+static const char FACING_BACK[];
+
+};
+
+};
+
+#endif
+
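These are only key and value string constants layered on top of the stock CameraParameters set/get interface. An illustrative use (whether a given value is honoured depends on the adapter and sensor):

    CameraParameters params;
    params.set(TICameraParameters::KEY_CAP_MODE, TICameraParameters::HIGH_QUALITY_MODE);
    params.set(TICameraParameters::KEY_ISO, TICameraParameters::ISO_MODE_400);
    const char* iso = params.get(TICameraParameters::KEY_ISO);   // returns the string just set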
diff --git a/inc/V4LCameraAdapter/V4LCameraAdapter.h b/inc/V4LCameraAdapter/V4LCameraAdapter.h
new file mode 100644
index 0000000..5e06074
--- a/dev/null
+++ b/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef V4L_CAMERA_ADAPTER_H
+#define V4L_CAMERA_ADAPTER_H
+
+#include "CameraHal.h"
+#include "BaseCameraAdapter.h"
+#include "DebugUtils.h"
+
+namespace android {
+
+//#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_YUYV
+#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_NV21
+#define NB_BUFFER 6
+#define DEVICE "/dev/video0"
+
+
+struct VideoInfo {
+ struct v4l2_capability cap;
+ struct v4l2_format format;
+ struct v4l2_buffer buf;
+ struct v4l2_requestbuffers rb;
+ void *mem[NB_BUFFER];
+ bool isStreaming;
+ int width;
+ int height;
+ int formatIn;
+ int framesizeIn;
+};
+
+
+/**
+ * Class which completely abstracts the camera hardware interaction from the camera HAL.
+ * TODO: List here all the message types that this class will support.
+ *       Implement the BufferProvider interface to use OMX AllocateBuffer if needed.
+ */
+class V4LCameraAdapter : public BaseCameraAdapter
+{
+public:
+
+ /*--------------------Constant declarations----------------------------------------*/
+ static const int32_t MAX_NO_BUFFERS = 20;
+
+ ///@remarks OMX Camera has six ports - buffer input, time input, preview, image, video, and meta data
+ static const int MAX_NO_PORTS = 6;
+
+ ///Five second timeout
+ static const int CAMERA_ADAPTER_TIMEOUT = 5000*1000;
+
+public:
+
+ V4LCameraAdapter(size_t sensor_index);
+ ~V4LCameraAdapter();
+
+
+    ///Initializes the camera adapter and creates any resources required
+ virtual status_t initialize(CameraProperties::Properties*);
+ //virtual status_t initialize(CameraProperties::Properties*, int sensor_index=0);
+
+ //APIs to configure Camera adapter and get the current parameter set
+ virtual status_t setParameters(const CameraParameters& params);
+ virtual void getParameters(CameraParameters& params);
+
+ // API
+ virtual status_t UseBuffersPreview(void* bufArr, int num);
+
+ //API to flush the buffers for preview
+ status_t flushBuffers();
+
+protected:
+
+//----------Parent class method implementation------------------------------------
+ virtual status_t startPreview();
+ virtual status_t stopPreview();
+ virtual status_t useBuffers(CameraMode mode, void* bufArr, int num, size_t length, unsigned int queueable);
+ virtual status_t fillThisBuffer(void* frameBuf, CameraFrame::FrameType frameType);
+ virtual status_t getFrameSize(size_t &width, size_t &height);
+ virtual status_t getPictureBufferSize(size_t &length, size_t bufferCount);
+ virtual status_t getFrameDataSize(size_t &dataFrameSize, size_t bufferCount);
+ virtual void onOrientationEvent(uint32_t orientation, uint32_t tilt);
+//-----------------------------------------------------------------------------
+
+
+private:
+
+ class PreviewThread : public Thread {
+ V4LCameraAdapter* mAdapter;
+ public:
+ PreviewThread(V4LCameraAdapter* hw) :
+ Thread(false), mAdapter(hw) { }
+ virtual void onFirstRef() {
+ run("CameraPreviewThread", PRIORITY_URGENT_DISPLAY);
+ }
+ virtual bool threadLoop() {
+ mAdapter->previewThread();
+ // loop until we need to quit
+ return true;
+ }
+ };
+
+ //Used for calculation of the average frame rate during preview
+ status_t recalculateFPS();
+
+ char * GetFrame(int &index);
+
+ int previewThread();
+
+public:
+
+private:
+ int mPreviewBufferCount;
+ KeyedVector<int, int> mPreviewBufs;
+ mutable Mutex mPreviewBufsLock;
+
+ CameraParameters mParams;
+
+ bool mPreviewing;
+ bool mCapturing;
+ Mutex mLock;
+
+ int mFrameCount;
+ int mLastFrameCount;
+ unsigned int mIter;
+ nsecs_t mLastFPSTime;
+
+ //variables holding the estimated framerate
+ float mFPS, mLastFPS;
+
+ int mSensorIndex;
+
+ // protected by mLock
+ sp<PreviewThread> mPreviewThread;
+
+ struct VideoInfo *mVideoInfo;
+ int mCameraHandle;
+
+
+ int nQueued;
+ int nDequeued;
+
+};
+}; //// namespace
+#endif //V4L_CAMERA_ADAPTER_H
+
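The adapter wraps the conventional V4L2 memory-mapped streaming flow. As a hedged outline of what a setup path behind UseBuffersPreview()/startPreview() typically looks like (standard V4L2 ioctls; the exact sequence in V4LCameraAdapter.cpp may differ):

    #include <fcntl.h>
    #include <string.h>
    #include <sys/ioctl.h>
    #include <linux/videodev2.h>

    // Illustrative setup for DEVICE ("/dev/video0") with NB_BUFFER mmap'ed buffers.
    static int setupV4L2Capture(const char* dev, int width, int height)
    {
        int fd = open(dev, O_RDWR);
        if (fd < 0) return -1;

        struct v4l2_format fmt;
        memset(&fmt, 0, sizeof(fmt));
        fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        fmt.fmt.pix.width = width;
        fmt.fmt.pix.height = height;
        fmt.fmt.pix.pixelformat = DEFAULT_PIXEL_FORMAT;     // V4L2_PIX_FMT_NV21 here
        ioctl(fd, VIDIOC_S_FMT, &fmt);

        struct v4l2_requestbuffers rb;
        memset(&rb, 0, sizeof(rb));
        rb.count  = NB_BUFFER;
        rb.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        rb.memory = V4L2_MEMORY_MMAP;
        ioctl(fd, VIDIOC_REQBUFS, &rb);   // then VIDIOC_QUERYBUF + mmap() into VideoInfo::mem[]

        enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        ioctl(fd, VIDIOC_STREAMON, &type);  // preview thread then loops VIDIOC_DQBUF / VIDIOC_QBUF
        return fd;
    }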
diff --git a/inc/VideoMetadata.h b/inc/VideoMetadata.h
new file mode 100644
index 0000000..f05ee50
--- a/dev/null
+++ b/inc/VideoMetadata.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_METADATA_H
+#define VIDEO_METADATA_H
+
+/* This structure is used to pass buffer offset from Camera-Hal to Encoder component
+ * for specific algorithms like VSTAB & VNF
+ */
+
+typedef struct
+{
+ int metadataBufferType;
+ void* handle;
+ int offset;
+}
+video_metadata_t;
+
+#endif
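A short sketch of how a recording frame could be wrapped with this struct when metadata-in-buffers recording is used; the buffer type constant is assumed to come from stagefright's MetadataBufferType.h and the handle/offset values are placeholders:

    video_metadata_t meta;
    meta.metadataBufferType = kMetadataBufferTypeCameraSource;  // assumed stagefright constant
    meta.handle = (void*) bufferHandle;    // placeholder: native handle of the video buffer
    meta.offset = frameOffset;             // placeholder: offset of this frame in that buffer
    // The filled struct is what the video frame callback hands to the encoder.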
diff --git a/jpegenc/amljpeg_enc.c b/jpegenc/amljpeg_enc.c
deleted file mode 100755
index f8a1e2c..0000000
--- a/jpegenc/amljpeg_enc.c
+++ b/dev/null
@@ -1,240 +0,0 @@
-/****************************************************
-*
-* api to encoder rgb data into jpeg format.
-* 01/27/2011.
-* kasin.li@amlogic.com
-*
-*****************************************************/
-#include"amljpeg_enc.h"
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <sys/ioctl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <string.h>
-#include <errno.h>
-#include "jpeglib.h"
-#include "jconfig.h"
-#include <setjmp.h>
-#include "amljpeg_enc.h"
-#include <cutils/log.h>
-
-#include <tjutil.h>
-#include <turbojpeg.h>
-
-typedef struct {
- struct jpeg_destination_mgr pub; /* public fields */
-
- char* obuf;
- int buf_size;
- int data_size;
-} aml_destination_mgr;
-
-typedef aml_destination_mgr * aml_dest_ptr;
-
-static void init_destination (j_compress_ptr cinfo)
-{
- aml_dest_ptr dest = (aml_dest_ptr) cinfo->dest;
-
- dest->pub.next_output_byte = dest->obuf;
- dest->pub.free_in_buffer = dest->buf_size;
-}
-
-static boolean empty_output_buffer (j_compress_ptr cinfo)
-{
- aml_dest_ptr dest = (aml_dest_ptr) cinfo->dest;
-
- /* error handling, input buffer too small. */
- /* maybe you need not use jpeg encoder. */
-
- return FALSE;
-}
-
-static void term_destination (j_compress_ptr cinfo)
-{
- aml_dest_ptr dest = (aml_dest_ptr) cinfo->dest;
- size_t datacount = dest->buf_size - dest->pub.free_in_buffer;
-
- dest->data_size = datacount;
-}
-
-int encode_jpeg(jpeg_enc_t* enc)
-{
-
- /* create compress. information data. */
- struct jpeg_compress_struct cinfo;
- struct jpeg_error_mgr jerr;
- aml_destination_mgr dest;
-
- JSAMPROW row_pointer[1]; /* pointer to JSAMPLE row[s] */
- int row_stride; /* physical row width in image buffer */
- cinfo.err = jpeg_std_error(&jerr);
- jpeg_create_compress(&cinfo);
- //jpeg_stdio_dest(&cinfo, outfile);
- cinfo.dest=(struct jpeg_destination_mgr *)&dest;
- dest.pub.init_destination = init_destination;
- dest.pub.empty_output_buffer = empty_output_buffer;
- dest.pub.term_destination = term_destination;
-
- dest.obuf = enc->odata;
- dest.buf_size = enc->obuff_size;
-
- /* set parameters. */
- cinfo.image_width = enc->width;
- cinfo.image_height = enc->height;
- cinfo.input_components = 3;
- cinfo.in_color_space = JCS_RGB;
- jpeg_set_defaults(&cinfo);
- if(enc->quality==0)
- enc->quality=75;
- jpeg_set_quality(&cinfo, enc->quality, TRUE);
-
- jpeg_start_compress(&cinfo, TRUE);
-
- if(enc->data_in_app1)
- jpeg_write_marker(&cinfo,0xe1,enc->data_in_app1,enc->app1_data_size);
-
- row_stride = enc->width * 3;
-
- /* processing. */
- while (cinfo.next_scanline < cinfo.image_height) {
- row_pointer[0] = & enc->idata[cinfo.next_scanline * row_stride];
- (void) jpeg_write_scanlines(&cinfo, row_pointer, 1);
- }
-
- jpeg_finish_compress(&cinfo);
- jpeg_destroy_compress(&cinfo);
- return dest.data_size;
-}
-
-#define _throwtj() {LOGD("TurboJPEG ERROR:\n%s\n", tjGetErrorStr()); \
- enc->obuff_size=0;goto exit;}
-#define _tj(f) {if((f)==-1) _throwtj();}
-#define _throw(m) {LOGD("ERROR: %s\n", m); enc->obuff_size=0; goto exit;}
-
-int encode_jpeg2(jpeg_enc_t* enc)
-{
- tjhandle handle=NULL;
-
- if(enc->quality==0)
- enc->quality=75;
-
- if((handle=tjInitCompress())==NULL) _throwtj();
- _tj(tjCompressRGB2YUV420WithMark(handle, enc->idata, enc->width, enc->height, &enc->odata,
- &enc->obuff_size, enc->quality, enc->data_in_app1,enc->app1_data_size));
-
-exit:
- if(handle) tjDestroy(handle);
- return enc->obuff_size;
-}
-
-/* example. */
-#ifdef ENC_TEST
-
-#define iwidth 3000
-#define iheight 2000
-#define buf_size (iwidth*iheight)
-
-char jpeg_dat[buf_size*3];
-char jpeg_out[buf_size*3];
-int main() {
- FILE* fp;
- int data_size,i;
- jpeg_enc_t enc;
-
- struct timeval start;
- struct timeval end;
- float time_use=0;
-
- memset(&enc,0,sizeof(jpeg_enc_t));
-
- enc.width=iwidth;
- enc.height=iheight;
- enc.quality=75;
- enc.idata = jpeg_dat;
- enc.ibuff_size = buf_size*3;
- enc.odata = jpeg_dat;
- enc.obuff_size = buf_size*3;
-
- fp=fopen("test1.jpeg","wb");
- if(!fp) {
- LOGD(" open error\n");
- exit(1);
- }
- for(i=0;i<buf_size;i+=3) {
- jpeg_dat[i]=0;
- jpeg_dat[i+1]=0;
- jpeg_dat[i+2]=0xff;
- }
- for(i=buf_size;i<buf_size*2;i+=3) {
- jpeg_dat[i]=0;
- jpeg_dat[i+1]=0xff;
- jpeg_dat[i+2]=0;
- }
- for(i=buf_size*2;i<buf_size*3;i+=3) {
- jpeg_dat[i]=0xff;
- jpeg_dat[i+1]=0;
- jpeg_dat[i+2]=0;
- }
- gettimeofday(&start,NULL);
- for(i=0;i<20;i++) {
- enc.obuff_size = buf_size*3;
- data_size=encode_jpeg(&enc);
- }
- gettimeofday(&end,NULL);
- time_use=(end.tv_sec-start.tv_sec)*1000000+(end.tv_usec-start.tv_usec);
- LOGD("time_use in libjpeg2 %f\n",time_use/20);
- fwrite(jpeg_dat,1,data_size,fp);
- fclose(fp);
-
-/*******************************************/
- LOGD("start decoder2\n");
- memset(&enc,0,sizeof(jpeg_enc_t));
- enc.width=iwidth;
- enc.height=iheight;
- enc.quality=75;
- enc.idata = jpeg_dat;
- enc.ibuff_size = buf_size*3;
- enc.odata = jpeg_out;
- enc.obuff_size = buf_size*3;
-
- fp=fopen("test2.jpeg","wb");
- if(!fp) {
- LOGD(" open error\n");
- exit(1);
- }
- for(i=0;i<buf_size;i+=3) {
- jpeg_dat[i]=0;
- jpeg_dat[i+1]=0;
- jpeg_dat[i+2]=0xff;
- }
- for(i=buf_size;i<buf_size*2;i+=3) {
- jpeg_dat[i]=0;
- jpeg_dat[i+1]=0xff;
- jpeg_dat[i+2]=0;
- }
- for(i=buf_size*2;i<buf_size*3;i+=3) {
- jpeg_dat[i]=0xff;
- jpeg_dat[i+1]=0;
- jpeg_dat[i+2]=0;
- }
- gettimeofday(&start,NULL);
- for(i=0;i<20;i++) {
- enc.obuff_size = buf_size*3;
- data_size=encode_jpeg2(&enc);
- }
- gettimeofday(&end,NULL);
- time_use=(end.tv_sec-start.tv_sec)*1000000+(end.tv_usec-start.tv_usec);
- LOGD("time_use in turbojpeg2 %f\n",time_use/20);
- if(data_size)
- fwrite(jpeg_out,1,data_size,fp);
- else
- LOGD("fucking error\n");
- fclose(fp);
-
- return 0;
-}
-#endif
diff --git a/jpegenc/amljpeg_enc.h b/jpegenc/amljpeg_enc.h
deleted file mode 100755
index 23a9bf3..0000000
--- a/jpegenc/amljpeg_enc.h
+++ b/dev/null
@@ -1,27 +0,0 @@
-#ifndef AM_JPEG_ENCODER_9908049
-#define AM_JPEG_ENCODER_9908049
-
-typedef struct {
- int width;
- int height;
- int quality;
-
- unsigned char* idata;
- unsigned char* odata;
- unsigned char* data_in_app1;
- int app1_data_size;
- int ibuff_size;
- int obuff_size;
-} jpeg_enc_t;
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-int encode_jpeg(jpeg_enc_t* enc);
-int encode_jpeg2(jpeg_enc_t* enc);
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* AM_JPEG_ENCODER_9908049 */
diff --git a/jpegenc/jconfig.h b/jpegenc/jconfig.h
deleted file mode 100755
index 15a9817..0000000
--- a/jpegenc/jconfig.h
+++ b/dev/null
@@ -1,156 +0,0 @@
-/* android jconfig.h */
-/*
- * jconfig.doc
- *
- * Copyright (C) 1991-1994, Thomas G. Lane.
- * This file is part of the Independent JPEG Group's software.
- * For conditions of distribution and use, see the accompanying README file.
- *
- * This file documents the configuration options that are required to
- * customize the JPEG software for a particular system.
- *
- * The actual configuration options for a particular installation are stored
- * in jconfig.h. On many machines, jconfig.h can be generated automatically
- * or copied from one of the "canned" jconfig files that we supply. But if
- * you need to generate a jconfig.h file by hand, this file tells you how.
- *
- * DO NOT EDIT THIS FILE --- IT WON'T ACCOMPLISH ANYTHING.
- * EDIT A COPY NAMED JCONFIG.H.
- */
-
-
-/*
- * These symbols indicate the properties of your machine or compiler.
- * #define the symbol if yes, #undef it if no.
- */
-
-/* Does your compiler support function prototypes?
- * (If not, you also need to use ansi2knr, see install.doc)
- */
-#define HAVE_PROTOTYPES
-
-/* Does your compiler support the declaration "unsigned char" ?
- * How about "unsigned short" ?
- */
-#define HAVE_UNSIGNED_CHAR
-#define HAVE_UNSIGNED_SHORT
-
-/* Define "void" as "char" if your compiler doesn't know about type void.
- * NOTE: be sure to define void such that "void *" represents the most general
- * pointer type, e.g., that returned by malloc().
- */
-/* #define void char */
-
-/* Define "const" as empty if your compiler doesn't know the "const" keyword.
- */
-/* #define const */
-
-/* Define this if an ordinary "char" type is unsigned.
- * If you're not sure, leaving it undefined will work at some cost in speed.
- * If you defined HAVE_UNSIGNED_CHAR then the speed difference is minimal.
- */
-#undef CHAR_IS_UNSIGNED
-
-/* Define this if your system has an ANSI-conforming <stddef.h> file.
- */
-#define HAVE_STDDEF_H
-
-/* Define this if your system has an ANSI-conforming <stdlib.h> file.
- */
-#define HAVE_STDLIB_H
-
-/* Define this if your system does not have an ANSI/SysV <string.h>,
- * but does have a BSD-style <strings.h>.
- */
-#undef NEED_BSD_STRINGS
-
-/* Define this if your system does not provide typedef size_t in any of the
- * ANSI-standard places (stddef.h, stdlib.h, or stdio.h), but places it in
- * <sys/types.h> instead.
- */
-#undef NEED_SYS_TYPES_H
-
-/* For 80x86 machines, you need to define NEED_FAR_POINTERS,
- * unless you are using a large-data memory model or 80386 flat-memory mode.
- * On less brain-damaged CPUs this symbol must not be defined.
- * (Defining this symbol causes large data structures to be referenced through
- * "far" pointers and to be allocated with a special version of malloc.)
- */
-#undef NEED_FAR_POINTERS
-
-/* Define this if your linker needs global names to be unique in less
- * than the first 15 characters.
- */
-#undef NEED_SHORT_EXTERNAL_NAMES
-
-/* Although a real ANSI C compiler can deal perfectly well with pointers to
- * unspecified structures (see "incomplete types" in the spec), a few pre-ANSI
- * and pseudo-ANSI compilers get confused. To keep one of these bozos happy,
- * define INCOMPLETE_TYPES_BROKEN. This is not recommended unless you
- * actually get "missing structure definition" warnings or errors while
- * compiling the JPEG code.
- */
-#undef INCOMPLETE_TYPES_BROKEN
-
-
-/*
- * The following options affect code selection within the JPEG library,
- * but they don't need to be visible to applications using the library.
- * To minimize application namespace pollution, the symbols won't be
- * defined unless JPEG_INTERNALS has been defined.
- */
-
-#ifdef JPEG_INTERNALS
-
-/* Define this if your compiler implements ">>" on signed values as a logical
- * (unsigned) shift; leave it undefined if ">>" is a signed (arithmetic) shift,
- * which is the normal and rational definition.
- */
-#undef RIGHT_SHIFT_IS_UNSIGNED
-
-
-#endif /* JPEG_INTERNALS */
-
-
-/*
- * The remaining options do not affect the JPEG library proper,
- * but only the sample applications cjpeg/djpeg (see cjpeg.c, djpeg.c).
- * Other applications can ignore these.
- */
-
-#ifdef JPEG_CJPEG_DJPEG
-
-/* These defines indicate which image (non-JPEG) file formats are allowed. */
-
-#define BMP_SUPPORTED /* BMP image file format */
-#define GIF_SUPPORTED /* GIF image file format */
-#define PPM_SUPPORTED /* PBMPLUS PPM/PGM image file format */
-#undef RLE_SUPPORTED /* Utah RLE image file format */
-#define TARGA_SUPPORTED /* Targa image file format */
-
-/* Define this if you want to name both input and output files on the command
- * line, rather than using stdout and optionally stdin. You MUST do this if
- * your system can't cope with binary I/O to stdin/stdout. See comments at
- * head of cjpeg.c or djpeg.c.
- */
-#undef TWO_FILE_COMMANDLINE
-
-/* Define this if your system needs explicit cleanup of temporary files.
- * This is crucial under MS-DOS, where the temporary "files" may be areas
- * of extended memory; on most other systems it's not as important.
- */
-#undef NEED_SIGNAL_CATCHER
-
-/* By default, we open image files with fopen(...,"rb") or fopen(...,"wb").
- * This is necessary on systems that distinguish text files from binary files,
- * and is harmless on most systems that don't. If you have one of the rare
- * systems that complains about the "b" spec, define this symbol.
- */
-#undef DONT_USE_B_MODE
-
-/* Define this if you want percent-done progress reports from cjpeg/djpeg.
- */
-#undef PROGRESS_REPORT
-
-
-#endif /* JPEG_CJPEG_DJPEG */
diff --git a/jpegenc/jmorecfg.h b/jpegenc/jmorecfg.h
deleted file mode 100755
index 236bbcb..0000000
--- a/jpegenc/jmorecfg.h
+++ b/dev/null
@@ -1,387 +0,0 @@
-/*
- * jmorecfg.h
- *
- * Copyright (C) 1991-1997, Thomas G. Lane.
- * This file is part of the Independent JPEG Group's software.
- * For conditions of distribution and use, see the accompanying README file.
- *
- * This file contains additional configuration options that customize the
- * JPEG software for special applications or support machine-dependent
- * optimizations. Most users will not need to touch this file.
- */
-
-/*
- * Define ANDROID_RGB to enable specific optimizations for Android
- * JCS_RGBA_8888 support
- * JCS_RGB_565 support
- *
- */
-
-#define ANDROID_RGB
-
-#ifdef ANDROID_RGB
-#define PACK_SHORT_565(r,g,b) ((((r)<<8)&0xf800)|(((g)<<3)&0x7E0)|((b)>>3))
-#define PACK_TWO_PIXELS(l,r) ((r<<16) | l)
-#define PACK_NEED_ALIGNMENT(ptr) (((int)(ptr))&3)
-#define WRITE_TWO_PIXELS(addr, pixels) do { \
- ((INT16*)(addr))[0] = (pixels); \
- ((INT16*)(addr))[1] = (pixels)>>16; \
- } while(0)
-#define WRITE_TWO_ALIGNED_PIXELS(addr, pixels) ((*(INT32*)(addr)) = pixels)
-#define DITHER_565_R(r, dither) ((r) + ((dither)&0xFF))
-#define DITHER_565_G(g, dither) ((g) + (((dither)&0xFF)>>1))
-#define DITHER_565_B(b, dither) ((b) + ((dither)&0xFF))
-#endif
-
-/*
- * Define BITS_IN_JSAMPLE as either
- * 8 for 8-bit sample values (the usual setting)
- * 12 for 12-bit sample values
- * Only 8 and 12 are legal data precisions for lossy JPEG according to the
- * JPEG standard, and the IJG code does not support anything else!
- * We do not support run-time selection of data precision, sorry.
- */
-
-#define BITS_IN_JSAMPLE 8 /* use 8 or 12 */
-
-
-/*
- * Maximum number of components (color channels) allowed in JPEG image.
- * To meet the letter of the JPEG spec, set this to 255. However, darn
- * few applications need more than 4 channels (maybe 5 for CMYK + alpha
- * mask). We recommend 10 as a reasonable compromise; use 4 if you are
- * really short on memory. (Each allowed component costs a hundred or so
- * bytes of storage, whether actually used in an image or not.)
- */
-
-#define MAX_COMPONENTS 10 /* maximum number of image components */
-
-
-/*
- * Basic data types.
- * You may need to change these if you have a machine with unusual data
- * type sizes; for example, "char" not 8 bits, "short" not 16 bits,
- * or "long" not 32 bits. We don't care whether "int" is 16 or 32 bits,
- * but it had better be at least 16.
- */
-
-/* Representation of a single sample (pixel element value).
- * We frequently allocate large arrays of these, so it's important to keep
- * them small. But if you have memory to burn and access to char or short
- * arrays is very slow on your hardware, you might want to change these.
- */
-
-#if BITS_IN_JSAMPLE == 8
-/* JSAMPLE should be the smallest type that will hold the values 0..255.
- * You can use a signed char by having GETJSAMPLE mask it with 0xFF.
- */
-
-#ifdef HAVE_UNSIGNED_CHAR
-
-typedef unsigned char JSAMPLE;
-#define GETJSAMPLE(value) ((int) (value))
-
-#else /* not HAVE_UNSIGNED_CHAR */
-
-typedef char JSAMPLE;
-#ifdef CHAR_IS_UNSIGNED
-#define GETJSAMPLE(value) ((int) (value))
-#else
-#define GETJSAMPLE(value) ((int) (value) & 0xFF)
-#endif /* CHAR_IS_UNSIGNED */
-
-#endif /* HAVE_UNSIGNED_CHAR */
-
-#define MAXJSAMPLE 255
-#define CENTERJSAMPLE 128
-
-#endif /* BITS_IN_JSAMPLE == 8 */
-
-
-#if BITS_IN_JSAMPLE == 12
-/* JSAMPLE should be the smallest type that will hold the values 0..4095.
- * On nearly all machines "short" will do nicely.
- */
-
-typedef short JSAMPLE;
-#define GETJSAMPLE(value) ((int) (value))
-
-#define MAXJSAMPLE 4095
-#define CENTERJSAMPLE 2048
-
-#endif /* BITS_IN_JSAMPLE == 12 */
-
-
-/* Representation of a DCT frequency coefficient.
- * This should be a signed value of at least 16 bits; "short" is usually OK.
- * Again, we allocate large arrays of these, but you can change to int
- * if you have memory to burn and "short" is really slow.
- */
-
-typedef short JCOEF;
-
-
-/* Compressed datastreams are represented as arrays of JOCTET.
- * These must be EXACTLY 8 bits wide, at least once they are written to
- * external storage. Note that when using the stdio data source/destination
- * managers, this is also the data type passed to fread/fwrite.
- */
-
-#ifdef HAVE_UNSIGNED_CHAR
-
-typedef unsigned char JOCTET;
-#define GETJOCTET(value) (value)
-
-#else /* not HAVE_UNSIGNED_CHAR */
-
-typedef char JOCTET;
-#ifdef CHAR_IS_UNSIGNED
-#define GETJOCTET(value) (value)
-#else
-#define GETJOCTET(value) ((value) & 0xFF)
-#endif /* CHAR_IS_UNSIGNED */
-
-#endif /* HAVE_UNSIGNED_CHAR */
-
-
-/* These typedefs are used for various table entries and so forth.
- * They must be at least as wide as specified; but making them too big
- * won't cost a huge amount of memory, so we don't provide special
- * extraction code like we did for JSAMPLE. (In other words, these
- * typedefs live at a different point on the speed/space tradeoff curve.)
- */
-
-/* UINT8 must hold at least the values 0..255. */
-
-#ifdef HAVE_UNSIGNED_CHAR
-typedef unsigned char UINT8;
-#else /* not HAVE_UNSIGNED_CHAR */
-#ifdef CHAR_IS_UNSIGNED
-typedef char UINT8;
-#else /* not CHAR_IS_UNSIGNED */
-typedef short UINT8;
-#endif /* CHAR_IS_UNSIGNED */
-#endif /* HAVE_UNSIGNED_CHAR */
-
-/* UINT16 must hold at least the values 0..65535. */
-
-#ifdef HAVE_UNSIGNED_SHORT
-typedef unsigned short UINT16;
-#else /* not HAVE_UNSIGNED_SHORT */
-typedef unsigned int UINT16;
-#endif /* HAVE_UNSIGNED_SHORT */
-
-/* INT16 must hold at least the values -32768..32767. */
-
-#ifndef XMD_H /* X11/xmd.h correctly defines INT16 */
-typedef short INT16;
-#endif
-
-/* INT32 must hold at least signed 32-bit values. */
-
-#ifndef XMD_H /* X11/xmd.h correctly defines INT32 */
-typedef long INT32;
-#endif
-
-/* Datatype used for image dimensions. The JPEG standard only supports
- * images up to 64K*64K due to 16-bit fields in SOF markers. Therefore
- * "unsigned int" is sufficient on all machines. However, if you need to
- * handle larger images and you don't mind deviating from the spec, you
- * can change this datatype.
- */
-
-typedef unsigned int JDIMENSION;
-
-#define JPEG_MAX_DIMENSION 65500L /* a tad under 64K to prevent overflows */
-
-
-/* These macros are used in all function definitions and extern declarations.
- * You could modify them if you need to change function linkage conventions;
- * in particular, you'll need to do that to make the library a Windows DLL.
- * Another application is to make all functions global for use with debuggers
- * or code profilers that require it.
- */
-
-/* a function called through method pointers: */
-#define METHODDEF(type) static type
-/* a function used only in its module: */
-#define LOCAL(type) static type
-/* a function referenced thru EXTERNs: */
-#define GLOBAL(type) type
-/* a reference to a GLOBAL function: */
-#define EXTERN(type) extern type
-
-
-/* This macro is used to declare a "method", that is, a function pointer.
- * We want to supply prototype parameters if the compiler can cope.
- * Note that the arglist parameter must be parenthesized!
- * Again, you can customize this if you need special linkage keywords.
- */
-
-#ifdef HAVE_PROTOTYPES
-#define JMETHOD(type,methodname,arglist) type (*methodname) arglist
-#else
-#define JMETHOD(type,methodname,arglist) type (*methodname) ()
-#endif
-
-
-/* Here is the pseudo-keyword for declaring pointers that must be "far"
- * on 80x86 machines. Most of the specialized coding for 80x86 is handled
- * by just saying "FAR *" where such a pointer is needed. In a few places
- * explicit coding is needed; see uses of the NEED_FAR_POINTERS symbol.
- */
-
-#ifdef NEED_FAR_POINTERS
-#define FAR far
-#else
-#define FAR
-#endif
-
-
-/*
- * On a few systems, type boolean and/or its values FALSE, TRUE may appear
- * in standard header files. Or you may have conflicts with application-
- * specific header files that you want to include together with these files.
- * Defining HAVE_BOOLEAN before including jpeglib.h should make it work.
- */
-
-#ifndef HAVE_BOOLEAN
-typedef int boolean;
-#endif
-#ifndef FALSE /* in case these macros already exist */
-#define FALSE 0 /* values of boolean */
-#endif
-#ifndef TRUE
-#define TRUE 1
-#endif
-
-
-/*
- * The remaining options affect code selection within the JPEG library,
- * but they don't need to be visible to most applications using the library.
- * To minimize application namespace pollution, the symbols won't be
- * defined unless JPEG_INTERNALS or JPEG_INTERNAL_OPTIONS has been defined.
- */
-
-#ifdef JPEG_INTERNALS
-#define JPEG_INTERNAL_OPTIONS
-#endif
-
-#ifdef JPEG_INTERNAL_OPTIONS
-
-
-/*
- * These defines indicate whether to include various optional functions.
- * Undefining some of these symbols will produce a smaller but less capable
- * library. Note that you can leave certain source files out of the
- * compilation/linking process if you've #undef'd the corresponding symbols.
- * (You may HAVE to do that if your compiler doesn't like null source files.)
- */
-
-/* Arithmetic coding is unsupported for legal reasons. Complaints to IBM. */
-
-/* Capability options common to encoder and decoder: */
-
-#define DCT_ISLOW_SUPPORTED /* slow but accurate integer algorithm */
-#define DCT_IFAST_SUPPORTED /* faster, less accurate integer method */
-#define DCT_FLOAT_SUPPORTED /* floating-point: accurate, fast on fast HW */
-
-/* Encoder capability options: */
-
-#undef C_ARITH_CODING_SUPPORTED /* Arithmetic coding back end? */
-#define C_MULTISCAN_FILES_SUPPORTED /* Multiple-scan JPEG files? */
-#define C_PROGRESSIVE_SUPPORTED /* Progressive JPEG? (Requires MULTISCAN)*/
-#define ENTROPY_OPT_SUPPORTED /* Optimization of entropy coding parms? */
-/* Note: if you selected 12-bit data precision, it is dangerous to turn off
- * ENTROPY_OPT_SUPPORTED. The standard Huffman tables are only good for 8-bit
- * precision, so jchuff.c normally uses entropy optimization to compute
- * usable tables for higher precision. If you don't want to do optimization,
- * you'll have to supply different default Huffman tables.
- * The exact same statements apply for progressive JPEG: the default tables
- * don't work for progressive mode. (This may get fixed, however.)
- */
-#define INPUT_SMOOTHING_SUPPORTED /* Input image smoothing option? */
-
-/* Decoder capability options: */
-
-#undef D_ARITH_CODING_SUPPORTED /* Arithmetic coding back end? */
-#define D_MULTISCAN_FILES_SUPPORTED /* Multiple-scan JPEG files? */
-#define D_PROGRESSIVE_SUPPORTED /* Progressive JPEG? (Requires MULTISCAN)*/
-#define SAVE_MARKERS_SUPPORTED /* jpeg_save_markers() needed? */
-#define BLOCK_SMOOTHING_SUPPORTED /* Block smoothing? (Progressive only) */
-#define IDCT_SCALING_SUPPORTED /* Output rescaling via IDCT? */
-#undef UPSAMPLE_SCALING_SUPPORTED /* Output rescaling at upsample stage? */
-#define UPSAMPLE_MERGING_SUPPORTED /* Fast path for sloppy upsampling? */
-#define QUANT_1PASS_SUPPORTED /* 1-pass color quantization? */
-#define QUANT_2PASS_SUPPORTED /* 2-pass color quantization? */
-
-/* more capability options later, no doubt */
-
-
-/*
- * Ordering of RGB data in scanlines passed to or from the application.
- * If your application wants to deal with data in the order B,G,R, just
- * change these macros. You can also deal with formats such as R,G,B,X
- * (one extra byte per pixel) by changing RGB_PIXELSIZE. Note that changing
- * the offsets will also change the order in which colormap data is organized.
- * RESTRICTIONS:
- * 1. The sample applications cjpeg,djpeg do NOT support modified RGB formats.
- * 2. These macros only affect RGB<=>YCbCr color conversion, so they are not
- * useful if you are using JPEG color spaces other than YCbCr or grayscale.
- * 3. The color quantizer modules will not behave desirably if RGB_PIXELSIZE
- * is not 3 (they don't understand about dummy color components!). So you
- * can't use color quantization if you change that value.
- */
-
-#define RGB_RED 0 /* Offset of Red in an RGB scanline element */
-#define RGB_GREEN 1 /* Offset of Green */
-#define RGB_BLUE 2 /* Offset of Blue */
-#ifdef ANDROID_RGB
-#define RGB_ALPHA 3 /* Offset of Alpha */
-#endif
-#define RGB_PIXELSIZE 3 /* JSAMPLEs per RGB scanline element */
-
-/* Definitions for speed-related optimizations. */
-
-
-/* If your compiler supports inline functions, define INLINE
- * as the inline keyword; otherwise define it as empty.
- */
-
-#ifndef INLINE
-#ifdef __GNUC__ /* for instance, GNU C knows about inline */
-#define INLINE __inline__
-#endif
-#ifndef INLINE
-#define INLINE /* default is to define it as empty */
-#endif
-#endif
-
-
-/* On some machines (notably 68000 series) "int" is 32 bits, but multiplying
- * two 16-bit shorts is faster than multiplying two ints. Define MULTIPLIER
- * as short on such a machine. MULTIPLIER must be at least 16 bits wide.
- */
-
-#ifndef MULTIPLIER
-#define MULTIPLIER int /* type for fastest integer multiply */
-#endif
-
-
-/* FAST_FLOAT should be either float or double, whichever is done faster
- * by your compiler. (Note that this type is only used in the floating point
- * DCT routines, so it only matters if you've defined DCT_FLOAT_SUPPORTED.)
- * Typically, float is faster in ANSI C compilers, while double is faster in
- * pre-ANSI compilers (because they insist on converting to double anyway).
- * The code below therefore chooses float if we have ANSI-style prototypes.
- */
-
-#ifndef FAST_FLOAT
-#ifdef HAVE_PROTOTYPES
-#define FAST_FLOAT float
-#else
-#define FAST_FLOAT double
-#endif
-#endif
-
-#endif /* JPEG_INTERNAL_OPTIONS */
diff --git a/jpegenc/jpeglib.h b/jpegenc/jpeglib.h
deleted file mode 100755
index 0f3a547..0000000
--- a/jpegenc/jpeglib.h
+++ b/dev/null
@@ -1,1100 +0,0 @@
-/*
- * jpeglib.h
- *
- * Copyright (C) 1991-1998, Thomas G. Lane.
- * This file is part of the Independent JPEG Group's software.
- * For conditions of distribution and use, see the accompanying README file.
- *
- * This file defines the application interface for the JPEG library.
- * Most applications using the library need only include this file,
- * and perhaps jerror.h if they want to know the exact error codes.
- */
-
-#ifndef JPEGLIB_H
-#define JPEGLIB_H
-
-/*
- * First we include the configuration files that record how this
- * installation of the JPEG library is set up. jconfig.h can be
- * generated automatically for many systems. jmorecfg.h contains
- * manual configuration options that most people need not worry about.
- */
-
-#ifndef JCONFIG_INCLUDED /* in case jinclude.h already did */
-#include "jconfig.h" /* widely used configuration options */
-#endif
-#include "jmorecfg.h" /* seldom changed options */
-
-
-/* Version ID for the JPEG library.
- * Might be useful for tests like "#if JPEG_LIB_VERSION >= 60".
- */
-
-#define JPEG_LIB_VERSION 62 /* Version 6b */
-
-
-/* Various constants determining the sizes of things.
- * All of these are specified by the JPEG standard, so don't change them
- * if you want to be compatible.
- */
-
-#define DCTSIZE 8 /* The basic DCT block is 8x8 samples */
-#define DCTSIZE2 64 /* DCTSIZE squared; # of elements in a block */
-#define NUM_QUANT_TBLS 4 /* Quantization tables are numbered 0..3 */
-#define NUM_HUFF_TBLS 4 /* Huffman tables are numbered 0..3 */
-#define NUM_ARITH_TBLS 16 /* Arith-coding tables are numbered 0..15 */
-#define MAX_COMPS_IN_SCAN 4 /* JPEG limit on # of components in one scan */
-#define MAX_SAMP_FACTOR 4 /* JPEG limit on sampling factors */
-/* Unfortunately, some bozo at Adobe saw no reason to be bound by the standard;
- * the PostScript DCT filter can emit files with many more than 10 blocks/MCU.
- * If you happen to run across such a file, you can up D_MAX_BLOCKS_IN_MCU
- * to handle it. We even let you do this from the jconfig.h file. However,
- * we strongly discourage changing C_MAX_BLOCKS_IN_MCU; just because Adobe
- * sometimes emits noncompliant files doesn't mean you should too.
- */
-#define C_MAX_BLOCKS_IN_MCU 10 /* compressor's limit on blocks per MCU */
-#ifndef D_MAX_BLOCKS_IN_MCU
-#define D_MAX_BLOCKS_IN_MCU 10 /* decompressor's limit on blocks per MCU */
-#endif
-
-
-/* Data structures for images (arrays of samples and of DCT coefficients).
- * On 80x86 machines, the image arrays are too big for near pointers,
- * but the pointer arrays can fit in near memory.
- */
-
-typedef JSAMPLE FAR *JSAMPROW; /* ptr to one image row of pixel samples. */
-typedef JSAMPROW *JSAMPARRAY; /* ptr to some rows (a 2-D sample array) */
-typedef JSAMPARRAY *JSAMPIMAGE; /* a 3-D sample array: top index is color */
-
-typedef JCOEF JBLOCK[DCTSIZE2]; /* one block of coefficients */
-typedef JBLOCK FAR *JBLOCKROW; /* pointer to one row of coefficient blocks */
-typedef JBLOCKROW *JBLOCKARRAY; /* a 2-D array of coefficient blocks */
-typedef JBLOCKARRAY *JBLOCKIMAGE; /* a 3-D array of coefficient blocks */
-
-typedef JCOEF FAR *JCOEFPTR; /* useful in a couple of places */
-
-
-/* Types for JPEG compression parameters and working tables. */
-
-
-/* DCT coefficient quantization tables. */
-
-typedef struct {
- /* This array gives the coefficient quantizers in natural array order
- * (not the zigzag order in which they are stored in a JPEG DQT marker).
- * CAUTION: IJG versions prior to v6a kept this array in zigzag order.
- */
- UINT16 quantval[DCTSIZE2]; /* quantization step for each coefficient */
- /* This field is used only during compression. It's initialized FALSE when
- * the table is created, and set TRUE when it's been output to the file.
- * You could suppress output of a table by setting this to TRUE.
- * (See jpeg_suppress_tables for an example.)
- */
- boolean sent_table; /* TRUE when table has been output */
-} JQUANT_TBL;
-
-
-/* Huffman coding tables. */
-
-typedef struct {
- /* These two fields directly represent the contents of a JPEG DHT marker */
- UINT8 bits[17]; /* bits[k] = # of symbols with codes of */
- /* length k bits; bits[0] is unused */
- UINT8 huffval[256]; /* The symbols, in order of incr code length */
- /* This field is used only during compression. It's initialized FALSE when
- * the table is created, and set TRUE when it's been output to the file.
- * You could suppress output of a table by setting this to TRUE.
- * (See jpeg_suppress_tables for an example.)
- */
- boolean sent_table; /* TRUE when table has been output */
-} JHUFF_TBL;
-
-
-/* Basic info about one component (color channel). */
-
-typedef struct {
- /* These values are fixed over the whole image. */
- /* For compression, they must be supplied by parameter setup; */
- /* for decompression, they are read from the SOF marker. */
- int component_id; /* identifier for this component (0..255) */
- int component_index; /* its index in SOF or cinfo->comp_info[] */
- int h_samp_factor; /* horizontal sampling factor (1..4) */
- int v_samp_factor; /* vertical sampling factor (1..4) */
- int quant_tbl_no; /* quantization table selector (0..3) */
- /* These values may vary between scans. */
- /* For compression, they must be supplied by parameter setup; */
- /* for decompression, they are read from the SOS marker. */
- /* The decompressor output side may not use these variables. */
- int dc_tbl_no; /* DC entropy table selector (0..3) */
- int ac_tbl_no; /* AC entropy table selector (0..3) */
-
- /* Remaining fields should be treated as private by applications. */
-
- /* These values are computed during compression or decompression startup: */
- /* Component's size in DCT blocks.
- * Any dummy blocks added to complete an MCU are not counted; therefore
- * these values do not depend on whether a scan is interleaved or not.
- */
- JDIMENSION width_in_blocks;
- JDIMENSION height_in_blocks;
- /* Size of a DCT block in samples. Always DCTSIZE for compression.
- * For decompression this is the size of the output from one DCT block,
- * reflecting any scaling we choose to apply during the IDCT step.
- * Values of 1,2,4,8 are likely to be supported. Note that different
- * components may receive different IDCT scalings.
- */
- int DCT_scaled_size;
- /* The downsampled dimensions are the component's actual, unpadded number
- * of samples at the main buffer (preprocessing/compression interface), thus
- * downsampled_width = ceil(image_width * Hi/Hmax)
- * and similarly for height. For decompression, IDCT scaling is included, so
- * downsampled_width = ceil(image_width * Hi/Hmax * DCT_scaled_size/DCTSIZE)
- */
- JDIMENSION downsampled_width; /* actual width in samples */
- JDIMENSION downsampled_height; /* actual height in samples */
- /* This flag is used only for decompression. In cases where some of the
- * components will be ignored (eg grayscale output from YCbCr image),
- * we can skip most computations for the unused components.
- */
- boolean component_needed; /* do we need the value of this component? */
-
- /* These values are computed before starting a scan of the component. */
- /* The decompressor output side may not use these variables. */
- int MCU_width; /* number of blocks per MCU, horizontally */
- int MCU_height; /* number of blocks per MCU, vertically */
- int MCU_blocks; /* MCU_width * MCU_height */
- int MCU_sample_width; /* MCU width in samples, MCU_width*DCT_scaled_size */
- int last_col_width; /* # of non-dummy blocks across in last MCU */
- int last_row_height; /* # of non-dummy blocks down in last MCU */
-
- /* Saved quantization table for component; NULL if none yet saved.
- * See jdinput.c comments about the need for this information.
- * This field is currently used only for decompression.
- */
- JQUANT_TBL * quant_table;
-
- /* Private per-component storage for DCT or IDCT subsystem. */
- void * dct_table;
-} jpeg_component_info;
-
-
-/* The script for encoding a multiple-scan file is an array of these: */
-
-typedef struct {
- int comps_in_scan; /* number of components encoded in this scan */
- int component_index[MAX_COMPS_IN_SCAN]; /* their SOF/comp_info[] indexes */
- int Ss, Se; /* progressive JPEG spectral selection parms */
- int Ah, Al; /* progressive JPEG successive approx. parms */
-} jpeg_scan_info;
-
-/* The decompressor can save APPn and COM markers in a list of these: */
-
-typedef struct jpeg_marker_struct FAR * jpeg_saved_marker_ptr;
-
-struct jpeg_marker_struct {
- jpeg_saved_marker_ptr next; /* next in list, or NULL */
- UINT8 marker; /* marker code: JPEG_COM, or JPEG_APP0+n */
- unsigned int original_length; /* # bytes of data in the file */
- unsigned int data_length; /* # bytes of data saved at data[] */
- JOCTET FAR * data; /* the data contained in the marker */
- /* the marker length word is not counted in data_length or original_length */
-};
-
-/* Known color spaces. */
-
-typedef enum {
- JCS_UNKNOWN, /* error/unspecified */
- JCS_GRAYSCALE, /* monochrome */
- JCS_RGB, /* red/green/blue */
- JCS_YCbCr, /* Y/Cb/Cr (also known as YUV) */
- JCS_CMYK, /* C/M/Y/K */
- JCS_YCCK, /* Y/Cb/Cr/K */
-#ifdef ANDROID_RGB
- JCS_RGBA_8888, /* red/green/blue/alpha */
- JCS_RGB_565 /* red/green/blue in 565 format */
-#endif
-} J_COLOR_SPACE;
-
-/* DCT/IDCT algorithm options. */
-
-typedef enum {
- JDCT_ISLOW, /* slow but accurate integer algorithm */
- JDCT_IFAST, /* faster, less accurate integer method */
- JDCT_FLOAT /* floating-point: accurate, fast on fast HW */
-} J_DCT_METHOD;
-
-#ifndef JDCT_DEFAULT /* may be overridden in jconfig.h */
-#define JDCT_DEFAULT JDCT_ISLOW
-#endif
-#ifndef JDCT_FASTEST /* may be overridden in jconfig.h */
-#define JDCT_FASTEST JDCT_IFAST
-#endif
-
-/* Dithering options for decompression. */
-
-typedef enum {
- JDITHER_NONE, /* no dithering */
- JDITHER_ORDERED, /* simple ordered dither */
- JDITHER_FS /* Floyd-Steinberg error diffusion dither */
-} J_DITHER_MODE;
-
-
-/* Common fields between JPEG compression and decompression master structs. */
-
-#define jpeg_common_fields \
- struct jpeg_error_mgr * err; /* Error handler module */\
- struct jpeg_memory_mgr * mem; /* Memory manager module */\
- struct jpeg_progress_mgr * progress; /* Progress monitor, or NULL if none */\
- void * client_data; /* Available for use by application */\
- boolean is_decompressor; /* So common code can tell which is which */\
- int global_state /* For checking call sequence validity */
-
-/* Routines that are to be used by both halves of the library are declared
- * to receive a pointer to this structure. There are no actual instances of
- * jpeg_common_struct, only of jpeg_compress_struct and jpeg_decompress_struct.
- */
-struct jpeg_common_struct {
- jpeg_common_fields; /* Fields common to both master struct types */
- /* Additional fields follow in an actual jpeg_compress_struct or
- * jpeg_decompress_struct. All three structs must agree on these
- * initial fields! (This would be a lot cleaner in C++.)
- */
-};
-
-typedef struct jpeg_common_struct * j_common_ptr;
-typedef struct jpeg_compress_struct * j_compress_ptr;
-typedef struct jpeg_decompress_struct * j_decompress_ptr;
-
-
-/* Master record for a compression instance */
-
-struct jpeg_compress_struct {
- jpeg_common_fields; /* Fields shared with jpeg_decompress_struct */
-
- /* Destination for compressed data */
- struct jpeg_destination_mgr * dest;
-
- /* Description of source image --- these fields must be filled in by
- * outer application before starting compression. in_color_space must
- * be correct before you can even call jpeg_set_defaults().
- */
-
- JDIMENSION image_width; /* input image width */
- JDIMENSION image_height; /* input image height */
- int input_components; /* # of color components in input image */
- J_COLOR_SPACE in_color_space; /* colorspace of input image */
-
- double input_gamma; /* image gamma of input image */
-
- /* Compression parameters --- these fields must be set before calling
- * jpeg_start_compress(). We recommend calling jpeg_set_defaults() to
- * initialize everything to reasonable defaults, then changing anything
- * the application specifically wants to change. That way you won't get
- * burnt when new parameters are added. Also note that there are several
- * helper routines to simplify changing parameters.
- */
-
- int data_precision; /* bits of precision in image data */
-
- int num_components; /* # of color components in JPEG image */
- J_COLOR_SPACE jpeg_color_space; /* colorspace of JPEG image */
-
- jpeg_component_info * comp_info;
- /* comp_info[i] describes component that appears i'th in SOF */
-
- JQUANT_TBL * quant_tbl_ptrs[NUM_QUANT_TBLS];
- /* ptrs to coefficient quantization tables, or NULL if not defined */
-
- JHUFF_TBL * dc_huff_tbl_ptrs[NUM_HUFF_TBLS];
- JHUFF_TBL * ac_huff_tbl_ptrs[NUM_HUFF_TBLS];
- /* ptrs to Huffman coding tables, or NULL if not defined */
-
- UINT8 arith_dc_L[NUM_ARITH_TBLS]; /* L values for DC arith-coding tables */
- UINT8 arith_dc_U[NUM_ARITH_TBLS]; /* U values for DC arith-coding tables */
- UINT8 arith_ac_K[NUM_ARITH_TBLS]; /* Kx values for AC arith-coding tables */
-
- int num_scans; /* # of entries in scan_info array */
- const jpeg_scan_info * scan_info; /* script for multi-scan file, or NULL */
- /* The default value of scan_info is NULL, which causes a single-scan
- * sequential JPEG file to be emitted. To create a multi-scan file,
- * set num_scans and scan_info to point to an array of scan definitions.
- */
-
- boolean raw_data_in; /* TRUE=caller supplies downsampled data */
- boolean arith_code; /* TRUE=arithmetic coding, FALSE=Huffman */
- boolean optimize_coding; /* TRUE=optimize entropy encoding parms */
- boolean CCIR601_sampling; /* TRUE=first samples are cosited */
- int smoothing_factor; /* 1..100, or 0 for no input smoothing */
- J_DCT_METHOD dct_method; /* DCT algorithm selector */
-
- /* The restart interval can be specified in absolute MCUs by setting
- * restart_interval, or in MCU rows by setting restart_in_rows
- * (in which case the correct restart_interval will be figured
- * for each scan).
- */
- unsigned int restart_interval; /* MCUs per restart, or 0 for no restart */
- int restart_in_rows; /* if > 0, MCU rows per restart interval */
-
- /* Parameters controlling emission of special markers. */
-
- boolean write_JFIF_header; /* should a JFIF marker be written? */
- UINT8 JFIF_major_version; /* What to write for the JFIF version number */
- UINT8 JFIF_minor_version;
- /* These three values are not used by the JPEG code, merely copied */
- /* into the JFIF APP0 marker. density_unit can be 0 for unknown, */
- /* 1 for dots/inch, or 2 for dots/cm. Note that the pixel aspect */
- /* ratio is defined by X_density/Y_density even when density_unit=0. */
- UINT8 density_unit; /* JFIF code for pixel size units */
- UINT16 X_density; /* Horizontal pixel density */
- UINT16 Y_density; /* Vertical pixel density */
- boolean write_Adobe_marker; /* should an Adobe marker be written? */
-
- /* State variable: index of next scanline to be written to
- * jpeg_write_scanlines(). Application may use this to control its
- * processing loop, e.g., "while (next_scanline < image_height)".
- */
-
- JDIMENSION next_scanline; /* 0 .. image_height-1 */
-
- /* Remaining fields are known throughout compressor, but generally
- * should not be touched by a surrounding application.
- */
-
- /*
- * These fields are computed during compression startup
- */
- boolean progressive_mode; /* TRUE if scan script uses progressive mode */
- int max_h_samp_factor; /* largest h_samp_factor */
- int max_v_samp_factor; /* largest v_samp_factor */
-
- JDIMENSION total_iMCU_rows; /* # of iMCU rows to be input to coef ctlr */
- /* The coefficient controller receives data in units of MCU rows as defined
- * for fully interleaved scans (whether the JPEG file is interleaved or not).
- * There are v_samp_factor * DCTSIZE sample rows of each component in an
- * "iMCU" (interleaved MCU) row.
- */
-
- /*
- * These fields are valid during any one scan.
- * They describe the components and MCUs actually appearing in the scan.
- */
- int comps_in_scan; /* # of JPEG components in this scan */
- jpeg_component_info * cur_comp_info[MAX_COMPS_IN_SCAN];
- /* *cur_comp_info[i] describes component that appears i'th in SOS */
-
- JDIMENSION MCUs_per_row; /* # of MCUs across the image */
- JDIMENSION MCU_rows_in_scan; /* # of MCU rows in the image */
-
- int blocks_in_MCU; /* # of DCT blocks per MCU */
- int MCU_membership[C_MAX_BLOCKS_IN_MCU];
- /* MCU_membership[i] is index in cur_comp_info of component owning */
- /* i'th block in an MCU */
-
- int Ss, Se, Ah, Al; /* progressive JPEG parameters for scan */
-
- /*
- * Links to compression subobjects (methods and private variables of modules)
- */
- struct jpeg_comp_master * master;
- struct jpeg_c_main_controller * main;
- struct jpeg_c_prep_controller * prep;
- struct jpeg_c_coef_controller * coef;
- struct jpeg_marker_writer * marker;
- struct jpeg_color_converter * cconvert;
- struct jpeg_downsampler * downsample;
- struct jpeg_forward_dct * fdct;
- struct jpeg_entropy_encoder * entropy;
- jpeg_scan_info * script_space; /* workspace for jpeg_simple_progression */
- int script_space_size;
-};
-
-
-/* Master record for a decompression instance */
-
-struct jpeg_decompress_struct {
- jpeg_common_fields; /* Fields shared with jpeg_compress_struct */
-
- /* Source of compressed data */
- struct jpeg_source_mgr * src;
-
- /* Basic description of image --- filled in by jpeg_read_header(). */
- /* Application may inspect these values to decide how to process image. */
-
- JDIMENSION image_width; /* nominal image width (from SOF marker) */
- JDIMENSION image_height; /* nominal image height */
- int num_components; /* # of color components in JPEG image */
- J_COLOR_SPACE jpeg_color_space; /* colorspace of JPEG image */
-
- /* Decompression processing parameters --- these fields must be set before
- * calling jpeg_start_decompress(). Note that jpeg_read_header() initializes
- * them to default values.
- */
-
- J_COLOR_SPACE out_color_space; /* colorspace for output */
-
- unsigned int scale_num, scale_denom; /* fraction by which to scale image */
-
- double output_gamma; /* image gamma wanted in output */
-
- boolean buffered_image; /* TRUE=multiple output passes */
- boolean raw_data_out; /* TRUE=downsampled data wanted */
-
- J_DCT_METHOD dct_method; /* IDCT algorithm selector */
- boolean do_fancy_upsampling; /* TRUE=apply fancy upsampling */
- boolean do_block_smoothing; /* TRUE=apply interblock smoothing */
-
- boolean quantize_colors; /* TRUE=colormapped output wanted */
- /* the following are ignored if not quantize_colors: */
- J_DITHER_MODE dither_mode; /* type of color dithering to use */
- boolean two_pass_quantize; /* TRUE=use two-pass color quantization */
- int desired_number_of_colors; /* max # colors to use in created colormap */
- /* these are significant only in buffered-image mode: */
- boolean enable_1pass_quant; /* enable future use of 1-pass quantizer */
- boolean enable_external_quant;/* enable future use of external colormap */
- boolean enable_2pass_quant; /* enable future use of 2-pass quantizer */
-
- /* Description of actual output image that will be returned to application.
- * These fields are computed by jpeg_start_decompress().
- * You can also use jpeg_calc_output_dimensions() to determine these values
- * in advance of calling jpeg_start_decompress().
- */
-
- JDIMENSION output_width; /* scaled image width */
- JDIMENSION output_height; /* scaled image height */
- int out_color_components; /* # of color components in out_color_space */
- int output_components; /* # of color components returned */
- /* output_components is 1 (a colormap index) when quantizing colors;
- * otherwise it equals out_color_components.
- */
- int rec_outbuf_height; /* min recommended height of scanline buffer */
- /* If the buffer passed to jpeg_read_scanlines() is less than this many rows
- * high, space and time will be wasted due to unnecessary data copying.
- * Usually rec_outbuf_height will be 1 or 2, at most 4.
- */
-
- /* When quantizing colors, the output colormap is described by these fields.
- * The application can supply a colormap by setting colormap non-NULL before
- * calling jpeg_start_decompress; otherwise a colormap is created during
- * jpeg_start_decompress or jpeg_start_output.
- * The map has out_color_components rows and actual_number_of_colors columns.
- */
- int actual_number_of_colors; /* number of entries in use */
- JSAMPARRAY colormap; /* The color map as a 2-D pixel array */
-
- /* State variables: these variables indicate the progress of decompression.
- * The application may examine these but must not modify them.
- */
-
- /* Row index of next scanline to be read from jpeg_read_scanlines().
- * Application may use this to control its processing loop, e.g.,
- * "while (output_scanline < output_height)".
- */
- JDIMENSION output_scanline; /* 0 .. output_height-1 */
-
- /* Current input scan number and number of iMCU rows completed in scan.
- * These indicate the progress of the decompressor input side.
- */
- int input_scan_number; /* Number of SOS markers seen so far */
- JDIMENSION input_iMCU_row; /* Number of iMCU rows completed */
-
- /* The "output scan number" is the notional scan being displayed by the
- * output side. The decompressor will not allow output scan/row number
- * to get ahead of input scan/row, but it can fall arbitrarily far behind.
- */
- int output_scan_number; /* Nominal scan number being displayed */
- JDIMENSION output_iMCU_row; /* Number of iMCU rows read */
-
- /* Current progression status. coef_bits[c][i] indicates the precision
- * with which component c's DCT coefficient i (in zigzag order) is known.
- * It is -1 when no data has yet been received, otherwise it is the point
- * transform (shift) value for the most recent scan of the coefficient
- * (thus, 0 at completion of the progression).
- * This pointer is NULL when reading a non-progressive file.
- */
- int (*coef_bits)[DCTSIZE2]; /* -1 or current Al value for each coef */
-
- /* Internal JPEG parameters --- the application usually need not look at
- * these fields. Note that the decompressor output side may not use
- * any parameters that can change between scans.
- */
-
- /* Quantization and Huffman tables are carried forward across input
- * datastreams when processing abbreviated JPEG datastreams.
- */
-
- JQUANT_TBL * quant_tbl_ptrs[NUM_QUANT_TBLS];
- /* ptrs to coefficient quantization tables, or NULL if not defined */
-
- JHUFF_TBL * dc_huff_tbl_ptrs[NUM_HUFF_TBLS];
- JHUFF_TBL * ac_huff_tbl_ptrs[NUM_HUFF_TBLS];
- /* ptrs to Huffman coding tables, or NULL if not defined */
-
- /* These parameters are never carried across datastreams, since they
- * are given in SOF/SOS markers or defined to be reset by SOI.
- */
-
- int data_precision; /* bits of precision in image data */
-
- jpeg_component_info * comp_info;
- /* comp_info[i] describes component that appears i'th in SOF */
-
- boolean progressive_mode; /* TRUE if SOFn specifies progressive mode */
- boolean arith_code; /* TRUE=arithmetic coding, FALSE=Huffman */
-
- UINT8 arith_dc_L[NUM_ARITH_TBLS]; /* L values for DC arith-coding tables */
- UINT8 arith_dc_U[NUM_ARITH_TBLS]; /* U values for DC arith-coding tables */
- UINT8 arith_ac_K[NUM_ARITH_TBLS]; /* Kx values for AC arith-coding tables */
-
- unsigned int restart_interval; /* MCUs per restart interval, or 0 for no restart */
-
- /* These fields record data obtained from optional markers recognized by
- * the JPEG library.
- */
- boolean saw_JFIF_marker; /* TRUE iff a JFIF APP0 marker was found */
- /* Data copied from JFIF marker; only valid if saw_JFIF_marker is TRUE: */
- UINT8 JFIF_major_version; /* JFIF version number */
- UINT8 JFIF_minor_version;
- UINT8 density_unit; /* JFIF code for pixel size units */
- UINT16 X_density; /* Horizontal pixel density */
- UINT16 Y_density; /* Vertical pixel density */
- boolean saw_Adobe_marker; /* TRUE iff an Adobe APP14 marker was found */
- UINT8 Adobe_transform; /* Color transform code from Adobe marker */
-
- boolean CCIR601_sampling; /* TRUE=first samples are cosited */
-
- /* Aside from the specific data retained from APPn markers known to the
- * library, the uninterpreted contents of any or all APPn and COM markers
- * can be saved in a list for examination by the application.
- */
- jpeg_saved_marker_ptr marker_list; /* Head of list of saved markers */
-
- /* Remaining fields are known throughout decompressor, but generally
- * should not be touched by a surrounding application.
- */
-
- /*
- * These fields are computed during decompression startup
- */
- int max_h_samp_factor; /* largest h_samp_factor */
- int max_v_samp_factor; /* largest v_samp_factor */
-
- int min_DCT_scaled_size; /* smallest DCT_scaled_size of any component */
-
- JDIMENSION total_iMCU_rows; /* # of iMCU rows in image */
- /* The coefficient controller's input and output progress is measured in
- * units of "iMCU" (interleaved MCU) rows. These are the same as MCU rows
- * in fully interleaved JPEG scans, but are used whether the scan is
- * interleaved or not. We define an iMCU row as v_samp_factor DCT block
- * rows of each component. Therefore, the IDCT output contains
- * v_samp_factor*DCT_scaled_size sample rows of a component per iMCU row.
- */
-
- JSAMPLE * sample_range_limit; /* table for fast range-limiting */
-
- /*
- * These fields are valid during any one scan.
- * They describe the components and MCUs actually appearing in the scan.
- * Note that the decompressor output side must not use these fields.
- */
- int comps_in_scan; /* # of JPEG components in this scan */
- jpeg_component_info * cur_comp_info[MAX_COMPS_IN_SCAN];
- /* *cur_comp_info[i] describes component that appears i'th in SOS */
-
- JDIMENSION MCUs_per_row; /* # of MCUs across the image */
- JDIMENSION MCU_rows_in_scan; /* # of MCU rows in the image */
-
- int blocks_in_MCU; /* # of DCT blocks per MCU */
- int MCU_membership[D_MAX_BLOCKS_IN_MCU];
- /* MCU_membership[i] is index in cur_comp_info of component owning */
- /* i'th block in an MCU */
-
- int Ss, Se, Ah, Al; /* progressive JPEG parameters for scan */
-
- /* This field is shared between entropy decoder and marker parser.
- * It is either zero or the code of a JPEG marker that has been
- * read from the data source, but has not yet been processed.
- */
- int unread_marker;
-
- /*
- * Links to decompression subobjects (methods, private variables of modules)
- */
- struct jpeg_decomp_master * master;
- struct jpeg_d_main_controller * main;
- struct jpeg_d_coef_controller * coef;
- struct jpeg_d_post_controller * post;
- struct jpeg_input_controller * inputctl;
- struct jpeg_marker_reader * marker;
- struct jpeg_entropy_decoder * entropy;
- struct jpeg_inverse_dct * idct;
- struct jpeg_upsampler * upsample;
- struct jpeg_color_deconverter * cconvert;
- struct jpeg_color_quantizer * cquantize;
-};
-
-
-/* "Object" declarations for JPEG modules that may be supplied or called
- * directly by the surrounding application.
- * As with all objects in the JPEG library, these structs only define the
- * publicly visible methods and state variables of a module. Additional
- * private fields may exist after the public ones.
- */
-
-
-/* Error handler object */
-
-struct jpeg_error_mgr {
- /* Error exit handler: does not return to caller */
- JMETHOD(void, error_exit, (j_common_ptr cinfo));
- /* Conditionally emit a trace or warning message */
- JMETHOD(void, emit_message, (j_common_ptr cinfo, int msg_level));
- /* Routine that actually outputs a trace or error message */
- JMETHOD(void, output_message, (j_common_ptr cinfo));
- /* Format a message string for the most recent JPEG error or message */
- JMETHOD(void, format_message, (j_common_ptr cinfo, char * buffer));
-#define JMSG_LENGTH_MAX 200 /* recommended size of format_message buffer */
- /* Reset error state variables at start of a new image */
- JMETHOD(void, reset_error_mgr, (j_common_ptr cinfo));
-
- /* The message ID code and any parameters are saved here.
- * A message can have one string parameter or up to 8 int parameters.
- */
- int msg_code;
-#define JMSG_STR_PARM_MAX 80
- union {
- int i[8];
- char s[JMSG_STR_PARM_MAX];
- } msg_parm;
-
- /* Standard state variables for error facility */
-
- int trace_level; /* max msg_level that will be displayed */
-
- /* For recoverable corrupt-data errors, we emit a warning message,
- * but keep going unless emit_message chooses to abort. emit_message
- * should count warnings in num_warnings. The surrounding application
- * can check for bad data by seeing if num_warnings is nonzero at the
- * end of processing.
- */
- long num_warnings; /* number of corrupt-data warnings */
-
- /* These fields point to the table(s) of error message strings.
- * An application can change the table pointer to switch to a different
- * message list (typically, to change the language in which errors are
- * reported). Some applications may wish to add additional error codes
- * that will be handled by the JPEG library error mechanism; the second
- * table pointer is used for this purpose.
- *
- * First table includes all errors generated by JPEG library itself.
- * Error code 0 is reserved for a "no such error string" message.
- */
- const char * const * jpeg_message_table; /* Library errors */
- int last_jpeg_message; /* Table contains strings 0..last_jpeg_message */
- /* Second table can be added by application (see cjpeg/djpeg for example).
- * It contains strings numbered first_addon_message..last_addon_message.
- */
- const char * const * addon_message_table; /* Non-library errors */
- int first_addon_message; /* code for first string in addon table */
- int last_addon_message; /* code for last string in addon table */
-};
-
-
-/* Progress monitor object */
-
-struct jpeg_progress_mgr {
- JMETHOD(void, progress_monitor, (j_common_ptr cinfo));
-
- long pass_counter; /* work units completed in this pass */
- long pass_limit; /* total number of work units in this pass */
- int completed_passes; /* passes completed so far */
- int total_passes; /* total number of passes expected */
-};
-
-
-/* Data destination object for compression */
-
-struct jpeg_destination_mgr {
- JOCTET * next_output_byte; /* => next byte to write in buffer */
- size_t free_in_buffer; /* # of byte spaces remaining in buffer */
-
- JMETHOD(void, init_destination, (j_compress_ptr cinfo));
- JMETHOD(boolean, empty_output_buffer, (j_compress_ptr cinfo));
- JMETHOD(void, term_destination, (j_compress_ptr cinfo));
-};
-
-
-/* Data source object for decompression */
-
-struct jpeg_source_mgr {
- const JOCTET * next_input_byte; /* => next byte to read from buffer */
- size_t bytes_in_buffer; /* # of bytes remaining in buffer */
-
- JMETHOD(void, init_source, (j_decompress_ptr cinfo));
- JMETHOD(boolean, fill_input_buffer, (j_decompress_ptr cinfo));
- JMETHOD(void, skip_input_data, (j_decompress_ptr cinfo, long num_bytes));
- JMETHOD(boolean, resync_to_restart, (j_decompress_ptr cinfo, int desired));
- JMETHOD(void, term_source, (j_decompress_ptr cinfo));
-};
-
-
-/* Memory manager object.
- * Allocates "small" objects (a few K total), "large" objects (tens of K),
- * and "really big" objects (virtual arrays with backing store if needed).
- * The memory manager does not allow individual objects to be freed; rather,
- * each created object is assigned to a pool, and whole pools can be freed
- * at once. This is faster and more convenient than remembering exactly what
- * to free, especially where malloc()/free() are not too speedy.
- * NB: alloc routines never return NULL. They exit to error_exit if not
- * successful.
- */
-
-#define JPOOL_PERMANENT 0 /* lasts until master record is destroyed */
-#define JPOOL_IMAGE 1 /* lasts until done with image/datastream */
-#define JPOOL_NUMPOOLS 2
-
-typedef struct jvirt_sarray_control * jvirt_sarray_ptr;
-typedef struct jvirt_barray_control * jvirt_barray_ptr;
-
-
-struct jpeg_memory_mgr {
- /* Method pointers */
- JMETHOD(void *, alloc_small, (j_common_ptr cinfo, int pool_id,
- size_t sizeofobject));
- JMETHOD(void FAR *, alloc_large, (j_common_ptr cinfo, int pool_id,
- size_t sizeofobject));
- JMETHOD(JSAMPARRAY, alloc_sarray, (j_common_ptr cinfo, int pool_id,
- JDIMENSION samplesperrow,
- JDIMENSION numrows));
- JMETHOD(JBLOCKARRAY, alloc_barray, (j_common_ptr cinfo, int pool_id,
- JDIMENSION blocksperrow,
- JDIMENSION numrows));
- JMETHOD(jvirt_sarray_ptr, request_virt_sarray, (j_common_ptr cinfo,
- int pool_id,
- boolean pre_zero,
- JDIMENSION samplesperrow,
- JDIMENSION numrows,
- JDIMENSION maxaccess));
- JMETHOD(jvirt_barray_ptr, request_virt_barray, (j_common_ptr cinfo,
- int pool_id,
- boolean pre_zero,
- JDIMENSION blocksperrow,
- JDIMENSION numrows,
- JDIMENSION maxaccess));
- JMETHOD(void, realize_virt_arrays, (j_common_ptr cinfo));
- JMETHOD(JSAMPARRAY, access_virt_sarray, (j_common_ptr cinfo,
- jvirt_sarray_ptr ptr,
- JDIMENSION start_row,
- JDIMENSION num_rows,
- boolean writable));
- JMETHOD(JBLOCKARRAY, access_virt_barray, (j_common_ptr cinfo,
- jvirt_barray_ptr ptr,
- JDIMENSION start_row,
- JDIMENSION num_rows,
- boolean writable));
- JMETHOD(void, free_pool, (j_common_ptr cinfo, int pool_id));
- JMETHOD(void, self_destruct, (j_common_ptr cinfo));
-
- /* Limit on memory allocation for this JPEG object. (Note that this is
- * merely advisory, not a guaranteed maximum; it only affects the space
- * used for virtual-array buffers.) May be changed by outer application
- * after creating the JPEG object.
- */
- long max_memory_to_use;
-
- /* Maximum allocation request accepted by alloc_large. */
- long max_alloc_chunk;
-};
-
-
-/* Routine signature for application-supplied marker processing methods.
- * Need not pass marker code since it is stored in cinfo->unread_marker.
- */
-typedef JMETHOD(boolean, jpeg_marker_parser_method, (j_decompress_ptr cinfo));
-
-
-/* Declarations for routines called by application.
- * The JPP macro hides prototype parameters from compilers that can't cope.
- * Note JPP requires double parentheses.
- */
-
-#ifdef HAVE_PROTOTYPES
-#define JPP(arglist) arglist
-#else
-#define JPP(arglist) ()
-#endif
-
-
-/* Short forms of external names for systems with brain-damaged linkers.
- * We shorten external names to be unique in the first six letters, which
- * is good enough for all known systems.
- * (If your compiler itself needs names to be unique in less than 15
- * characters, you are out of luck. Get a better compiler.)
- */
-
-#ifdef NEED_SHORT_EXTERNAL_NAMES
-#define jpeg_std_error jStdError
-#define jpeg_CreateCompress jCreaCompress
-#define jpeg_CreateDecompress jCreaDecompress
-#define jpeg_destroy_compress jDestCompress
-#define jpeg_destroy_decompress jDestDecompress
-#define jpeg_stdio_dest jStdDest
-#define jpeg_stdio_src jStdSrc
-#define jpeg_set_defaults jSetDefaults
-#define jpeg_set_colorspace jSetColorspace
-#define jpeg_default_colorspace jDefColorspace
-#define jpeg_set_quality jSetQuality
-#define jpeg_set_linear_quality jSetLQuality
-#define jpeg_add_quant_table jAddQuantTable
-#define jpeg_quality_scaling jQualityScaling
-#define jpeg_simple_progression jSimProgress
-#define jpeg_suppress_tables jSuppressTables
-#define jpeg_alloc_quant_table jAlcQTable
-#define jpeg_alloc_huff_table jAlcHTable
-#define jpeg_start_compress jStrtCompress
-#define jpeg_write_scanlines jWrtScanlines
-#define jpeg_finish_compress jFinCompress
-#define jpeg_write_raw_data jWrtRawData
-#define jpeg_write_marker jWrtMarker
-#define jpeg_write_m_header jWrtMHeader
-#define jpeg_write_m_byte jWrtMByte
-#define jpeg_write_tables jWrtTables
-#define jpeg_read_header jReadHeader
-#define jpeg_start_decompress jStrtDecompress
-#define jpeg_read_scanlines jReadScanlines
-#define jpeg_finish_decompress jFinDecompress
-#define jpeg_read_raw_data jReadRawData
-#define jpeg_has_multiple_scans jHasMultScn
-#define jpeg_start_output jStrtOutput
-#define jpeg_finish_output jFinOutput
-#define jpeg_input_complete jInComplete
-#define jpeg_new_colormap jNewCMap
-#define jpeg_consume_input jConsumeInput
-#define jpeg_calc_output_dimensions jCalcDimensions
-#define jpeg_save_markers jSaveMarkers
-#define jpeg_set_marker_processor jSetMarker
-#define jpeg_read_coefficients jReadCoefs
-#define jpeg_write_coefficients jWrtCoefs
-#define jpeg_copy_critical_parameters jCopyCrit
-#define jpeg_abort_compress jAbrtCompress
-#define jpeg_abort_decompress jAbrtDecompress
-#define jpeg_abort jAbort
-#define jpeg_destroy jDestroy
-#define jpeg_resync_to_restart jResyncRestart
-#endif /* NEED_SHORT_EXTERNAL_NAMES */
-
-
-/* Default error-management setup */
-EXTERN(struct jpeg_error_mgr *) jpeg_std_error
- JPP((struct jpeg_error_mgr * err));
-
-/* Initialization of JPEG compression objects.
- * jpeg_create_compress() and jpeg_create_decompress() are the exported
- * names that applications should call. These expand to calls on
- * jpeg_CreateCompress and jpeg_CreateDecompress with additional information
- * passed for version mismatch checking.
- * NB: you must set up the error-manager BEFORE calling jpeg_create_xxx.
- */
-#define jpeg_create_compress(cinfo) \
- jpeg_CreateCompress((cinfo), JPEG_LIB_VERSION, \
- (size_t) sizeof(struct jpeg_compress_struct))
-#define jpeg_create_decompress(cinfo) \
- jpeg_CreateDecompress((cinfo), JPEG_LIB_VERSION, \
- (size_t) sizeof(struct jpeg_decompress_struct))
-EXTERN(void) jpeg_CreateCompress JPP((j_compress_ptr cinfo,
- int version, size_t structsize));
-EXTERN(void) jpeg_CreateDecompress JPP((j_decompress_ptr cinfo,
- int version, size_t structsize));
-/* Destruction of JPEG compression objects */
-EXTERN(void) jpeg_destroy_compress JPP((j_compress_ptr cinfo));
-EXTERN(void) jpeg_destroy_decompress JPP((j_decompress_ptr cinfo));
-
-/* Standard data source and destination managers: stdio streams. */
-/* Caller is responsible for opening the file before and closing after. */
-EXTERN(void) jpeg_stdio_dest JPP((j_compress_ptr cinfo, FILE * outfile));
-EXTERN(void) jpeg_stdio_src JPP((j_decompress_ptr cinfo, FILE * infile));
-
-/* Default parameter setup for compression */
-EXTERN(void) jpeg_set_defaults JPP((j_compress_ptr cinfo));
-/* Compression parameter setup aids */
-EXTERN(void) jpeg_set_colorspace JPP((j_compress_ptr cinfo,
- J_COLOR_SPACE colorspace));
-EXTERN(void) jpeg_default_colorspace JPP((j_compress_ptr cinfo));
-EXTERN(void) jpeg_set_quality JPP((j_compress_ptr cinfo, int quality,
- boolean force_baseline));
-EXTERN(void) jpeg_set_linear_quality JPP((j_compress_ptr cinfo,
- int scale_factor,
- boolean force_baseline));
-EXTERN(void) jpeg_add_quant_table JPP((j_compress_ptr cinfo, int which_tbl,
- const unsigned int *basic_table,
- int scale_factor,
- boolean force_baseline));
-EXTERN(int) jpeg_quality_scaling JPP((int quality));
-EXTERN(void) jpeg_simple_progression JPP((j_compress_ptr cinfo));
-EXTERN(void) jpeg_suppress_tables JPP((j_compress_ptr cinfo,
- boolean suppress));
-EXTERN(JQUANT_TBL *) jpeg_alloc_quant_table JPP((j_common_ptr cinfo));
-EXTERN(JHUFF_TBL *) jpeg_alloc_huff_table JPP((j_common_ptr cinfo));
-
-/* Main entry points for compression */
-EXTERN(void) jpeg_start_compress JPP((j_compress_ptr cinfo,
- boolean write_all_tables));
-EXTERN(JDIMENSION) jpeg_write_scanlines JPP((j_compress_ptr cinfo,
- JSAMPARRAY scanlines,
- JDIMENSION num_lines));
-EXTERN(void) jpeg_finish_compress JPP((j_compress_ptr cinfo));
-
-/* Replaces jpeg_write_scanlines when writing raw downsampled data. */
-EXTERN(JDIMENSION) jpeg_write_raw_data JPP((j_compress_ptr cinfo,
- JSAMPIMAGE data,
- JDIMENSION num_lines));
-
-/* Write a special marker. See libjpeg.doc concerning safe usage. */
-EXTERN(void) jpeg_write_marker
- JPP((j_compress_ptr cinfo, int marker,
- const JOCTET * dataptr, unsigned int datalen));
-/* Same, but piecemeal. */
-EXTERN(void) jpeg_write_m_header
- JPP((j_compress_ptr cinfo, int marker, unsigned int datalen));
-EXTERN(void) jpeg_write_m_byte
- JPP((j_compress_ptr cinfo, int val));
-
-/* Alternate compression function: just write an abbreviated table file */
-EXTERN(void) jpeg_write_tables JPP((j_compress_ptr cinfo));
-
-/* Decompression startup: read start of JPEG datastream to see what's there */
-EXTERN(int) jpeg_read_header JPP((j_decompress_ptr cinfo,
- boolean require_image));
-/* Return value is one of: */
-#define JPEG_SUSPENDED 0 /* Suspended due to lack of input data */
-#define JPEG_HEADER_OK 1 /* Found valid image datastream */
-#define JPEG_HEADER_TABLES_ONLY 2 /* Found valid table-specs-only datastream */
-/* If you pass require_image = TRUE (normal case), you need not check for
- * a TABLES_ONLY return code; an abbreviated file will cause an error exit.
- * JPEG_SUSPENDED is only possible if you use a data source module that can
- * give a suspension return (the stdio source module doesn't).
- */
-
-/* Main entry points for decompression */
-EXTERN(boolean) jpeg_start_decompress JPP((j_decompress_ptr cinfo));
-EXTERN(JDIMENSION) jpeg_read_scanlines JPP((j_decompress_ptr cinfo,
- JSAMPARRAY scanlines,
- JDIMENSION max_lines));
-EXTERN(boolean) jpeg_finish_decompress JPP((j_decompress_ptr cinfo));
-
-/* Replaces jpeg_read_scanlines when reading raw downsampled data. */
-EXTERN(JDIMENSION) jpeg_read_raw_data JPP((j_decompress_ptr cinfo,
- JSAMPIMAGE data,
- JDIMENSION max_lines));
-
-/* Additional entry points for buffered-image mode. */
-EXTERN(boolean) jpeg_has_multiple_scans JPP((j_decompress_ptr cinfo));
-EXTERN(boolean) jpeg_start_output JPP((j_decompress_ptr cinfo,
- int scan_number));
-EXTERN(boolean) jpeg_finish_output JPP((j_decompress_ptr cinfo));
-EXTERN(boolean) jpeg_input_complete JPP((j_decompress_ptr cinfo));
-EXTERN(void) jpeg_new_colormap JPP((j_decompress_ptr cinfo));
-EXTERN(int) jpeg_consume_input JPP((j_decompress_ptr cinfo));
-/* Return value is one of: */
-/* #define JPEG_SUSPENDED 0 Suspended due to lack of input data */
-#define JPEG_REACHED_SOS 1 /* Reached start of new scan */
-#define JPEG_REACHED_EOI 2 /* Reached end of image */
-#define JPEG_ROW_COMPLETED 3 /* Completed one iMCU row */
-#define JPEG_SCAN_COMPLETED 4 /* Completed last iMCU row of a scan */
-
-/* Precalculate output dimensions for current decompression parameters. */
-EXTERN(void) jpeg_calc_output_dimensions JPP((j_decompress_ptr cinfo));
-
-/* Control saving of COM and APPn markers into marker_list. */
-EXTERN(void) jpeg_save_markers
- JPP((j_decompress_ptr cinfo, int marker_code,
- unsigned int length_limit));
-
-/* Install a special processing method for COM or APPn markers. */
-EXTERN(void) jpeg_set_marker_processor
- JPP((j_decompress_ptr cinfo, int marker_code,
- jpeg_marker_parser_method routine));
-
-/* Read or write raw DCT coefficients --- useful for lossless transcoding. */
-EXTERN(jvirt_barray_ptr *) jpeg_read_coefficients JPP((j_decompress_ptr cinfo));
-EXTERN(void) jpeg_write_coefficients JPP((j_compress_ptr cinfo,
- jvirt_barray_ptr * coef_arrays));
-EXTERN(void) jpeg_copy_critical_parameters JPP((j_decompress_ptr srcinfo,
- j_compress_ptr dstinfo));
-
-/* If you choose to abort compression or decompression before completing
- * jpeg_finish_(de)compress, then you need to clean up to release memory,
- * temporary files, etc. You can just call jpeg_destroy_(de)compress
- * if you're done with the JPEG object, but if you want to clean it up and
- * reuse it, call this:
- */
-EXTERN(void) jpeg_abort_compress JPP((j_compress_ptr cinfo));
-EXTERN(void) jpeg_abort_decompress JPP((j_decompress_ptr cinfo));
-
-/* Generic versions of jpeg_abort and jpeg_destroy that work on either
- * flavor of JPEG object. These may be more convenient in some places.
- */
-EXTERN(void) jpeg_abort JPP((j_common_ptr cinfo));
-EXTERN(void) jpeg_destroy JPP((j_common_ptr cinfo));
-
-/* Default restart-marker-resync procedure for use by data source modules */
-EXTERN(boolean) jpeg_resync_to_restart JPP((j_decompress_ptr cinfo,
- int desired));
-
-
-/* These marker codes are exported since applications and data source modules
- * are likely to want to use them.
- */
-
-#define JPEG_RST0 0xD0 /* RST0 marker code */
-#define JPEG_EOI 0xD9 /* EOI marker code */
-#define JPEG_APP0 0xE0 /* APP0 marker code */
-#define JPEG_COM 0xFE /* COM marker code */
-
-
-/* If we have a brain-damaged compiler that emits warnings (or worse, errors)
- * for structure definitions that are never filled in, keep it quiet by
- * supplying dummy definitions for the various substructures.
- */
-
-#ifdef INCOMPLETE_TYPES_BROKEN
-#ifndef JPEG_INTERNALS /* will be defined in jpegint.h */
-struct jvirt_sarray_control { long dummy; };
-struct jvirt_barray_control { long dummy; };
-struct jpeg_comp_master { long dummy; };
-struct jpeg_c_main_controller { long dummy; };
-struct jpeg_c_prep_controller { long dummy; };
-struct jpeg_c_coef_controller { long dummy; };
-struct jpeg_marker_writer { long dummy; };
-struct jpeg_color_converter { long dummy; };
-struct jpeg_downsampler { long dummy; };
-struct jpeg_forward_dct { long dummy; };
-struct jpeg_entropy_encoder { long dummy; };
-struct jpeg_decomp_master { long dummy; };
-struct jpeg_d_main_controller { long dummy; };
-struct jpeg_d_coef_controller { long dummy; };
-struct jpeg_d_post_controller { long dummy; };
-struct jpeg_input_controller { long dummy; };
-struct jpeg_marker_reader { long dummy; };
-struct jpeg_entropy_decoder { long dummy; };
-struct jpeg_inverse_dct { long dummy; };
-struct jpeg_upsampler { long dummy; };
-struct jpeg_color_deconverter { long dummy; };
-struct jpeg_color_quantizer { long dummy; };
-#endif /* JPEG_INTERNALS */
-#endif /* INCOMPLETE_TYPES_BROKEN */
-
-
-/*
- * The JPEG library modules define JPEG_INTERNALS before including this file.
- * The internal structure declarations are read only when that is true.
- * Applications using the library should not include jpegint.h, but may wish
- * to include jerror.h.
- */
-
-#ifdef JPEG_INTERNALS
-#include "jpegint.h" /* fetch private declarations */
-#include "jerror.h" /* fetch error codes too */
-#endif
-
-#endif /* JPEGLIB_H */
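For reference, the minimal baseline-compression sequence built from the entry points declared in the header removed above (illustrative only, not part of this commit; the wrapper function and its arguments are hypothetical, and the default jpeg_std_error handler exits the process on fatal errors):

#include <stdio.h>
#include "jpeglib.h"

static void write_rgb_jpeg(const char *path, JSAMPLE *rgb, int width, int height)
{
    struct jpeg_compress_struct cinfo;
    struct jpeg_error_mgr jerr;
    FILE *fp = fopen(path, "wb");
    if (fp == NULL) return;

    cinfo.err = jpeg_std_error(&jerr);   /* error manager must be set up before create */
    jpeg_create_compress(&cinfo);
    jpeg_stdio_dest(&cinfo, fp);

    cinfo.image_width = width;
    cinfo.image_height = height;
    cinfo.input_components = 3;          /* packed RGB input */
    cinfo.in_color_space = JCS_RGB;      /* must be set before jpeg_set_defaults() */
    jpeg_set_defaults(&cinfo);
    jpeg_set_quality(&cinfo, 90, TRUE);

    jpeg_start_compress(&cinfo, TRUE);
    while (cinfo.next_scanline < cinfo.image_height) {
        JSAMPROW row = rgb + cinfo.next_scanline * width * 3;
        jpeg_write_scanlines(&cinfo, &row, 1);
    }
    jpeg_finish_compress(&cinfo);
    jpeg_destroy_compress(&cinfo);
    fclose(fp);
}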
diff --git a/util.cpp b/util.cpp
deleted file mode 100755
index a1e7eb2..0000000
--- a/util.cpp
+++ b/dev/null
@@ -1,285 +0,0 @@
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <sys/ioctl.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-
-#define swap_cbcr
-static void convert_rgb16_to_nv21(uint8_t *rgb, uint8_t *yuv, int width, int height)
-{
- int iy =0, iuv = 0;
- uint8_t* buf_y = yuv;
- uint8_t* buf_uv = buf_y + width * height;
- uint16_t* buf_rgb = (uint16_t *)rgb;
- int h,w,val_rgb,val_r,val_g,val_b;
- int y,u,v;
- for (h = 0; h < height; h++) {
- for (w = 0; w < width; w++) {
- val_rgb = buf_rgb[h * width + w];
- val_r = ((val_rgb & (0x1f << 11)) >> 11)<<3;
- val_g = ((val_rgb & (0x3f << 5)) >> 5)<<2;
- val_b = ((val_rgb & (0x1f << 0)) >> 0)<<3;
- y = 0.30078 * val_r + 0.5859 * val_g + 0.11328 * val_b;
- if (y > 255) {
- y = 255;
- } else if (y < 0) {
- y = 0;
- }
- buf_y[iy++] = y;
- if (0 == h % 2 && 0 == w % 2) {
- u = -0.11328 * val_r - 0.33984 * val_g + 0.51179 * val_b + 128;
- if (u > 255) {
- u = 255;
- } else if (u < 0) {
- u = 0;
- }
- v = 0.51179 * val_r - 0.429688 * val_g - 0.08203 * val_b + 128;
- if (v > 255) {
- v = 255;
- } else if (v < 0) {
- v = 0;
- }
-#ifdef swap_cbcr
- buf_uv[iuv++] = v;
- buf_uv[iuv++] = u;
-#else
- buf_uv[iuv++] = u;
- buf_uv[iuv++] = v;
-#endif
- }
- }
-    }
-}
-
-static inline void yuv_to_rgb16(unsigned char y,unsigned char u,unsigned char v,unsigned char *rgb)
-{
- register int r,g,b;
- int rgb16;
-
- r = (1192 * (y - 16) + 1634 * (v - 128) ) >> 10;
- g = (1192 * (y - 16) - 833 * (v - 128) - 400 * (u -128) ) >> 10;
- b = (1192 * (y - 16) + 2066 * (u - 128) ) >> 10;
-
- r = r > 255 ? 255 : r < 0 ? 0 : r;
- g = g > 255 ? 255 : g < 0 ? 0 : g;
- b = b > 255 ? 255 : b < 0 ? 0 : b;
-
- rgb16 = (int)(((r >> 3)<<11) | ((g >> 2) << 5)| ((b >> 3) << 0));
-
- *rgb = (unsigned char)(rgb16 & 0xFF);
- rgb++;
- *rgb = (unsigned char)((rgb16 & 0xFF00) >> 8);
-
-}
-
-void yuyv422_to_rgb16(unsigned char *from, unsigned char *to, int size)
-{
- int x,y,z=0;
-
- for (y = 0; y < size; y+=4) {
- unsigned char Y1, Y2, U, V;
-
- Y1 = from[y + 0];
- U = from[y + 1];
- Y2 = from[y + 2];
- V = from[y + 3];
-
- yuv_to_rgb16(Y1, U, V, &to[y]);
- yuv_to_rgb16(Y2, U, V, &to[y + 2]);
- }
-}
-
-void yuyv422_to_rgb16(unsigned char *from, unsigned char *to, int width, int height)
-{
- yuyv422_to_rgb16(from,to,(width * height) * 2);
-}
-
-void yuyv422_to_nv21(unsigned char *bufsrc, unsigned char *bufdest, int width, int height)
-{
- unsigned char *ptrsrcy1, *ptrsrcy2;
- unsigned char *ptrsrcy3, *ptrsrcy4;
- unsigned char *ptrsrccb1, *ptrsrccb2;
- unsigned char *ptrsrccb3, *ptrsrccb4;
- unsigned char *ptrsrccr1, *ptrsrccr2;
- unsigned char *ptrsrccr3, *ptrsrccr4;
- int srcystride, srcccstride;
-
- ptrsrcy1 = bufsrc ;
- ptrsrcy2 = bufsrc + (width<<1) ;
- ptrsrcy3 = bufsrc + (width<<1)*2 ;
- ptrsrcy4 = bufsrc + (width<<1)*3 ;
-
- ptrsrccb1 = bufsrc + 1;
- ptrsrccb2 = bufsrc + (width<<1) + 1;
- ptrsrccb3 = bufsrc + (width<<1)*2 + 1;
- ptrsrccb4 = bufsrc + (width<<1)*3 + 1;
-
- ptrsrccr1 = bufsrc + 3;
- ptrsrccr2 = bufsrc + (width<<1) + 3;
- ptrsrccr3 = bufsrc + (width<<1)*2 + 3;
- ptrsrccr4 = bufsrc + (width<<1)*3 + 3;
-
- srcystride = (width<<1)*3;
- srcccstride = (width<<1)*3;
-
- unsigned char *ptrdesty1, *ptrdesty2;
- unsigned char *ptrdesty3, *ptrdesty4;
- unsigned char *ptrdestcb1, *ptrdestcb2;
- unsigned char *ptrdestcr1, *ptrdestcr2;
- int destystride, destccstride;
-
- ptrdesty1 = bufdest;
- ptrdesty2 = bufdest + width;
- ptrdesty3 = bufdest + width*2;
- ptrdesty4 = bufdest + width*3;
-
- ptrdestcb1 = bufdest + width*height;
- ptrdestcb2 = bufdest + width*height + width;
-
- ptrdestcr1 = bufdest + width*height + 1;
- ptrdestcr2 = bufdest + width*height + width + 1;
-
- destystride = (width)*3;
- destccstride = width;
-
- int i, j;
-
- for(j=0; j<(height/4); j++)
- {
- for(i=0;i<(width/2);i++)
- {
- (*ptrdesty1++) = (*ptrsrcy1);
- (*ptrdesty2++) = (*ptrsrcy2);
- (*ptrdesty3++) = (*ptrsrcy3);
- (*ptrdesty4++) = (*ptrsrcy4);
-
- ptrsrcy1 += 2;
- ptrsrcy2 += 2;
- ptrsrcy3 += 2;
- ptrsrcy4 += 2;
-
- (*ptrdesty1++) = (*ptrsrcy1);
- (*ptrdesty2++) = (*ptrsrcy2);
- (*ptrdesty3++) = (*ptrsrcy3);
- (*ptrdesty4++) = (*ptrsrcy4);
-
- ptrsrcy1 += 2;
- ptrsrcy2 += 2;
- ptrsrcy3 += 2;
- ptrsrcy4 += 2;
-
- #if 0
- (*ptrdestcb1) = (*ptrsrccb1);
- (*ptrdestcb2) = (*ptrsrccb3);
- #else
- (*ptrdestcb1) = (*ptrsrccr1);
- (*ptrdestcb2) = (*ptrsrccr3);
- #endif
-
- #if 0
- (*ptrdestcr1) = (*ptrsrccr1);
- (*ptrdestcr2) = (*ptrsrccr3);
- #else
- (*ptrdestcr1) = (*ptrsrccb1);
- (*ptrdestcr2) = (*ptrsrccb3);
- #endif
-
- ptrdestcb1 += 2;
- ptrdestcb2 += 2;
- ptrdestcr1 += 2;
- ptrdestcr2 += 2;
-
- ptrsrccb1 += 4;
- ptrsrccb3 += 4;
- ptrsrccr1 += 4;
- ptrsrccr3 += 4;
-
- }
-
-
- /* Update src pointers */
- ptrsrcy1 += srcystride;
- ptrsrcy2 += srcystride;
- ptrsrcy3 += srcystride;
- ptrsrcy4 += srcystride;
-
- ptrsrccb1 += srcccstride;
- ptrsrccb3 += srcccstride;
-
- ptrsrccr1 += srcccstride;
- ptrsrccr3 += srcccstride;
-
-
- /* Update dest pointers */
- ptrdesty1 += destystride;
- ptrdesty2 += destystride;
- ptrdesty3 += destystride;
- ptrdesty4 += destystride;
-
- ptrdestcb1 += destccstride;
- ptrdestcb2 += destccstride;
-
- ptrdestcr1 += destccstride;
- ptrdestcr2 += destccstride;
-
- }
-}
-
-static inline void yuv_to_rgb24(unsigned char y,unsigned char u,unsigned char v,unsigned char *rgb)
-{
- register int r,g,b;
- int rgb24;
-
- r = (1192 * (y - 16) + 1634 * (v - 128) ) >> 10;
- g = (1192 * (y - 16) - 833 * (v - 128) - 400 * (u -128) ) >> 10;
- b = (1192 * (y - 16) + 2066 * (u - 128) ) >> 10;
-
- r = r > 255 ? 255 : r < 0 ? 0 : r;
- g = g > 255 ? 255 : g < 0 ? 0 : g;
- b = b > 255 ? 255 : b < 0 ? 0 : b;
-
- rgb24 = (int)((r <<16) | (g << 8)| b);
-
- *rgb = (unsigned char)r;
- rgb++;
- *rgb = (unsigned char)g;
- rgb++;
- *rgb = (unsigned char)b;
-}
-
-void yuyv422_to_rgb24(unsigned char *buf, unsigned char *rgb, int width, int height)
-{
- int x,y,z=0;
- int blocks;
-
- blocks = (width * height) * 2;
-
- for (y = 0,z=0; y < blocks; y+=4,z+=6) {
- unsigned char Y1, Y2, U, V;
-
- Y1 = buf[y + 0];
- U = buf[y + 1];
- Y2 = buf[y + 2];
- V = buf[y + 3];
-
- yuv_to_rgb24(Y1, U, V, &rgb[z]);
- yuv_to_rgb24(Y2, U, V, &rgb[z + 3]);
-
- }
-}
-
-void convert_rgb24_to_rgb16(uint8_t *src, uint8_t *dst, int width, int height)
-{
- int src_len = width*height*3;
- int i = 0;
- int j = 0;
-
- for (i = 0; i < src_len; i += 3)
- {
- dst[j] = (src[i]&0x1f) | (src[i+1]>>5);
- dst[j+1] = ((src[i+1]>>2)<<5) | (src[i+2]>>3);
- j += 2;
- }
-}
-
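Typical use of the removed helpers from C++ HAL code, with the buffer size each pixel format implies (illustrative only, not part of this commit; the wrapper and buffer names are hypothetical):

#include <stdint.h>
#include "util.h"

static void convert_preview_frame(unsigned char *yuyv, int width, int height)
{
    // YUYV 4:2:2 input carries 2 bytes per pixel.
    unsigned char *rgb565 = new unsigned char[width * height * 2];      // RGB565: 2 bytes/pixel
    unsigned char *nv21   = new unsigned char[width * height * 3 / 2];  // NV21: Y plane + interleaved VU

    yuyv422_to_rgb16(yuyv, rgb565, width, height);   // width/height overload declared in util.h
    yuyv422_to_nv21(yuyv, nv21, width, height);

    delete[] rgb565;
    delete[] nv21;
}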
diff --git a/util.h b/util.h
deleted file mode 100755
index b60aeee..0000000
--- a/util.h
+++ b/dev/null
@@ -1,10 +0,0 @@
-#ifndef AML_CAMERA_HARDWARE_INCLUDE_
-#define AML_CAMERA_HARDWARE_INCLUDE_
-
-void convert_rgb16_to_nv21(uint8_t *rgb, uint8_t *yuv, int width, int height);
-void convert_rgb24_to_rgb16(uint8_t *rgb888, uint8_t *rgb565, int width, int height);
-void yuyv422_to_rgb16(unsigned char *from, unsigned char *to, int width,int height);
-void yuyv422_to_rgb16(unsigned char *from, unsigned char *to, int size);
-void yuyv422_to_rgb24(unsigned char *buf, unsigned char *rgb, int width, int height);
-void yuyv422_to_nv21(unsigned char *bufsrc, unsigned char *bufdest, int width, int height);
-#endif /* AML_CAMERA_HARDWARE_INCLUDE_*/
diff --git a/utils/Android.mk b/utils/Android.mk
new file mode 100755
index 0000000..4b6a2b3
--- a/dev/null
+++ b/utils/Android.mk
@@ -0,0 +1,34 @@
+################################################
+
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_PRELINK_MODULE := false
+
+LOCAL_SRC_FILES:= \
+ MessageQueue.cpp \
+ Semaphore.cpp \
+ ErrorUtils.cpp
+
+LOCAL_SHARED_LIBRARIES:= \
+ libdl \
+ libui \
+ libbinder \
+ libutils \
+ libcutils
+
+LOCAL_C_INCLUDES += \
+ frameworks/base/include/utils \
+ bionic/libc/include \
+ hardware/ti/omap4xxx/domx/omx_core/inc \
+ hardware/ti/omap4xxx/domx/mm_osal/inc
+
+LOCAL_CFLAGS += -fno-short-enums
+
+# LOCAL_CFLAGS +=
+
+LOCAL_MODULE:= libtiutils
+LOCAL_MODULE_TAGS:= optional
+
+include $(BUILD_HEAPTRACKED_SHARED_LIBRARY)
diff --git a/utils/DebugUtils.h b/utils/DebugUtils.h
new file mode 100644
index 0000000..54edfc7
--- a/dev/null
+++ b/utils/DebugUtils.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef DEBUG_UTILS_H
+#define DEBUG_UTILS_H
+
+///Defines for debug statements - Macro LOG_TAG needs to be defined in the respective files
+#define DBGUTILS_LOGVA(str) LOGV("%s:%d %s - " str,__FILE__, __LINE__,__FUNCTION__);
+#define DBGUTILS_LOGVB(str,...) LOGV("%s:%d %s - " str,__FILE__, __LINE__, __FUNCTION__, __VA_ARGS__);
+#define DBGUTILS_LOGDA(str) LOGD("%s:%d %s - " str,__FILE__, __LINE__,__FUNCTION__);
+#define DBGUTILS_LOGDB(str, ...) LOGD("%s:%d %s - " str,__FILE__, __LINE__, __FUNCTION__, __VA_ARGS__);
+#define DBGUTILS_LOGEA(str) LOGE("%s:%d %s - " str,__FILE__, __LINE__, __FUNCTION__);
+#define DBGUTILS_LOGEB(str, ...) LOGE("%s:%d %s - " str,__FILE__, __LINE__,__FUNCTION__, __VA_ARGS__);
+#define LOG_FUNCTION_NAME LOGV("%d: %s() ENTER", __LINE__, __FUNCTION__);
+#define LOG_FUNCTION_NAME_EXIT LOGV("%d: %s() EXIT", __LINE__, __FUNCTION__);
+
+
+
+
+#endif //DEBUG_UTILS_H
+
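Typical use of these macros (illustrative only, not part of this commit; the function below is hypothetical, and the usual Android include paths are assumed): the including file defines LOG_TAG first, then the "A" macros take a bare string and the "B" macros take printf-style arguments.

#define LOG_TAG "CameraHAL"
#include <utils/Log.h>
#include <utils/Errors.h>
#include "DebugUtils.h"

android::status_t allocateBuffers(int count)
{
    LOG_FUNCTION_NAME                                        // "<line>: allocateBuffers() ENTER"

    if (count <= 0) {
        DBGUTILS_LOGEB("invalid buffer count %d", count);    // file:line function - prefixed error
        return android::BAD_VALUE;
    }
    DBGUTILS_LOGDB("allocated %d buffers", count);

    LOG_FUNCTION_NAME_EXIT
    return android::NO_ERROR;
}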
diff --git a/utils/ErrorUtils.cpp b/utils/ErrorUtils.cpp
new file mode 100644
index 0000000..c35d24c
--- a/dev/null
+++ b/utils/ErrorUtils.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include "ErrorUtils.h"
+
+namespace android {
+
+/**
+ @brief Method to convert from POSIX to Android errors
+
+ @param error Any of the standard POSIX error codes (defined in bionic/libc/kernel/common/asm-generic/errno.h)
+ @return Any of the standard Android error code (defined in frameworks/base/include/utils/Errors.h)
+ */
+status_t ErrorUtils::posixToAndroidError(int error)
+{
+ switch(error)
+ {
+ case 0:
+ return NO_ERROR;
+ case EINVAL:
+ case EFBIG:
+ case EMSGSIZE:
+ case E2BIG:
+ case EFAULT:
+ case EILSEQ:
+ return BAD_VALUE;
+ case ENOSYS:
+ return INVALID_OPERATION;
+ case EACCES:
+ case EPERM:
+ return PERMISSION_DENIED;
+ case EADDRINUSE:
+ case EAGAIN:
+ case EALREADY:
+ case EBUSY:
+ case EEXIST:
+ case EINPROGRESS:
+ return ALREADY_EXISTS;
+ case ENOMEM:
+ return NO_MEMORY;
+ default:
+ return UNKNOWN_ERROR;
+ };
+
+ return NO_ERROR;
+}
+
+
+#if 0
+/**
+ @brief Method to convert from TI OSAL to Android errors
+
+ @param error Any of the standard TI OSAL error codes (defined in
+ hardware/ti/omx/ducati/domx/system/mm_osal/inc/timm_osal_error.h)
+ @return Any of the standard Android error code (defined in frameworks/base/include/utils/Errors.h)
+ */
+status_t ErrorUtils::osalToAndroidError(TIMM_OSAL_ERRORTYPE error)
+{
+ switch(error)
+ {
+ case TIMM_OSAL_ERR_NONE:
+ return NO_ERROR;
+ case TIMM_OSAL_ERR_ALLOC:
+ return NO_MEMORY;
+ default:
+ return UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+}
+
+/**
+ @brief Method to convert from OMX to Android errors
+
+ @param error Any of the standard OMX error codes (defined in hardware/ti/omx/ducati/domx/system/omx_core/inc/OMX_Core.h)
+ @return Any of the standard Android error code (defined in frameworks/base/include/utils/Errors.h)
+ */
+status_t ErrorUtils::omxToAndroidError(OMX_ERRORTYPE error)
+{
+ switch(error)
+ {
+ case OMX_ErrorNone:
+ return NO_ERROR;
+ case OMX_ErrorBadParameter:
+ case OMX_ErrorInvalidComponentName:
+ case OMX_ErrorUndefined:
+ case OMX_ErrorInvalidState:
+ case OMX_ErrorStreamCorrupt:
+ case OMX_ErrorPortsNotCompatible:
+ case OMX_ErrorVersionMismatch:
+ case OMX_ErrorMbErrorsInFrame:
+ return BAD_VALUE;
+ case OMX_ErrorInsufficientResources:
+ return NO_MEMORY;
+ case OMX_ErrorComponentNotFound:
+ case OMX_ErrorNotImplemented:
+ case OMX_ErrorFormatNotDetected:
+ case OMX_ErrorUnsupportedSetting:
+ return NAME_NOT_FOUND;
+ case OMX_ErrorUnderflow:
+ case OMX_ErrorOverflow:
+ case OMX_ErrorUnsupportedIndex:
+ case OMX_ErrorBadPortIndex:
+ return BAD_INDEX;
+ case OMX_ErrorHardware:
+ case OMX_ErrorContentPipeCreationFailed:
+ case OMX_ErrorContentPipeOpenFailed:
+ return FAILED_TRANSACTION;
+ case OMX_ErrorTimeout:
+ return TIMED_OUT;
+ case OMX_ErrorSameState:
+ case OMX_ErrorIncorrectStateTransition:
+ case OMX_ErrorIncorrectStateOperation:
+ return PERMISSION_DENIED;
+ case OMX_ErrorTunnelingUnsupported:
+ return INVALID_OPERATION;
+ default:
+ return UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+}
+#endif
+
+};
+
+
+
diff --git a/utils/ErrorUtils.h b/utils/ErrorUtils.h
new file mode 100644
index 0000000..a4fb4a8
--- a/dev/null
+++ b/utils/ErrorUtils.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ERROR_UTILS_H
+#define ERROR_UTILS_H
+
+///Header file where all the android error codes are defined
+#include <Errors.h>
+
+///Header file where all the OMX error codes are defined
+//#include "OMX_Core.h"
+
+
+extern "C"
+{
+///Header file where all the TI OSAL error codes are defined
+//#include "timm_osal_error.h"
+};
+
+namespace android {
+
+///Generic class with static methods to convert any standard error type to Android error type
+class ErrorUtils
+{
+public:
+ ///Method to convert from POSIX to Android errors
+ static status_t posixToAndroidError(int error);
+
+ ///Method to convert from TI OSAL to Android errors
+ //static status_t osalToAndroidError(TIMM_OSAL_ERRORTYPE error);
+
+ ///Method to convert from OMX to Android errors
+ //static status_t omxToAndroidError(OMX_ERRORTYPE error);
+
+};
+
+};
+
+#endif /// ERROR_UTILS_H
diff --git a/utils/MessageQueue.cpp b/utils/MessageQueue.cpp
new file mode 100755
index 0000000..11f5407
--- a/dev/null
+++ b/utils/MessageQueue.cpp
@@ -0,0 +1,415 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include <errno.h>
+#include <string.h>
+#include <sys/types.h>
+#include <sys/poll.h>
+#include <unistd.h>
+#include <Errors.h>
+
+
+
+#define LOG_TAG "MessageQueue"
+#include <utils/Log.h>
+
+#include "MessageQueue.h"
+
+namespace TIUTILS {
+
+/**
+ @brief Constructor for the message queue class
+
+ @param none
+ @return none
+ */
+MessageQueue::MessageQueue()
+{
+ LOG_FUNCTION_NAME;
+
+ int fds[2] = {-1,-1};
+ android::status_t stat;
+
+ stat = pipe(fds);
+
+ if ( 0 > stat )
+ {
+ MSGQ_LOGEB("Error while openning pipe: %s", strerror(stat) );
+ this->fd_read = 0;
+ this->fd_write = 0;
+ mHasMsg = false;
+ }
+ else
+ {
+ this->fd_read = fds[0];
+ this->fd_write = fds[1];
+
+ mHasMsg = false;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Destructor for the message queue class
+
+ @param none
+ @return none
+ */
+MessageQueue::~MessageQueue()
+{
+ LOG_FUNCTION_NAME;
+
+ if(this->fd_read >= 0)
+ {
+ close(this->fd_read);
+ }
+
+ if(this->fd_write >= 0)
+ {
+ close(this->fd_write);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Get a message from the queue
+
+ @param msg Message structure to hold the message to be retrieved
+ @return android::NO_ERROR On success
+ @return android::BAD_VALUE if the message pointer is NULL
+ @return android::NO_INIT If the file read descriptor is not set
+ @return android::UNKNOWN_ERROR if the read operation from the file read descriptor fails
+ */
+android::status_t MessageQueue::get(Message* msg)
+{
+ LOG_FUNCTION_NAME;
+
+ if(!msg)
+ {
+ MSGQ_LOGEA("msg is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::BAD_VALUE;
+ }
+
+ if(!this->fd_read)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+
+ char* p = (char*) msg;
+ size_t read_bytes = 0;
+
+ while( read_bytes < sizeof(*msg) )
+ {
+ int err = read(this->fd_read, p, sizeof(*msg) - read_bytes);
+
+ if( err < 0 )
+ {
+ MSGQ_LOGEB("read() error: %s", strerror(errno));
+ return android::UNKNOWN_ERROR;
+ }
+ else
+ {
+ read_bytes += err;
+ }
+ }
+
+ MSGQ_LOGDB("MQ.get(%d,%p,%p,%p,%p)", msg->command, msg->arg1,msg->arg2,msg->arg3,msg->arg4);
+
+ mHasMsg = false;
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return 0;
+}
+
+/**
+ @brief Get the input file descriptor of the message queue
+
+ @param none
+ @return file read descriptor
+ */
+
+int MessageQueue::getInFd()
+{
+ return this->fd_read;
+}
+
+/**
+ @brief Set the input file descriptor of the message queue
+
+ @param fd file read descriptor
+ @return none
+ */
+
+void MessageQueue::setInFd(int fd)
+{
+ LOG_FUNCTION_NAME;
+
+ if ( -1 != this->fd_read )
+ {
+ close(this->fd_read);
+ }
+
+ this->fd_read = fd;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+/**
+ @brief Queue a message
+
+ @param msg Message structure holding the message to be queued
+ @return android::NO_ERROR On success
+ @return android::BAD_VALUE if the message pointer is NULL
+ @return android::NO_INIT If the file write descriptor is not set
+ @return android::UNKNOWN_ERROR if the write operation to the file write descriptor fails
+ */
+
+android::status_t MessageQueue::put(Message* msg)
+{
+ LOG_FUNCTION_NAME;
+
+ char* p = (char*) msg;
+ size_t bytes = 0;
+
+ if(!msg)
+ {
+ MSGQ_LOGEA("msg is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::BAD_VALUE;
+ }
+
+ if(!this->fd_write)
+ {
+ MSGQ_LOGEA("write descriptor not initialized for message queue");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+
+
+ MSGQ_LOGDB("MQ.put(%d,%p,%p,%p,%p)", msg->command, msg->arg1,msg->arg2,msg->arg3,msg->arg4);
+
+ while( bytes < sizeof(*msg) )
+ {
+ int err = write(this->fd_write, p, sizeof(*msg) - bytes);
+
+ if( err < 0 )
+ {
+ MSGQ_LOGEB("write() error: %s", strerror(errno));
+ LOG_FUNCTION_NAME_EXIT;
+ return android::UNKNOWN_ERROR;
+ }
+ else
+ {
+ bytes += err;
+ }
+ }
+
+ MSGQ_LOGDA("MessageQueue::put EXIT");
+
+ LOG_FUNCTION_NAME_EXIT;
+ return 0;
+}
+
+
+/**
+ @brief Returns if the message queue is empty or not
+
+ @param none
+ @return true If the queue is empty
+ @return false If the queue has at least one message
+ */
+bool MessageQueue::isEmpty()
+{
+ LOG_FUNCTION_NAME;
+
+ struct pollfd pfd;
+
+ pfd.fd = this->fd_read;
+ pfd.events = POLLIN;
+ pfd.revents = 0;
+
+ if(!this->fd_read)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue");
+ LOG_FUNCTION_NAME_EXIT;
+ return true; //an uninitialized queue is reported as empty
+ }
+
+
+ if( -1 == poll(&pfd,1,0) )
+ {
+ MSGQ_LOGEB("poll() error: %s", strerror(errno));
+ LOG_FUNCTION_NAME_EXIT;
+ return false;
+ }
+
+ if(pfd.revents & POLLIN)
+ {
+ mHasMsg = true;
+ }
+ else
+ {
+ mHasMsg = false;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return !mHasMsg;
+}
+
+void MessageQueue::clear()
+{
+ if(!this->fd_read)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue");
+ LOG_FUNCTION_NAME_EXIT;
+ return;
+ }
+
+ Message msg;
+ while(!isEmpty())
+ {
+ get(&msg);
+ }
+
+}
+
+
+/**
+ @brief Force whether the message queue has message or not
+
+ @param hasMsg Whether the queue has a message or not
+ @return none
+ */
+void MessageQueue::setMsg(bool hasMsg)
+ {
+ mHasMsg = hasMsg;
+ }
+
+
+/**
+ @brief Wait for a message in up to three different queues with a timeout
+
+ @param queue1 First queue. At least this should be set to a valid queue pointer
+ @param queue2 Second queue. Optional.
+ @param queue3 Third queue. Optional.
+ @param timeout The timeout value (in micro secs) to wait for a message in any of the queues
+ @return android::NO_ERROR On success
+ @return android::BAD_VALUE If queue1 is NULL
+ @return android::NO_INIT If the file read descriptor of any of the provided queues is not set
+ */
+android::status_t MessageQueue::waitForMsg(MessageQueue *queue1, MessageQueue *queue2, MessageQueue *queue3, int timeout)
+ {
+ LOG_FUNCTION_NAME;
+
+ int n =1;
+ struct pollfd pfd[3];
+
+ if(!queue1)
+ {
+ MSGQ_LOGEA("queue1 pointer is NULL");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::BAD_VALUE;
+ }
+
+ pfd[0].fd = queue1->getInFd();
+ if(!pfd[0].fd)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue1");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+ pfd[0].events = POLLIN;
+ pfd[0].revents = 0;
+ if(queue2)
+ {
+ MSGQ_LOGDA("queue2 not-null");
+ pfd[1].fd = queue2->getInFd();
+ if(!pfd[1].fd)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue2");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+
+ pfd[1].events = POLLIN;
+ pfd[1].revents = 0;
+ n++;
+ }
+
+ if(queue3)
+ {
+ MSGQ_LOGDA("queue3 not-null");
+ pfd[2].fd = queue3->getInFd();
+ if(!pfd[2].fd)
+ {
+ MSGQ_LOGEA("read descriptor not initialized for message queue3");
+ LOG_FUNCTION_NAME_EXIT;
+ return android::NO_INIT;
+ }
+
+ pfd[2].events = POLLIN;
+ pfd[2].revents = 0;
+ n++;
+ }
+
+
+ int ret = poll(pfd, n, timeout);
+ if(ret==0)
+ {
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ if(ret<android::NO_ERROR)
+ {
+ MSGQ_LOGEB("Message queue returned error %d", ret);
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+ if (pfd[0].revents & POLLIN)
+ {
+ queue1->setMsg(true);
+ }
+
+ if(queue2)
+ {
+ if (pfd[1].revents & POLLIN)
+ {
+ queue2->setMsg(true);
+ }
+ }
+
+ if(queue3)
+ {
+ if (pfd[2].revents & POLLIN)
+ {
+ queue3->setMsg(true);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+ }
+
+};
diff --git a/utils/MessageQueue.h b/utils/MessageQueue.h
new file mode 100755
index 0000000..6d05201
--- a/dev/null
+++ b/utils/MessageQueue.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#ifndef __MESSAGEQUEUE_H__
+#define __MESSAGEQUEUE_H__
+
+#include "DebugUtils.h"
+#include <stdint.h>
+
+///Uncomment this macro to debug the message queue implementation
+//#define DEBUG_LOG
+
+///Camera HAL Logging Functions
+#ifndef DEBUG_LOG
+
+#define MSGQ_LOGDA(str)
+#define MSGQ_LOGDB(str, ...)
+
+#undef LOG_FUNCTION_NAME
+#undef LOG_FUNCTION_NAME_EXIT
+#define LOG_FUNCTION_NAME
+#define LOG_FUNCTION_NAME_EXIT
+
+#else
+
+#define MSGQ_LOGDA DBGUTILS_LOGDA
+#define MSGQ_LOGDB DBGUTILS_LOGDB
+
+#endif
+
+#define MSGQ_LOGEA DBGUTILS_LOGEA
+#define MSGQ_LOGEB DBGUTILS_LOGEB
+
+
+namespace TIUTILS {
+
+///Message type
+struct Message
+{
+ unsigned int command;
+ void* arg1;
+ void* arg2;
+ void* arg3;
+ void* arg4;
+ int64_t id;
+};
+
+///Message queue implementation
+class MessageQueue
+{
+public:
+
+ MessageQueue();
+ ~MessageQueue();
+
+ ///Get a message from the queue
+ android::status_t get(Message*);
+
+ ///Get the input file descriptor of the message queue
+ int getInFd();
+
+ ///Set the input file descriptor for the message queue
+ void setInFd(int fd);
+
+ ///Queue a message
+ android::status_t put(Message*);
+
+ ///Returns if the message queue is empty or not
+ bool isEmpty();
+
+ void clear();
+
+ ///Force whether the message queue has message or not
+ void setMsg(bool hasMsg=false);
+
+ ///Wait for message in maximum three different queues with a timeout
+ static android::status_t waitForMsg(MessageQueue *queue1, MessageQueue *queue2=0, MessageQueue *queue3=0, int timeout = 0);
+
+ bool hasMsg()
+ {
+ return mHasMsg;
+ }
+
+private:
+ int fd_read;
+ int fd_write;
+ bool mHasMsg;
+};
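+
+/*
+ Minimal usage sketch (illustrative only; the command id and payload are
+ assumptions): a message carries a command code plus up to four opaque
+ pointer arguments, e.g.
+
+     TIUTILS::Message msg;
+     msg.command = CMD_START_PREVIEW;    // hypothetical command id
+     msg.arg1    = (void *) params;      // hypothetical payload
+     queue.put(&msg);
+*/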
+
+};
+
+#endif
diff --git a/utils/Semaphore.cpp b/utils/Semaphore.cpp
new file mode 100644
index 0000000..41fa99c
--- a/dev/null
+++ b/utils/Semaphore.cpp
@@ -0,0 +1,232 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+#include "Semaphore.h"
+#include "ErrorUtils.h"
+#include <utils/Log.h>
+#include <time.h>
+
+namespace android {
+
+/**
+ @brief Constructor for the semaphore class
+
+ @param none
+ @return none
+ */
+Semaphore::Semaphore()
+{
+ ///Initialize the semaphore to NULL
+ mSemaphore = NULL;
+}
+
+/**
+ @brief Destructor of the semaphore class
+
+ @param none
+ @return none
+
+ */
+Semaphore::~Semaphore()
+{
+ Release();
+}
+
+/**
+ @brief Releases the semaphore
+
+ @param none
+ @return NO_ERROR On Success
+ @return One of the android error codes based on semaphore de-initialization
+ */
+
+status_t Semaphore::Release()
+{
+ int status = 0;
+
+ ///Destroy only if the semaphore has been created
+ if(mSemaphore)
+ {
+ status = sem_destroy(mSemaphore);
+
+ free(mSemaphore);
+
+ mSemaphore = NULL;
+ }
+
+ ///Initialize the semaphore and return the status
+ return ErrorUtils::posixToAndroidError(status);
+
+}
+
+/**
+ @brief Create the semaphore with initial count value
+
+ @param count >=0
+ @return NO_ERROR On Success
+ @return NO_MEMORY If unable to allocate memory for the semaphore
+ @return BAD_VALUE If an invalid count value is passed (<0)
+ @return One of the android error codes based on semaphore initialization
+ */
+
+status_t Semaphore::Create(int count)
+{
+ status_t ret = NO_ERROR;
+
+ ///count cannot be less than zero
+ if(count<0)
+ {
+ return BAD_VALUE;
+ }
+
+ ret = Release();
+ if ( NO_ERROR != ret )
+ {
+ return ret;
+ }
+
+ ///allocate memory for the semaphore
+ mSemaphore = (sem_t*)malloc(sizeof(sem_t)) ;
+
+ ///if memory is unavailable, return error
+ if(!mSemaphore)
+ {
+ return NO_MEMORY;
+ }
+
+ ///Initialize the semaphore and return the status
+ return ErrorUtils::posixToAndroidError(sem_init(mSemaphore, 0x00, count));
+
+}
+
+/**
+ @brief Wait operation
+
+ @param none
+ @return BAD_VALUE if the semaphore is not initialized
+ @return NO_ERROR On success
+ @return One of the android error codes based on semaphore wait operation
+ */
+status_t Semaphore::Wait()
+{
+ ///semaphore should have been created first
+ if(!mSemaphore)
+ {
+ return BAD_VALUE;
+ }
+
+ ///Wait and return the status after signalling
+ return ErrorUtils::posixToAndroidError(sem_wait(mSemaphore));
+
+
+}
+
+
+/**
+ @brief Signal operation
+
+ @param none
+ @return BAD_VALUE if the semaphore is not initialized
+ @return NO_ERROR On success
+ @return One of the android error codes based on semaphore signal operation
+ */
+
+status_t Semaphore::Signal()
+{
+ ///semaphore should have been created first
+ if(!mSemaphore)
+ {
+ return BAD_VALUE;
+ }
+
+ ///Post to the semaphore
+ return ErrorUtils::posixToAndroidError(sem_post(mSemaphore));
+
+}
+
+/**
+ @brief Current semaphore count
+
+ @param none
+ @return Current count value of the semaphore
+ */
+int Semaphore::Count()
+{
+ int val;
+
+ ///semaphore should have been created first
+ if(!mSemaphore)
+ {
+ return BAD_VALUE;
+ }
+
+ ///get the value of the semaphore
+ sem_getvalue(mSemaphore, &val);
+
+ return val;
+}
+
+/**
+ @brief Wait operation with a timeout
+
+ @param timeoutMicroSecs The timeout period in micro seconds
+ @return BAD_VALUE if the semaphore is not initialized
+ @return NO_ERROR On success
+ @return One of the android error codes based on semaphore wait operation
+ */
+
+status_t Semaphore::WaitTimeout(int timeoutMicroSecs)
+{
+ status_t ret = NO_ERROR;
+
+ struct timespec timeSpec;
+ struct timeval currentTime;
+
+ ///semaphore should have been created first
+ if( NULL == mSemaphore)
+ {
+ ret = BAD_VALUE;
+ }
+
+ if ( NO_ERROR == ret )
+ {
+
+ ///setup the timeout values - the timeout is specified in seconds and nanoseconds
+ gettimeofday(&currentTime, NULL);
+ timeSpec.tv_sec = currentTime.tv_sec + ( timeoutMicroSecs / 1000000 );
+ timeSpec.tv_nsec = ( currentTime.tv_usec + ( timeoutMicroSecs % 1000000 ) ) * 1000;
+ ///normalize, since sem_timedwait() rejects tv_nsec values of one second or more
+ timeSpec.tv_sec += ( timeSpec.tv_nsec / 1000000000 );
+ timeSpec.tv_nsec %= 1000000000;
+
+ ///Wait for the timeout or signal and return the result based on whichever event occurred first
+ ret = sem_timedwait(mSemaphore, &timeSpec);
+ }
+
+ if ( NO_ERROR != ret )
+ {
+ Signal();
+ Create(0);
+ }
+
+ return ret;
+}
+
+
+};
+
+
diff --git a/utils/Semaphore.h b/utils/Semaphore.h
new file mode 100644
index 0000000..466e709
--- a/dev/null
+++ b/utils/Semaphore.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef SEMAPHORE_H
+#define SEMAPHORE_H
+
+#include <Errors.h>
+#include <semaphore.h>
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+namespace android {
+
+class Semaphore
+{
+public:
+
+ Semaphore();
+ ~Semaphore();
+
+ //Release semaphore
+ status_t Release();
+
+ ///Create the semaphore with initial count value
+ status_t Create(int count=0);
+
+ ///Wait operation
+ status_t Wait();
+
+ ///Signal operation
+ status_t Signal();
+
+ ///Current semaphore count
+ int Count();
+
+ ///Wait operation with a timeout
+ status_t WaitTimeout(int timeoutMicroSecs);
+
+private:
+ sem_t *mSemaphore;
+
+};
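+
+/*
+ Typical usage sketch (illustrative only): create the semaphore with a zero
+ count, hand it to a worker, and block until the worker signals or the
+ timeout expires, e.g.
+
+     android::Semaphore sem;
+     sem.Create(0);
+     // ... dispatch work that calls sem.Signal() when finished ...
+     if ( android::NO_ERROR != sem.WaitTimeout(500000) )   // 500 ms
+         LOGE("worker did not signal in time");
+*/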
+
+};
+
+#endif //SEMAPHORE_H