From 1a2abcbc1a380c0a2d143701808373e6da3620e3 Mon Sep 17 00:00:00 2001
From: Guosong Zhou
Date: Tue, 22 Sep 2015 04:39:03 +0000
Subject: PD#112645:fix camera is occupied

Change-Id: Ia02d927e406dee3ce4e9cb77c991bfd2478c1fa5
---
diff --git a/v3/EmulatedFakeCamera3.cpp b/v3/EmulatedFakeCamera3.cpp
index 69fd2c5..2d4254d 100644
--- a/v3/EmulatedFakeCamera3.cpp
+++ b/v3/EmulatedFakeCamera3.cpp
@@ -340,31 +340,33 @@ bool EmulatedFakeCamera3::getCameraStatus()
 }

 status_t EmulatedFakeCamera3::closeCamera() {
-    CAMHAL_LOGVB("%s, %d\n", __FUNCTION__, __LINE__);
+    DBG_LOGB("%s, %d\n", __FUNCTION__, __LINE__);
+    status_t res;
     {
         Mutex::Autolock l(mLock);
         if (mStatus == STATUS_CLOSED) return OK;
-        //res = mSensor->streamOff();
-
-        res = mSensor->shutDown();
-        if (res != NO_ERROR) {
-            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
-            return res;
-        }
-        mSensor.clear();
+    }
+    mSensor->sendExitSingalToSensor();
+    res = mSensor->shutDown();
+    if (res != NO_ERROR) {
+        ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
+        return res;
+    }
+    mSensor.clear();
+    {
+        Mutex::Autolock l(mLock);
         res = mReadoutThread->shutdownJpegCompressor(this);
         if (res != OK) {
             ALOGE("%s: Unable to shut down JpegCompressor: %d", __FUNCTION__, res);
             return res;
         }
-
+        mReadoutThread->sendExitReadoutThreadSignal();
         mReadoutThread->requestExit();
     }
-    mReadoutThread->join();
-
+    DBG_LOGA("Sucess exit ReadOutThread");
     {
         Mutex::Autolock l(mLock);
         // Clear out private stream information
@@ -2633,6 +2635,7 @@ void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,

 EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
         mParent(parent), mJpegWaiting(false) {
+    mExitReadoutThread = false;
 }

 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
@@ -2703,11 +2706,19 @@ status_t EmulatedFakeCamera3::ReadoutThread::shutdownJpegCompressor(EmulatedFake
     return res;
 }

+void EmulatedFakeCamera3::ReadoutThread::sendExitReadoutThreadSignal(void) {
+    mExitReadoutThread = true;
+    mInFlightSignal.signal();
+}
+
 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
     status_t res;

     ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);

     // First wait for a request from the in-flight queue
+    if (mExitReadoutThread) {
+        return false;
+    }
     if (mCurrentRequest.settings.isEmpty()) {
         Mutex::Autolock l(mLock);
@@ -2723,6 +2734,11 @@ bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
                 return false;
             }
         }
+
+        if (mExitReadoutThread) {
+            return false;
+        }
+
         mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
         mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
         mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
@@ -2740,14 +2756,19 @@ bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
             __FUNCTION__);

     nsecs_t captureTime;
-    bool gotFrame =
+    status_t gotFrame =
             mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
-    if (!gotFrame) {
+    if (gotFrame == 0) {
         ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
                 __FUNCTION__);
         return true;
     }

+    if (gotFrame == -1) {
+        DBG_LOGA("Sensor thread had exited , here should exit ReadoutThread Loop");
+        return false;
+    }
+
     ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
             mCurrentRequest.frameNumber, captureTime);
@@ -2797,6 +2818,7 @@ bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
     result.partial_result = 1;

     // Go idle if queue is empty, before sending result
+    bool signalIdle = false;
     {
         Mutex::Autolock l(mLock);
@@ -2805,6 +2827,7 @@ bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
             signalIdle = true;
         }
     }
+    if (signalIdle) mParent->signalReadoutIdle();

     // Send it off to the framework
diff --git a/v3/EmulatedFakeCamera3.h b/v3/EmulatedFakeCamera3.h
index b98beb8..e984cc3 100644
--- a/v3/EmulatedFakeCamera3.h
+++ b/v3/EmulatedFakeCamera3.h
@@ -199,7 +199,7 @@ private:
     static const uint64_t kAvailableJpegMinDurations[];

     static const int64_t kSyncWaitTimeout = 10000000; // 10 ms
-    static const int32_t kMaxSyncTimeoutCount = 1000; // 1000 kSyncWaitTimeouts
+    static const int32_t kMaxSyncTimeoutCount = 300; // 1000 kSyncWaitTimeouts
     static const uint32_t kFenceTimeoutMs = 2000; // 2 s

     /****************************************************************************
@@ -287,6 +287,7 @@ private:
         status_t setJpegCompressorListener(EmulatedFakeCamera3 *parent);
         status_t startJpegCompressor(EmulatedFakeCamera3 *parent);
         status_t shutdownJpegCompressor(EmulatedFakeCamera3 * parent);
+        void sendExitReadoutThreadSignal(void);
       private:
         static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
@@ -307,7 +308,7 @@ private:
         Request mCurrentRequest;

         // Jpeg completion callbacks
-
+        bool mExitReadoutThread;
         Mutex mJpegLock;
         bool mJpegWaiting;
         camera3_stream_buffer mJpegHalBuffer;
diff --git a/v3/fake-pipeline2/Sensor.cpp b/v3/fake-pipeline2/Sensor.cpp
index a651a5f..7dc38fc 100644
--- a/v3/fake-pipeline2/Sensor.cpp
+++ b/v3/fake-pipeline2/Sensor.cpp
@@ -174,6 +174,7 @@ Sensor::Sensor():
         mFrameNumber(0),
         mCapturedBuffers(NULL),
         mListener(NULL),
+        mExitSensorThread(false),
         mIoctlSupport(0),
         msupportrotate(0),
         mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
@@ -450,6 +451,24 @@ status_t Sensor::shutDown() {
     return res;
 }

+void Sensor::sendExitSingalToSensor() {
+    {
+        Mutex::Autolock lock(mReadoutMutex);
+        mExitSensorThread = true;
+        mReadoutComplete.signal();
+    }
+
+    {
+        Mutex::Autolock lock(mControlMutex);
+        mVSync.signal();
+    }
+
+    {
+        Mutex::Autolock lock(mReadoutMutex);
+        mReadoutAvailable.signal();
+    }
+}
+
 Scene &Sensor::getScene() {
     return mScene;
 }
@@ -907,9 +926,12 @@ void Sensor::setFrameNumber(uint32_t frameNumber) {
     mFrameNumber = frameNumber;
 }

-bool Sensor::waitForVSync(nsecs_t reltime) {
+status_t Sensor::waitForVSync(nsecs_t reltime) {
     int res;
     Mutex::Autolock lock(mControlMutex);
+    if (mExitSensorThread) {
+        return -1;
+    }
     mGotVSync = false;
     res = mVSync.waitRelative(mControlMutex, reltime);
@@ -920,10 +942,14 @@ bool Sensor::waitForVSync(nsecs_t reltime) {
     return mGotVSync;
 }

-bool Sensor::waitForNewFrame(nsecs_t reltime,
+status_t Sensor::waitForNewFrame(nsecs_t reltime,
         nsecs_t *captureTime) {
     Mutex::Autolock lock(mReadoutMutex);
     uint8_t *ret;
+    if (mExitSensorThread) {
+        return -1;
+    }
+
     if (mCapturedBuffers == NULL) {
         int res;
         res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
@@ -970,6 +996,9 @@ bool Sensor::threadLoop() {
      * in-order in time.
      */
+    if (mExitSensorThread) {
+        return false;
+    }
     /**
      * Stage 1: Read in latest control parameters
      */
@@ -1035,6 +1064,9 @@ bool Sensor::threadLoop() {
         capturedBuffers = NULL;
     }

+    if (mExitSensorThread) {
+        return false;
+    }
     /**
      * Stage 2: Capture new image
      */
@@ -1064,6 +1096,11 @@ bool Sensor::threadLoop() {
         }
         mFramecount ++;
     }
+
+    if (mExitSensorThread) {
+        return false;
+    }
+
     if (mFramecount == 100) {
         gettimeofday(&mTimeEnd, NULL);
         int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
@@ -2030,15 +2067,18 @@ void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
             return ;
         }
         while(1){
+            if (mExitSensorThread) {
+                break;
+            }
+
             src = (uint8_t *)get_frame(vinfo);
             if (NULL == src) {
                 if (get_device_status(vinfo)) {
                     break;
-                } else {
-                    CAMHAL_LOGDA("get frame NULL, sleep 5ms");
-                    usleep(5000);
-                    continue;
                 }
+                CAMHAL_LOGDA("get frame NULL, sleep 5ms");
+                usleep(5000);
+                continue;
             }

             if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
@@ -2213,16 +2253,18 @@ void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
             return ;
         }
         while(1){
+            if (mExitSensorThread) {
+                break;
+            }
             src = (uint8_t *)get_frame(vinfo);
             if (NULL == src) {
                 if (get_device_status(vinfo)) {
                     break;
-                } else {
-                    CAMHAL_LOGDA("get frame NULL, sleep 5ms");
-                    usleep(5000);
-                    continue;
                 }
+                CAMHAL_LOGDA("get frame NULL, sleep 5ms");
+                usleep(5000);
+                continue;
             }
             if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
@@ -2237,10 +2279,12 @@ void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
                 } else {
                     yv12_memcpy_align32 (b.img, src, b.width, b.height);
                 }
+                mKernelBuffer = b.img;
             } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                 int width = vinfo->preview.format.fmt.pix.width;
                 int height = vinfo->preview.format.fmt.pix.height;
                 YUYVToYV12(src, b.img, width, height);
+                mKernelBuffer = b.img;
             } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                 int width = vinfo->preview.format.fmt.pix.width;
                 int height = vinfo->preview.format.fmt.pix.height;
@@ -2251,6 +2295,7 @@ void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
                     DBG_LOGA("Decode MJPEG frame failed\n");
                     continue;
                 }
+                mKernelBuffer = b.img;
             } else {
                 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
             }
@@ -2258,7 +2303,7 @@ void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
             break;
         }
 #endif
-        mKernelBuffer = src;
+        //mKernelBuffer = src;

     ALOGVV("YV12 sensor image captured");
 }
@@ -2330,15 +2375,17 @@ void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
         }

         while(1) {
+            if (mExitSensorThread) {
+                break;
+            }
             src = (uint8_t *)get_frame(vinfo);
             if (NULL == src) {
                 if (get_device_status(vinfo)) {
                     break;
-                } else {
-                    CAMHAL_LOGDA("get frame NULL, sleep 5ms");
-                    usleep(5000);
-                    continue;
                 }
+                CAMHAL_LOGDA("get frame NULL, sleep 5ms");
+                usleep(5000);
+                continue;
             }
             if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
@@ -2349,6 +2396,7 @@ void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
             }
             if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                 memcpy(img, src, vinfo->preview.buf.length);
+                mKernelBuffer = src;
             } else {
                 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
             }
             putback_frame(vinfo);
             break;
         }
 #endif
-        mKernelBuffer = src;
+        //mKernelBuffer = src;

     ALOGVV("YUYV sensor image captured");
 }
diff --git a/v3/fake-pipeline2/Sensor.h b/v3/fake-pipeline2/Sensor.h
index 4d8d308..bcb4f7f 100644
--- a/v3/fake-pipeline2/Sensor.h
+++ b/v3/fake-pipeline2/Sensor.h
@@ -175,7 +175,7 @@ class Sensor: private Thread, public virtual RefBase {
     /*
      * Power control
      */
-
+    void sendExitSingalToSensor();
     status_t startUp(int idx);
     status_t shutDown();
@@ -239,13 +239,13 @@ class Sensor: private Thread, public virtual RefBase {
     // Wait until the sensor outputs its next vertical sync signal, meaning it
     // is starting readout of its latest frame of data. Returns true if vertical
    // sync is signaled, false if the wait timed out.
-    bool waitForVSync(nsecs_t reltime);
+    status_t waitForVSync(nsecs_t reltime);

     // Wait until a new frame has been read out, and then return the time
     // capture started. May return immediately if a new frame has been pushed
     // since the last wait for a new frame. Returns true if new frame is
     // returned, false if timed out.
-    bool waitForNewFrame(nsecs_t reltime,
+    status_t waitForNewFrame(nsecs_t reltime,
             nsecs_t *captureTime);

     /*
@@ -333,6 +333,8 @@ class Sensor: private Thread, public virtual RefBase {
     SensorListener *mListener;
     // End of readout variables

+    bool mExitSensorThread;
+
     // Time of sensor startup, used for simulation zero-time point
     nsecs_t mStartupTime;
diff --git a/v3/fake-pipeline2/camera_hw.cpp b/v3/fake-pipeline2/camera_hw.cpp
index ec74667..6ea014c 100644
--- a/v3/fake-pipeline2/camera_hw.cpp
+++ b/v3/fake-pipeline2/camera_hw.cpp
@@ -205,7 +205,7 @@ int releasebuf_and_stop_capturing(struct VideoInfo *vinfo)
         DBG_LOGB("VIDIOC_STREAMOFF, errno=%d", errno);
         res = -1;
     }
-    if (vinfo->dev_status == -1) {
+    if (!vinfo->preview.buf.length) {
         vinfo->preview.buf.length = vinfo->tempbuflen;
     }
     for (i = 0; i < (int)vinfo->preview.rb.count; ++i) {
@@ -291,8 +291,16 @@ int putback_frame(struct VideoInfo *vinfo)
     if (vinfo->dev_status == -1)
         return 0;

-    if (ioctl(vinfo->fd, VIDIOC_QBUF, &vinfo->preview.buf) < 0)
-        DBG_LOGB("QBUF failed error=%d\n", errno);
+    if (!vinfo->preview.buf.length) {
+        vinfo->preview.buf.length = vinfo->tempbuflen;
+    }
+
+    if (ioctl(vinfo->fd, VIDIOC_QBUF, &vinfo->preview.buf) < 0) {
+        DBG_LOGB("QBUF failed error=%d\n", errno);
+        if (errno == ENODEV) {
+            set_device_status(vinfo);
+        }
+    }

     return 0;
 }
diff --git a/v3/fake-pipeline2/camera_hw.h b/v3/fake-pipeline2/camera_hw.h
index 3de2e22..aa1ee66 100755..100644
--- a/v3/fake-pipeline2/camera_hw.h
+++ b/v3/fake-pipeline2/camera_hw.h
@@ -27,6 +27,7 @@
 #define CLEAR(x) memset(&(x), 0, sizeof(x))
 #define V4L2_ROTATE_ID 0x980922 //V4L2_CID_ROTATE

+
 typedef struct FrameV4L2Info {
     struct v4l2_format format;
     struct v4l2_buffer buf;
@@ -57,6 +58,7 @@ struct VideoInfo {
     int dev_status;
 };

+
 extern int camera_open(struct VideoInfo *cam_dev);
 extern void camera_close(struct VideoInfo *vinfo);
 extern int setBuffersFormat(struct VideoInfo *cam_dev);
--
cgit
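
The heart of the change is a cooperative shutdown handshake. closeCamera() now releases mLock before tearing the sensor down, and sendExitSingalToSensor() / sendExitReadoutThreadSignal() raise a per-thread exit flag (mExitSensorThread, mExitReadoutThread) and signal every condition variable the worker might be parked on; the Sensor and ReadoutThread loops re-check the flag around each blocking stage, including the V4L2 capture while(1) loops, so a close no longer leaves a thread stuck holding the device and the camera is not reported as occupied on the next open. waitForVSync()/waitForNewFrame() also switch from bool to status_t so the readout loop can tell a timeout (0) apart from "the sensor thread has exited" (-1). The standalone C++11 sketch below shows the same flag-plus-signal idiom; it is an illustration only — Worker, mExitRequested, mFrameReady and postFrame are invented names, and the HAL itself uses android::Thread, Mutex and Condition rather than the standard library.

// Illustrative sketch of the cooperative shutdown pattern used by the patch.
// Names are hypothetical; this is not the HAL code.
#include <chrono>
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <thread>

class Worker {
  public:
    void start() { mThread = std::thread(&Worker::loop, this); }

    // Mirrors sendExitSingalToSensor(): set the exit flag under the same
    // mutex the loop waits on, then notify so a blocked wait wakes up
    // immediately instead of running out its timeout.
    void requestExit() {
        {
            std::lock_guard<std::mutex> lock(mMutex);
            mExitRequested = true;
        }
        mFrameReady.notify_all();
    }

    void join() { if (mThread.joinable()) mThread.join(); }

    // Stands in for the sensor handing a captured frame to the readout side.
    void postFrame() {
        {
            std::lock_guard<std::mutex> lock(mMutex);
            mHasFrame = true;
        }
        mFrameReady.notify_one();
    }

  private:
    void loop() {
        for (;;) {
            std::unique_lock<std::mutex> lock(mMutex);
            // Check the flag before and after every wait, as the patch does
            // at the top of threadLoop() and around each blocking stage.
            if (mExitRequested) return;
            mFrameReady.wait_for(lock, std::chrono::milliseconds(10));
            if (mExitRequested) return;
            if (mHasFrame) {
                mHasFrame = false;
                std::cout << "processed a frame\n";
            }
        }
    }

    std::thread mThread;
    std::mutex mMutex;
    std::condition_variable mFrameReady;
    bool mExitRequested = false;
    bool mHasFrame = false;
};

int main() {
    Worker w;
    w.start();
    w.postFrame();
    std::this_thread::sleep_for(std::chrono::milliseconds(30)); // let the frame drain
    w.requestExit();  // wakes the loop even if it is parked in wait_for()
    w.join();         // returns promptly, so the device could be reopened
    return 0;
}

Setting the flag while holding the same mutex the waiter uses, and only notifying afterwards, avoids the lost-wakeup race where the loop checks the flag, the closer sets it and signals, and the loop then blocks on the condition forever.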