author    guosong.zhou <guosong.zhou@amlogic.com> 2014-12-05 07:48:03 (GMT)
committer guosong.zhou <guosong.zhou@amlogic.com> 2014-12-05 07:48:03 (GMT)
commit    a130a20159f66595fd9f86096cfe390ee6486faa (patch)
tree      6ddb9b91a896d57d262cc7366e00af8054f40bc6
parent    064d0499f8c23c631285dc4baf0c4a6484d7b139 (diff)
Collect preview fps statistics, then dump the preview fps, format, width and height information
Change-Id: Ia3fbecd16edb2f234b3ed6b0a224fc7555c639f3
Diffstat
-rwxr-xr-x  v3/EmulatedFakeCamera3.cpp     3
-rwxr-xr-x  v3/fake-pipeline2/Sensor.cpp  63
-rwxr-xr-x  v3/fake-pipeline2/Sensor.h     4
3 files changed, 68 insertions(+), 2 deletions(-)
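What the change does, in one place: each preview capture path timestamps the first frame with gettimeofday(), counts frames as they are copied or decoded, and derives an average fps once 100 frames have elapsed; EmulatedFakeCamera3::dump() then forwards to a new Sensor::dump() that prints the fps together with the preview size and pixel format. A minimal standalone sketch of that counting scheme follows (member names and the 100-frame window mirror the patch, but the struct itself is illustrative, not HAL code):

    // Illustrative fps counter using the same windowed scheme as the patch.
    #include <sys/time.h>

    struct FpsCounter {
        struct timeval start;
        unsigned int framecount = 0;
        unsigned int fps = 0;

        void onFrame() {
            if (framecount == 0)
                gettimeofday(&start, NULL);   // timestamp the first frame of the window
            framecount++;
            if (framecount == 100) {
                struct timeval end;
                gettimeofday(&end, NULL);
                // elapsed time in milliseconds
                int interval = (end.tv_sec - start.tv_sec) * 1000 +
                               (end.tv_usec - start.tv_usec) / 1000;
                fps = (framecount * 1000) / interval;   // average over the window
                framecount = 0;                         // restart on the next frame
            }
        }
    };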
diff --git a/v3/EmulatedFakeCamera3.cpp b/v3/EmulatedFakeCamera3.cpp
index 7ec3335..8d8daf0 100755
--- a/v3/EmulatedFakeCamera3.cpp
+++ b/v3/EmulatedFakeCamera3.cpp
@@ -1363,6 +1363,7 @@ status_t EmulatedFakeCamera3::processCaptureRequest(
/** Debug methods */
void EmulatedFakeCamera3::dump(int fd) {
+ mSensor->dump(fd);
}
//flush all request
//TODO returned buffers every request held immediately with
@@ -1866,7 +1867,7 @@ status_t EmulatedFakeCamera3::constructStaticInfo() {
&supportedHardwareLevel,
/*count*/1);
- int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
+ static const uint8_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1);
uint8_t len[] = {1};
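The one-line dump() change is what makes the new sensor statistics reachable from outside the HAL: camera3_device_ops::dump(const camera3_device_t*, int fd) is the HAL entry point, and the emulated camera routes it into EmulatedFakeCamera3::dump(int fd), which now also forwards to the sensor. A hedged sketch of that forwarding (the wrapper name below is illustrative, not the actual EmulatedCamera3 glue code):

    // Illustrative wrapper; only camera3_device_ops::dump and the priv
    // pointer are taken from camera3.h, the rest is a sketch.
    static void hal_dump(const camera3_device_t *dev, int fd) {
        EmulatedFakeCamera3 *cam =
                static_cast<EmulatedFakeCamera3 *>(dev->priv);
        cam->dump(fd);   // now also emits the sensor preview statistics
    }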
diff --git a/v3/fake-pipeline2/Sensor.cpp b/v3/fake-pipeline2/Sensor.cpp
index 37adec2..d4c8927 100755
--- a/v3/fake-pipeline2/Sensor.cpp
+++ b/v3/fake-pipeline2/Sensor.cpp
@@ -37,6 +37,8 @@
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
+#include <sys/time.h>
+
#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))
@@ -205,6 +207,9 @@ status_t Sensor::setOutputFormat(int width, int height, int pixelformat)
{
int res;
+ framecount = 0;
+ fps = 0;
+
if (pixelformat == V4L2_PIX_FMT_RGB24) {
vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
vinfo->picture.format.fmt.pix.width = width;
@@ -1726,6 +1731,9 @@ void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
}
#else
uint8_t *src;
+ if (framecount == 0) {
+ gettimeofday(&mTimeStart, NULL);
+ }
if (mKernelBuffer) {
src = mKernelBuffer;
if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
@@ -1828,12 +1836,15 @@ void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
usleep(30000);
if (NULL == src)
continue;
- if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21)
+ if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
memcpy(b.img, src, vinfo->preview.buf.length);
+ framecount++;
+ }
else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
int width = vinfo->preview.format.fmt.pix.width;
int height = vinfo->preview.format.fmt.pix.height;
YUYVToNV21(src, b.img, width, height);
+ framecount++;
}
else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
int width = vinfo->preview.format.fmt.pix.width;
@@ -1844,6 +1855,9 @@ void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
putback_frame(vinfo);
continue;
DBG_LOGA("Decode MJPEG frame failed\n");
+ } else {
+ framecount++;
+ DBG_LOGA("Decode MJPEG frame success\n");
}
} else {
ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
@@ -1852,6 +1866,12 @@ void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
break;
}
#endif
+ if (framecount == 100 ) {
+ gettimeofday(&mTimeend, NULL);
+ int intreval = (mTimeend.tv_sec - mTimeStart.tv_sec) * 1000 + ((mTimeend.tv_usec - mTimeStart.tv_usec))/1000;
+ fps = (framecount*1000)/intreval;
+ framecount = 0;
+ }
mKernelBuffer = src;
ALOGVV("NV21 sensor image captured");
}
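The interval arithmetic above converts the gettimeofday() delta to milliseconds, so fps = frames * 1000 / interval_ms with integer division; for example, 100 frames delivered over 3333 ms reports 30 fps. A small sanity check using the same expressions as the hunk (helper name is illustrative):

    // Same arithmetic as the hunk above, pulled out for a quick check.
    #include <sys/time.h>

    static unsigned int fps_from(struct timeval start, struct timeval end,
                                 unsigned int frames) {
        int interval = (end.tv_sec - start.tv_sec) * 1000 +
                       (end.tv_usec - start.tv_usec) / 1000;   // milliseconds
        return (frames * 1000) / interval;                     // integer fps
    }

    // Example: start = {10, 0}, end = {13, 333000}, 100 frames -> 30 fps.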
@@ -1995,12 +2015,14 @@ void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
continue;
if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
memcpy(b.img, src, vinfo->preview.buf.length);
+ framecount++;
}
else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
int width = vinfo->preview.format.fmt.pix.width;
int height = vinfo->preview.format.fmt.pix.height;
YUYVToYV12(src, b.img, width, height);
+ framecount++;
}
else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
int width = vinfo->preview.format.fmt.pix.width;
@@ -2008,7 +2030,12 @@ void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
b.img + width * height, (width + 1) / 2, 0, 0, width, height,
width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
+ putback_frame(vinfo);
+ continue;
DBG_LOGA("Decode MJPEG frame failed\n");
+ } else {
+ framecount++;
+ DBG_LOGA("Decode MJPEG frame success\n");
}
} else {
ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
@@ -2017,6 +2044,12 @@ void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
break;
}
#endif
+ if (framecount == 100 ) {
+ gettimeofday(&mTimeend, NULL);
+ int intreval = (mTimeend.tv_sec - mTimeStart.tv_sec) * 1000 + ((mTimeend.tv_usec - mTimeStart.tv_usec))/1000;
+ fps = (framecount*1000)/intreval;
+ framecount = 0;
+ }
mKernelBuffer = src;
ALOGVV("YV12 sensor image captured");
}
@@ -2095,6 +2128,7 @@ void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
continue;
if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
memcpy(img, src, vinfo->preview.buf.length);
+ framecount++;
} else {
ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
}
@@ -2102,9 +2136,36 @@ void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
break;
}
#endif
+ if (framecount == 100 ) {
+ gettimeofday(&mTimeend, NULL);
+ int intreval = (mTimeend.tv_sec - mTimeStart.tv_sec) * 1000 + ((mTimeend.tv_usec - mTimeStart.tv_usec))/1000;
+ fps = (framecount*1000)/intreval;
+ framecount = 0;
+ }
mKernelBuffer = src;
ALOGVV("YUYV sensor image captured");
}
+void Sensor::dump(int fd) {
+ String8 result;
+ result = String8::format("camera preview information: \n");
+ result.appendFormat("camera preview fps: %d\n", fps);
+ result.appendFormat("camera preview width: %d , height =%d\n",
+ vinfo->preview.format.fmt.pix.width,vinfo->preview.format.fmt.pix.height);
+ if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
+ result.appendFormat("camera preview format: %s\n\n", "V4L2_PIX_FMT_NV21");
+ }
+ if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
+ result.appendFormat("camera preview format: %s\n\n", "V4L2_PIX_FMT_YVU420");
+ }
+ if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
+ result.appendFormat("camera preview format: %s\n\n", "V4L2_PIX_FMT_YUYV");
+ }
+ if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
+ result.appendFormat("camera preview format: %s\n\n", "V4L2_PIX_FMT_MJPEG");
+ }
+ write(fd, result.string(), result.size());
+}
+
} // namespace android
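The format reporting in Sensor::dump() is a chain of ifs over the supported preview formats; the same mapping could live in one lookup helper. A hedged alternative sketch (the helper name is hypothetical, the V4L2_PIX_FMT_* macros come from the kernel videodev2.h header this file already relies on):

    // Illustrative alternative to the if-chain in Sensor::dump().
    static const char *previewFormatName(uint32_t fourcc) {
        switch (fourcc) {
            case V4L2_PIX_FMT_NV21:   return "V4L2_PIX_FMT_NV21";
            case V4L2_PIX_FMT_YVU420: return "V4L2_PIX_FMT_YVU420";
            case V4L2_PIX_FMT_YUYV:   return "V4L2_PIX_FMT_YUYV";
            case V4L2_PIX_FMT_MJPEG:  return "V4L2_PIX_FMT_MJPEG";
            default:                  return "unknown";
        }
    }
    // result.appendFormat("camera preview format: %s\n\n",
    //         previewFormatName(vinfo->preview.format.fmt.pix.pixelformat));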
diff --git a/v3/fake-pipeline2/Sensor.h b/v3/fake-pipeline2/Sensor.h
index 2c4be1a..5ba1b57 100755
--- a/v3/fake-pipeline2/Sensor.h
+++ b/v3/fake-pipeline2/Sensor.h
@@ -177,6 +177,7 @@ class Sensor: private Thread, public virtual RefBase {
int getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size);
bool isStreaming();
bool isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat);
+ void dump(int fd);
/*
* Access to scene
*/
@@ -314,6 +315,9 @@ class Sensor: private Thread, public virtual RefBase {
//store the v4l2 info
struct VideoInfo *vinfo;
+ struct timeval mTimeStart,mTimeend;
+ unsigned int framecount;
+ unsigned int fps;
typedef enum sensor_type_e{
SENSOR_MMAP = 0,
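One design note on the new members: framecount and fps are reset in setOutputFormat() rather than at construction, so a dump taken before the first preview configuration would print whatever the members happen to hold. A hedged sketch of how the same declarations could be made well defined from the start (not part of this patch):

    // Zero-initialize the counters so dump() is safe before the first
    // setOutputFormat() call; member names are the patch's own.
    struct timeval mTimeStart, mTimeend;
    unsigned int framecount = 0;
    unsigned int fps = 0;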