/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <new> // for std::nothrow, used by the buffer allocations below
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
//#include "libyuv.h"
#include "NV12_resize.h"
//#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>



extern "C" int amlMjpegToNV21(const uint8_t* src_frame, size_t src_size,
                              uint8_t* dst_y, int dst_stride_y,
                              uint8_t* dst_uv, int dst_stride_uv,
                              int src_width, int src_height,
                              int dst_width, int dst_height,
                              uint32_t format);

extern "C" int amlToI420(const uint8_t* src_frame, size_t src_size,
                         uint8_t* dst_y, int dst_stride_y,
                         uint8_t* dst_u, int dst_stride_u,
                         uint8_t* dst_v, int dst_stride_v,
                         int crop_x, int crop_y,
                         int src_width, int src_height,
                         int dst_width, int dst_height,
                         int rotation,
                         uint32_t format);


extern "C" int I420Scale(const uint8_t* src_y, int src_stride_y,
                         const uint8_t* src_u, int src_stride_u,
                         const uint8_t* src_v, int src_stride_v,
                         int src_width, int src_height,
                         uint8_t* dst_y, int dst_stride_y,
                         uint8_t* dst_u, int dst_stride_u,
                         uint8_t* dst_v, int dst_stride_v,
                         int dst_width, int dst_height,
                         uint32_t filtering);

#define FOURCC(a, b, c, d) ( \
    (static_cast<uint32_t>(a)) | (static_cast<uint32_t>(b) << 8) | \
    (static_cast<uint32_t>(c) << 16) | (static_cast<uint32_t>(d) << 24))
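// For example, FOURCC('M', 'J', 'P', 'G') == 0x47504A4D: the four characters
// are packed least-significant byte first, matching the V4L2/libyuv fourcc
// convention.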


#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
    Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
    * Sensor::kVoltsPerLuxSecond;

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
    Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100;  // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
    Sensor::kReadNoiseStddevBeforeGain *
    Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
    Sensor::kReadNoiseStddevAfterGain *
    Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
    Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
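// Worked example with the constants above: 33331760 ns / 1200 rows gives
// roughly 27776 ns, i.e. about 27.8 us of readout time per row.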

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

const usb_frmsize_discrete_t kUsbAvailablePictureSize[] = {
    {4128, 3096},
    {3264, 2448},
    {2592, 1944},
    {2592, 1936},
    {2560, 1920},
    {2688, 1520},
    {2048, 1536},
    {1600, 1200},
    {1920, 1088},
    {1920, 1080},
    {1440, 1080},
    {1280, 960},
    {1280, 720},
    {1024, 768},
    {960, 720},
    {720, 480},
    {640, 480},
    {320, 240},
};

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
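// E.g. sqrtf_approx(16.0f) returns a value within about 3.6% of 4.0f. The
// pointer casts are the classic bit-twiddling idiom; strictly they violate
// C++ aliasing rules, so memcpy through an int32_t would be the portable form.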

// Copy a 32-pixel-aligned-stride RGB24 source into a tightly packed
// destination, one row at a time.
void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & (~31);
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
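// E.g. ALIGN(100, 32) == 128 and ALIGN(128, 32) == 128: x is rounded up to
// the next multiple of y.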

bool IsUsbAvailablePictureSize(const usb_frmsize_discrete_t AvailablePictureSize[], uint32_t width, uint32_t height)
{
    int i;
    // The count is taken from the global table; the only caller passes
    // kUsbAvailablePictureSize, so the two stay in sync.
    int count = ARRAY_SIZE(kUsbAvailablePictureSize);
    for (i = 0; i < count; i++) {
        if ((width == AvailablePictureSize[i].width) && (height == AvailablePictureSize[i].height))
            return true;
    }
    return false;
}

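// Scale an NV21 frame down to width x height. Both src and img are assumed to
// be semi-planar: a luma plane followed immediately by the interleaved chroma
// plane, which is how the plane offsets below are computed.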
void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}

Sensor::Sensor():
    Thread(false),
    mGotVSync(false),
    mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
    mFrameDuration(kFrameDurationRange[0]),
    mGainFactor(kDefaultSensitivity),
    mNextBuffers(NULL),
    mFrameNumber(0),
    mCapturedBuffers(NULL),
    mListener(NULL),
    mTemp_buffer(NULL),
    mExitSensorThread(false),
    mIoctlSupport(0),
    msupportrotate(0),
    mTimeOutCount(0),
    mWait(false),
    mPre_width(0),
    mPre_height(0),
    mFlushFlag(false),
    mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{

}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera3::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    if (NULL == mTemp_buffer) {
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        DBG_LOGB("setOutputFormat :: pre_width = %d, pre_height = %d \n", mPre_width, mPre_height);
        // Plain new[] throws on failure rather than returning NULL, so use the
        // nothrow form to make the NULL check below meaningful.
        mTemp_buffer = new (std::nothrow) uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("first time allocate mTemp_buffer failed !");
            return -1;
        }
    }

    // Reallocate if either dimension changed; the original && test skipped the
    // reallocation when only one dimension changed.
    if ((mPre_width != vinfo->preview.format.fmt.pix.width) || (mPre_height != vinfo->preview.format.fmt.pix.height)) {
        if (mTemp_buffer) {
            delete [] mTemp_buffer;
            mTemp_buffer = NULL;
        }
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        mTemp_buffer = new (std::nothrow) uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("allocate mTemp_buffer failed !");
            return -1;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {

    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {

    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
            || (vinfo->preview.format.fmt.pix.height != height)
            //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
            ) {

        return true;

    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format into a v4l2 format and check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }
    ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
    return BAD_VALUE;
}

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;

    mTimeOutCount = 0;

    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }

    if (mTemp_buffer) {
        delete [] mTemp_buffer;
        mTemp_buffer = NULL;
    }

    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

void Sensor::sendExitSingalToSensor() {
    {
        Mutex::Autolock lock(mReadoutMutex);
        mExitSensorThread = true;
        mReadoutComplete.signal();
    }

    {
        Mutex::Autolock lock(mControlMutex);
        mVSync.signal();
    }

    {
        Mutex::Autolock lock(mReadoutMutex);
        mReadoutAvailable.signal();
    }
}

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    // If the driver reports an implausible number of levels, fall back to a
    // fixed [-4, 4] compensation range.
    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("not in[min,max], min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    // Re-center the driver's range around zero for the EV compensation API.
    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value%d mEVmin%d mEVmax%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }

            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

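// Pack the AF region's center point into a single V4L2 control value: the X
// center goes in the high 16 bits and the Y center in the low 16 bits. The
// +1000 offset maps Android's [-1000, 1000] metering-area coordinates onto an
// unsigned range.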
status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}


int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }

            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }

            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

void Sensor::setFlushFlag(bool flushFlag) {
    mFlushFlag = flushFlag;
}

status_t Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);
    CAMHAL_LOGVB("%s , E mControlMutex", __FUNCTION__);
    if (mExitSensorThread) {
        return -1;
    }

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    CAMHAL_LOGVB("%s , X mControlMutex , mGotVSync = %d", __FUNCTION__, mGotVSync);
    return mGotVSync;
}

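// Returns (despite the status_t type): true when a frame is ready and
// *captureTime is set, false on timeout, -1 when the sensor thread is
// exiting, and -2 when a flush cut the wait short.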
status_t Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mExitSensorThread) {
        return -1;
    }

    if (mCapturedBuffers == NULL) {
        int res;
        CAMHAL_LOGVB("%s , E mReadoutMutex , reltime = %d", __FUNCTION__, reltime);
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            if (mFlushFlag) {
                ALOGE("%s , return immediately , mWait = %d", __FUNCTION__, mWait);
                if (mWait) {
                    mWait = false;
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                    mReadoutComplete.signal();
                } else {
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                }
                return -2;
            } else {
                ALOGE("Error waiting for sensor readout signal: %d", res);
                return false;
            }
        }
    }
    if (mWait) {
        mWait = false;
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
        mReadoutComplete.signal();
    } else {
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X", __FUNCTION__);
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        CAMHAL_LOGVB("%s , E mControlMutex", __FUNCTION__);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        CAMHAL_LOGVB("%s , E mReadoutMutex", __FUNCTION__);
        if (mCapturedBuffers != NULL) {
            ALOGE("Waiting for readout thread to catch up!");
            mWait = true;
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X mReadoutMutex", __FUNCTION__);

    if (mExitSensorThread) {
        return false;
    }
    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
#if 0
            if (get_device_status(vinfo)) {
                listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
            }
#endif
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }

    if (mExitSensorThread) {
        return false;
    }

    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    CAMHAL_LOGVB("%s , X", __FUNCTION__);
    return true;
}

int Sensor::captureNewImageWithGe2d() {

    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;


    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;

}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    ALOGVV("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
#if PLATFORM_SDK_VERSION <= 22
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
#endif
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add an auxiliary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                uint32_t pixelfmt;
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                        b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    // The JPEG path renders into its own RGB888 buffer and puts its frame
    // back separately, so only put the preview frame back here.
    if ((!isjpeg) && (mKernelBuffer)) {
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

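// getStreamConfigurations() fills picSizes[] with 4-int records of the form
// {HAL pixel format, width, height, ANDROID_SCALER_..._OUTPUT}. Within each
// format group the insertion loops below keep the records sorted by
// decreasing width*height.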
int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    int pixelfmt;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                    frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];

                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format =\
                    HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                    frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];

                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format =\
                        HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                        frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];

                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                    if (!IsUsbAvailablePictureSize(kUsbAvailablePictureSize, frmsize.discrete.width, frmsize.discrete.height))
                        continue;
                }

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                //TODO insert in descending order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];

                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no support pixel fmt for jpeg");

    return count;

}

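// getStreamConfigurationDurations() mirrors the picSizes[] layout: each
// 4-entry record is {format, width, height, min frame duration in ns}, where
// the duration is derived from the highest frame rate the driver reports for
// that size.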
int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0; int framerate = 0; int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // Note: duration is a pointer parameter, so ARRAY_SIZE(duration) would
    // evaluate to 1; clear the caller-provided length instead.
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        //V4L2_PIX_FMT_YVU420
    };

    for (i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        /* we got all durations of each resolution for the previous format */
        if (count >= tmp_size)
            break;

        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size-3];
                fival.height = picSizes[size-2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS ||
                               fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        // For continuous/stepwise ranges the driver fills in
                        // fival.stepwise, not fival.discrete; the shortest
                        // interval (stepwise.min) gives the highest rate.
                        temp_rate = fival.stepwise.min.denominator/fival.stepwise.min.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count >= tmp_size)
                            break;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        if (framerate == 5) {
                            duration[count+3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count+3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count+3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count+3] = (int64_t)33333333L;
                        } else {
                            duration[count+3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;

}

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L;   // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }


    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }


        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            //TODO insert in descending order
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;

}

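// Simulate a raw Bayer readout: for each pixel, clip the scene's electron
// count to the saturation limit, apply gain, then add the black level and a
// uniform noise sample scaled to the combined read + photon noise stddev.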
void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

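// The simulated RGB paths below use 6-bit fixed point: scale64x is
// (64 * totalGain * 255 / kMaxRawValue), so a full-scale electron count maps
// to 255*64 before the final /64, and values are clipped to 255 on overflow.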
void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}

void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
#else
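    // Still-capture sequence: stop the preview stream, switch the device into
    // picture mode, dequeue one frame in the negotiated pixel format, convert
    // it to RGB24, then stop picture mode. USB sensors apparently need their
    // buffers released around mode switches (assumed from the releasebuf_*
    // variants used below).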
    uint8_t *src = NULL;
    int ret = 0, rotate = 0;
    uint32_t width = 0, height = 0;
    int dqTryNum = 3;

    rotate = getPictureRotate();
    width = vinfo->picture.format.fmt.pix.width;
    height = vinfo->picture.format.fmt.pix.height;

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_capturing(vinfo);
    } else {
        stop_capturing(vinfo);
    }

    ret = start_picture(vinfo, rotate);
    if (ret < 0) {
        ALOGE("start picture failed!");
    }
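    // For YUYV USB sensors the first frames after the mode switch are often
    // stale or under-exposed, so a few frames are dequeued and put back
    // before one is accepted (assumed intent of the dqTryNum loop).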
    while (1) {
        src = (uint8_t *)get_picture(vinfo);
        if ((NULL != src) && (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)) {
            while (dqTryNum > 0) {
                if (NULL != src) {
                    putback_picture_frame(vinfo);
                }
                usleep(10000);
                dqTryNum--;
                src = (uint8_t *)get_picture(vinfo);
            }
        }

        if (NULL != src) {
            if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
                if (tmp_buffer == NULL) {
                    ALOGE("new buffer failed!\n");
                    return;
                }
                if (amlMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                        width, tmp_buffer + width * height, (width + 1) / 2, width,
                        height, width, height, FOURCC('M', 'J', 'P', 'G')) != 0) {
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    // Free the scratch buffer before retrying, otherwise it
                    // leaks on every failed decode.
                    delete [] tmp_buffer;
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer, img, width, height);
                    delete [] tmp_buffer;
                    break;
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
                    yuyv422_to_rgb24(src, img, width, height);
                    break;
                } else {
                    putback_picture_frame(vinfo);
                    usleep(5000);
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
                if (vinfo->picture.buf.length == width * height * 3) {
                    memcpy(img, src, vinfo->picture.buf.length);
                } else {
                    rgb24_memcpy(img, src, width, height);
                }
                break;
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
                memcpy(img, src, vinfo->picture.buf.length);
                break;
            }
        }
    }
    ALOGD("get picture success!");

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}

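// Packed YUYV (Y0 U0 Y1 V0 per two pixels) to NV21 (full-res Y plane
// followed by interleaved VU at quarter resolution). The Y plane is copied
// first; chroma is then 2:1 vertically subsampled by averaging each pair of
// rows, the "+1" giving round-half-up.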
void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
        }
        src += width * 2 * 2;
    }

    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); //v
            *dst++ = *(src + 1 + j); //u
        }
}

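// Packed YUYV to planar YV12: a full-res Y plane, then the Cr (V) plane,
// then the Cb (U) plane, each chroma plane at quarter resolution with its
// row stride aligned to 16 bytes (the Android gralloc YV12 convention).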
void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    //width should be an even number.
    //chroma stride is aligned to 16 bytes, per the YV12 layout.
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width / 2, 16);
    c_size = c_stride * height / 2;
    cr_offset = y_size;
    cb_offset = y_size + c_size;

    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) { //one line has 2*width bytes for yuyv.
            //average the chroma of two adjacent rows, rounding up.
            *(dst + cr_offset + j / 4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j / 4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
}

status_t Sensor::force_reset_sensor() {
    DBG_LOGA("force_reset_sensor");
    status_t ret;
    mTimeOutCount = 0;
    // Run all three steps unconditionally; a forced reset should always try
    // to bring the stream back up. Only the streamOn() status is returned.
    ret = streamOff();
    ret = setBuffersFormat(vinfo);
    ret = streamOn();
    DBG_LOGB("%s , ret = %d", __FUNCTION__, ret);
    return ret;
}

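// captureNV21: if a frame was already fetched and converted for this capture
// cycle, mKernelBuffer points at NV21 data and is reused instead of
// dequeuing again. Otherwise frames are dequeued until one converts cleanly;
// after 300 empty polls at 5 ms each (~1.5 s) the sensor is force-reset.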
void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
#if 0
    // NOTE: this disabled reference path predates the StreamBuffer API;
    // 'img' and 'stride' are not defined in this scope.
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10,-21, 32, 524288};
    const int rgbToCr[] = {32,-26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
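    // Live path: NV21 frames come straight from V4L2; YUYV and MJPEG frames
    // are converted into mTemp_buffer first, so everything downstream of the
    // conversion handles plain NV21.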
    uint8_t *src;

    if (mKernelBuffer) {
        src = mKernelBuffer;
        // By the time mKernelBuffer is cached it always holds NV21 data,
        // whatever the V4L2 source format was, so NV21, YUYV and MJPEG
        // sources are handled identically here.
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21 ||
            vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV ||
            vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3/2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
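    // Dequeue loop: bail out on flush or thread exit, retry on empty
    // dequeues, and reject short USB frames before converting.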
    while (1) {
        if (mFlushFlag) {
            break;
        }

        if (mExitSensorThread) {
            break;
        }

        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
            if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                nv21_memcpy_align32(b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            memset(mTemp_buffer, 0, width * height * 3/2);
            YUYVToNV21(src, mTemp_buffer, width, height);
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
                mKernelBuffer = b.img;
            } else {
                if ((b.height % 2) != 0) {
                    DBG_LOGB("%d , b.height = %d", __LINE__, b.height);
                    b.height = b.height - 1;
                }
                ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
                mKernelBuffer = mTemp_buffer;
            }
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            memset(mTemp_buffer, 0, width * height * 3/2);
            if (amlMjpegToNV21(src, vinfo->preview.buf.bytesused, mTemp_buffer,
                    width, mTemp_buffer + width * height, (width + 1) / 2, width,
                    height, width, height, FOURCC('M', 'J', 'P', 'G')) != 0) {
                putback_frame(vinfo);
                ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__, __LINE__);
                continue;
            }
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
                mKernelBuffer = b.img;
            } else {
                if ((b.height % 2) != 0) {
                    DBG_LOGB("%d, b.height = %d", __LINE__, b.height);
                    b.height = b.height - 1;
                }
                ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
                mKernelBuffer = mTemp_buffer;
            }
        }

        break;
    }
#endif

    ALOGVV("NV21 sensor image captured");
}

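// captureYV12: same structure as captureNV21, but produces planar YV12 and
// scales with I420Scale. I420Scale resizes the three planes independently,
// so passing the V plane through its "u" arguments is harmless as long as
// source and destination use the same plane order.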
void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10,-21, 32, 524288};
    const int rgbToCr[] = {32,-26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
            ALOGI("Scale YV12 frame down \n");

            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            int ret = I420Scale(src, width,
                    src + width * height, width / 2,
                    src + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    0);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            YUYVToYV12(src, tmp_buffer, width, height);

            int ret = I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    0);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
            delete [] tmp_buffer;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            if (amlToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, 0, FOURCC('M', 'J', 'P', 'G')) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
            }

            int ret = I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    0);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");

            delete [] tmp_buffer;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
    while (1) {
        if (mFlushFlag) {
            break;
        }
        if (mExitSensorThread) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);

        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                yv12_memcpy_align32(b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            YUYVToYV12(src, b.img, width, height);
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            if (amlToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
                    b.img + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, 0, FOURCC('M', 'J', 'P', 'G')) != 0) {
                putback_frame(vinfo);
                DBG_LOGA("Decode MJPEG frame failed\n");
                continue;
            }
            mKernelBuffer = b.img;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        break;
    }
#endif
    //mKernelBuffer = src;
    ALOGVV("YV12 sensor image captured");
}

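// captureYUYV: the device is expected to deliver YUYV directly, so the frame
// is copied out as-is. Scaling a cached YUYV frame to a differently-sized
// buffer is still a TODO, which leaves the mKernelBuffer fast path
// effectively a no-op.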
void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10,-21, 32, 524288};
    const int rgbToCr[] = {32,-26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            //TODO: scale the cached YUYV frame to the requested size
            //memcpy(img, src, vinfo->preview.buf.length);
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }

    while (1) {
        if (mFlushFlag) {
            break;
        }
        if (mExitSensorThread) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            memcpy(img, src, vinfo->preview.buf.length);
            mKernelBuffer = src;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        break;
    }
#endif
    //mKernelBuffer = src;
    ALOGVV("YUYV sensor image captured");
}

void Sensor::dump(int fd) {
    String8 result;
    result = String8::format("%s, sensor preview information:\n", __FILE__);
    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height = %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);

    // pixelformat is a FOURCC; print its four characters.
    result.appendFormat("camera preview format: %.4s\n\n",
            (char *) &vinfo->preview.format.fmt.pix.pixelformat);

    write(fd, result.string(), result.size());
}

} // namespace android