summaryrefslogtreecommitdiff
path: root/v3/fake-pipeline2/Sensor.cpp (plain)
blob: f8660deaab975a310ad79cb229145f4f88953d02
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_NDEBUG 0
18//#define LOG_NNDEBUG 0
19#define LOG_TAG "EmulatedCamera3_Sensor"
20
21#ifdef LOG_NNDEBUG
22#define ALOGVV(...) ALOGV(__VA_ARGS__)
23#else
24#define ALOGVV(...) ((void)0)
25#endif
26
#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <cstring>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>
42
43
44#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))
45
46namespace android {
47
// Native sensor array size as {width, height} pixels.
const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L} ; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec

// Minimum vertical blanking interval appended after each simulated readout.
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
        Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
        * Sensor::kVoltsPerLuxSecond;

// Digital-count-per-electron conversion at unity gain.
const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
            Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
            Sensor::kReadNoiseStddevBeforeGain *
            Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
            Sensor::kReadNoiseStddevAfterGain *
            Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
// Note: kResolution[1] is the row count (height).
const nsecs_t Sensor::kRowReadoutTime =
            Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

// Still-capture resolutions advertised for USB (UVC) cameras, as
// {width, height} pairs; scanned linearly by IsUsbAvailablePictureSize().
const usb_frmsize_discrete_t kUsbAvailablePictureSize[] = {
        {4128, 3096},
        {3264, 2448},
        {2592, 1944},
        {2592, 1936},
        {2560, 1920},
        {2688, 1520},
        {2048, 1536},
        {1600, 1200},
        {1920, 1088},
        {1920, 1080},
        {1440, 1080},
        {1280, 960},
        {1280, 720},
        {1024, 768},
        {960, 720},
        {720, 480},
        {640, 480},
        {352, 288},
        {320, 240},
};
116
117/** A few utility functions for math, normal distributions */
118
119// Take advantage of IEEE floating-point format to calculate an approximate
120// square root. Accurate to within +-3.6%
// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
//
// Fixed to use memcpy for the float<->int reinterpretation: the original
// pointer casts (*(int32_t*)&r) violate strict aliasing and are undefined
// behavior; memcpy compiles to the same single move instruction.
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i;
    memcpy(&r_i, &r, sizeof(r_i));
    r_i = (r_i >> 1) + modifier;

    float result;
    memcpy(&result, &r_i, sizeof(result));
    return result;
}
133
// Copy an RGB24 image whose source rows are padded out to a 32-pixel
// alignment into a tightly packed destination buffer.
//
// @param dst    packed output, width*3*height bytes
// @param src    padded input; each row occupies stride*3 bytes where
//               stride = width rounded up to a multiple of 32
// @param width  visible row width in pixels
// @param height number of rows
void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & ( ~31);
    // Copy only the visible width*3 bytes of each row, skipping the
    // per-row alignment padding in the source. (Removed the unused
    // locals 'w'/'h' of the original; the loop index is now scoped.)
    for (int h = 0; h < height; h++)
    {
        memcpy( dst, src, width*3);
        dst += width*3;
        src += stride*3;
    }
}
145
// Round x up to the next multiple of y. y must be a power of 2.
static int ALIGN(int x, int y) {
    const int mask = y - 1;
    return (x + mask) & ~mask;
}
150
151bool IsUsbAvailablePictureSize(const usb_frmsize_discrete_t AvailablePictureSize[], uint32_t width, uint32_t height)
152{
153 int i;
154 bool ret = false;
155 int count = sizeof(kUsbAvailablePictureSize)/sizeof(kUsbAvailablePictureSize[0]);
156 for (i = 0; i < count; i++) {
157 if ((width == AvailablePictureSize[i].width) && (height == AvailablePictureSize[i].height)) {
158 ret = true;
159 } else {
160 continue;
161 }
162 }
163 return ret;
164}
165
// Downscale one NV21 frame from the driver's current preview size (taken
// from vinfo->preview.format) to width x height, writing into img.
// Both descriptors point the resizer at the Y plane and at the interleaved
// CbCr plane that starts right after the width*height luma bytes.
void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    // Source descriptor: geometry comes from the preview format; the
    // third field (pitch) equals the width, i.e. rows are unpadded.
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    // Destination descriptor: caller-requested size, same layout.
    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    // VT_resizeFrame_Video_opt2_lp returns false on failure; the frame is
    // left unscaled in that case and only an error is logged.
    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Sclale NV21 frame down failed!\n");
}
187
// Construct an idle sensor model. The capture thread is NOT started here;
// callers must invoke startUp() to spawn it and open the video device.
Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        // Default exposure: one full minimum-duration frame minus the
        // vertical blanking interval.
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mTemp_buffer(NULL),
        mExitSensorThread(false),
        mIoctlSupport(0),
        msupportrotate(0),
        mTimeOutCount(0),
        mWait(false),
        mPre_width(0),
        mPre_height(0),
        mFlushFlag(false),
        mSensorWorkFlag(false),
        // Simulated scene sized to the native sensor resolution.
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{

}
212
// Intentionally does not call shutDown(); owners are expected to shut the
// sensor down explicitly before destroying it.
Sensor::~Sensor() {
    //shutDown();
}
216
217status_t Sensor::startUp(int idx) {
218 ALOGV("%s: E", __FUNCTION__);
219 DBG_LOGA("ddd");
220
221 int res;
222 mCapturedBuffers = NULL;
223 res = run("EmulatedFakeCamera3::Sensor",
224 ANDROID_PRIORITY_URGENT_DISPLAY);
225
226 if (res != OK) {
227 ALOGE("Unable to start up sensor capture thread: %d", res);
228 }
229
230 vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
231 vinfo->idx = idx;
232
233 res = camera_open(vinfo);
234 if (res < 0) {
235 ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
236 }
237
238 mSensorType = SENSOR_MMAP;
239 if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
240 mSensorType = SENSOR_USB;
241 }
242
243 if (strstr((const char *)vinfo->cap.card, "share_fd")) {
244 mSensorType = SENSOR_SHARE_FD;
245 }
246
247 if (strstr((const char *)vinfo->cap.card, "front"))
248 mSensorFace = SENSOR_FACE_FRONT;
249 else if (strstr((const char *)vinfo->cap.card, "back"))
250 mSensorFace = SENSOR_FACE_BACK;
251 else
252 mSensorFace = SENSOR_FACE_NONE;
253
254 return res;
255}
256
// Return the backend type detected in startUp() (MMAP, USB, or share-fd).
sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}
// Probe which custom ioctls the driver supports and cache the result in
// mIoctlSupport (currently only the rotate control is probed).
// Returns the resulting support bitmask.
status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)|| (qc.type != V4L2_CTRL_TYPE_INTEGER)){
        // No-op (mIoctlSupport was just zeroed); kept for symmetry with
        // the success branch.
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    }else{
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if(mIoctlSupport & IOCTL_MASK_ROTATE){
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}
280
281uint32_t Sensor::getStreamUsage(int stream_type)
282{
283 uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
284
285 switch (stream_type) {
286 case CAMERA3_STREAM_OUTPUT:
287 usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
288 break;
289 case CAMERA3_STREAM_INPUT:
290 usage = GRALLOC_USAGE_HW_CAMERA_READ;
291 break;
292 case CAMERA3_STREAM_BIDIRECTIONAL:
293 usage = GRALLOC_USAGE_HW_CAMERA_READ |
294 GRALLOC_USAGE_HW_CAMERA_WRITE;
295 break;
296 }
297 if ((mSensorType == SENSOR_MMAP)
298 || (mSensorType == SENSOR_USB)) {
299 usage = (GRALLOC_USAGE_HW_TEXTURE
300 | GRALLOC_USAGE_HW_RENDER
301 | GRALLOC_USAGE_SW_READ_MASK
302 | GRALLOC_USAGE_SW_WRITE_MASK
303 );
304 }
305
306 return usage;
307}
308
309status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
310{
311 int res;
312
313 mFramecount = 0;
314 mCurFps = 0;
315 gettimeofday(&mTimeStart, NULL);
316
317 if (isjpeg) {
318 vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
319 vinfo->picture.format.fmt.pix.width = width;
320 vinfo->picture.format.fmt.pix.height = height;
321 vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
322 } else {
323 vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
324 vinfo->preview.format.fmt.pix.width = width;
325 vinfo->preview.format.fmt.pix.height = height;
326 vinfo->preview.format.fmt.pix.pixelformat = pixelformat;
327
328 res = setBuffersFormat(vinfo);
329 if (res < 0) {
330 ALOGE("set buffer failed\n");
331 return res;
332 }
333 }
334
335 if (NULL == mTemp_buffer) {
336 mPre_width = vinfo->preview.format.fmt.pix.width;
337 mPre_height = vinfo->preview.format.fmt.pix.height;
338 DBG_LOGB("setOutputFormat :: pre_width = %d, pre_height = %d \n" , mPre_width , mPre_height);
339 mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2];
340 if (mTemp_buffer == NULL) {
341 ALOGE("first time allocate mTemp_buffer failed !");
342 return -1;
343 }
344 }
345
346 if ((mPre_width != vinfo->preview.format.fmt.pix.width) && (mPre_height != vinfo->preview.format.fmt.pix.height)) {
347 if (mTemp_buffer) {
348 delete [] mTemp_buffer;
349 mTemp_buffer = NULL;
350 }
351 mPre_width = vinfo->preview.format.fmt.pix.width;
352 mPre_height = vinfo->preview.format.fmt.pix.height;
353 mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2];
354 if (mTemp_buffer == NULL) {
355 ALOGE("allocate mTemp_buffer failed !");
356 return -1;
357 }
358 }
359
360 return OK;
361
362}
363
// Start V4L2 streaming on the opened device; thin wrapper over
// start_capturing().
status_t Sensor::streamOn() {

    return start_capturing(vinfo);
}
368
// Report whether the driver is currently streaming frames.
bool Sensor::isStreaming() {

    return vinfo->isStreaming;
}
373
374bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
375{
376 if ((vinfo->preview.format.fmt.pix.width != width)
377 ||(vinfo->preview.format.fmt.pix.height != height)
378 //||(vinfo->format.fmt.pix.pixelformat != pixelformat)
379 ) {
380
381 return true;
382
383 }
384
385 return false;
386}
387status_t Sensor::streamOff() {
388 if (mSensorType == SENSOR_USB) {
389 return releasebuf_and_stop_capturing(vinfo);
390 } else {
391 return stop_capturing(vinfo);
392 }
393}
394
395int Sensor::getOutputFormat()
396{
397 struct v4l2_fmtdesc fmt;
398 int ret;
399 memset(&fmt,0,sizeof(fmt));
400 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
401
402 fmt.index = 0;
403 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
404 if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
405 return V4L2_PIX_FMT_MJPEG;
406 fmt.index++;
407 }
408
409 fmt.index = 0;
410 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
411 if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
412 return V4L2_PIX_FMT_NV21;
413 fmt.index++;
414 }
415
416 fmt.index = 0;
417 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
418 if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
419 return V4L2_PIX_FMT_YUYV;
420 fmt.index++;
421 }
422
423 ALOGE("Unable to find a supported sensor format!");
424 return BAD_VALUE;
425}
426
427/* if sensor supports MJPEG, return it first, otherwise
428 * trasform HAL format to v4l2 format then check whether
429 * it is supported.
430 */
431int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
432{
433 struct v4l2_fmtdesc fmt;
434 int ret;
435 memset(&fmt,0,sizeof(fmt));
436 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
437
438 if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
439 pixelfmt = V4L2_PIX_FMT_YVU420;
440 } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
441 pixelfmt = V4L2_PIX_FMT_NV21;
442 } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
443 pixelfmt = V4L2_PIX_FMT_YUYV;
444 } else {
445 pixelfmt = V4L2_PIX_FMT_NV21;
446 }
447
448 fmt.index = 0;
449 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
450 if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
451 return V4L2_PIX_FMT_MJPEG;
452 fmt.index++;
453 }
454
455 fmt.index = 0;
456 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
457 if (fmt.pixelformat == pixelfmt)
458 return pixelfmt;
459 fmt.index++;
460 }
461
462 fmt.index = 0;
463 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
464 if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
465 return V4L2_PIX_FMT_YUYV;
466 fmt.index++;
467 }
468 ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
469 return BAD_VALUE;
470}
471
// Record the rotation (degrees) to apply to still captures.
void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}
// Return the still-capture rotation last set via setPictureRotate().
int Sensor::getPictureRotate()
{
    return mRotateValue;
}
480status_t Sensor::shutDown() {
481 ALOGV("%s: E", __FUNCTION__);
482
483 int res;
484
485 mTimeOutCount = 0;
486
487 res = requestExitAndWait();
488 if (res != OK) {
489 ALOGE("Unable to shut down sensor capture thread: %d", res);
490 }
491
492 if (vinfo != NULL) {
493 if (mSensorType == SENSOR_USB) {
494 releasebuf_and_stop_capturing(vinfo);
495 } else {
496 stop_capturing(vinfo);
497 }
498 }
499
500 camera_close(vinfo);
501
502 if (vinfo){
503 free(vinfo);
504 vinfo = NULL;
505 }
506
507 if (mTemp_buffer) {
508 delete [] mTemp_buffer;
509 mTemp_buffer = NULL;
510 }
511
512 mSensorWorkFlag = false;
513
514 ALOGD("%s: Exit", __FUNCTION__);
515 return res;
516}
517
// Wake every condition the capture thread (or a reader) may be blocked on
// so shutdown can proceed. Each signal is raised under its own mutex, in
// separate scopes, to avoid holding both locks at once.
void Sensor::sendExitSingalToSensor() {
    {
        // Set the exit flag and release threadLoop() if it is waiting for
        // the readout side to catch up (mReadoutComplete).
        Mutex::Autolock lock(mReadoutMutex);
        mExitSensorThread = true;
        mReadoutComplete.signal();
    }

    {
        // Release anyone blocked in waitForVSync().
        Mutex::Autolock lock(mControlMutex);
        mVSync.signal();
    }

    {
        // Release anyone blocked in waitForNewFrame().
        Mutex::Autolock lock(mReadoutMutex);
        mReadoutAvailable.signal();
    }
}
535
// Expose the simulated scene so callers can adjust its parameters.
Scene &Sensor::getScene() {
    return mScene;
}
539
540int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
541{
542 int ret = 0;
543 struct v4l2_queryctrl qc;
544
545 memset(&qc, 0, sizeof(qc));
546 qc.id = V4L2_CID_ZOOM_ABSOLUTE;
547 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
548
549 if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || ( ret < 0)
550 || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
551 ret = -1;
552 *zoomMin = 0;
553 *zoomMax = 0;
554 *zoomStep = 1;
555 CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
556 } else {
557 *zoomMin = qc.minimum;
558 *zoomMax = qc.maximum;
559 *zoomStep = qc.step;
560 DBG_LOGB("zoomMin:%dzoomMax:%dzoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
561 }
562
563 return ret ;
564}
565
566int Sensor::setZoom(int zoomValue)
567{
568 int ret = 0;
569 struct v4l2_control ctl;
570
571 memset( &ctl, 0, sizeof(ctl));
572 ctl.value = zoomValue;
573 ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
574 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
575 if (ret < 0) {
576 ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
577 }
578 return ret ;
579}
580
581status_t Sensor::setEffect(uint8_t effect)
582{
583 int ret = 0;
584 struct v4l2_control ctl;
585 ctl.id = V4L2_CID_COLORFX;
586
587 switch (effect) {
588 case ANDROID_CONTROL_EFFECT_MODE_OFF:
589 ctl.value= CAM_EFFECT_ENC_NORMAL;
590 break;
591 case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
592 ctl.value= CAM_EFFECT_ENC_COLORINV;
593 break;
594 case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
595 ctl.value= CAM_EFFECT_ENC_SEPIA;
596 break;
597 default:
598 ALOGE("%s: Doesn't support effect mode %d",
599 __FUNCTION__, effect);
600 return BAD_VALUE;
601 }
602
603 DBG_LOGB("set effect mode:%d", effect);
604 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
605 if (ret < 0) {
606 CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno),ret);
607 }
608 return ret ;
609}
610
// Bounds on how many discrete driver exposure steps we are willing to map
// onto the HAL's EV-compensation range.
#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

// Query the driver's exposure-compensation range, re-centered so 0 is the
// midpoint, and report it through the out-parameters. Falls back to the
// fixed range [-4, 4] step 1/1 when the control is missing or its level
// count is outside [MIN_LEVEL_FOR_EXPOSURE, MAX_LEVEL_FOR_EXPOSURE].
// NOTE(review): the fallback path returns `true` (1) while the other paths
// return the ioctl result — callers appear to treat any >=0 as success;
// confirm before normalizing.
int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret=0;
    int level = 0;
    int middle = 0;

    memset( &qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if(ret < 0) {
        // Control not available: report the default [-4, 4] range.
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    // Number of discrete steps the driver exposes.
    if(0 < qc.step)
        level = ( qc.maximum - qc.minimum + 1 )/qc.step;

    if((level > MAX_LEVEL_FOR_EXPOSURE)
      || (level < MIN_LEVEL_FOR_EXPOSURE)){
        // Driver range is too fine or too coarse to map; use the default.
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("not in[min,max], min=%d, max=%d, def=%d\n",
            *minExp, *maxExp, *def);
        return true;
    }

    // Re-center the driver range so EV 0 is the midpoint (matches the
    // inverse mapping applied in setExposure()).
    middle = (qc.minimum+qc.maximum)/2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2;//qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}
660
661status_t Sensor::setExposure(int expCmp)
662{
663 int ret = 0;
664 struct v4l2_control ctl;
665 struct v4l2_queryctrl qc;
666
667 if(mEV == expCmp){
668 return 0;
669 }else{
670 mEV = expCmp;
671 }
672 memset(&ctl, 0, sizeof(ctl));
673 memset(&qc, 0, sizeof(qc));
674
675 qc.id = V4L2_CID_EXPOSURE;
676
677 ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
678 if (ret < 0) {
679 CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno),ret);
680 }
681
682 ctl.id = V4L2_CID_EXPOSURE;
683 ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;
684
685 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
686 if (ret < 0) {
687 CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno),ret);
688 }
689 DBG_LOGB("setExposure value%d mEVmin%d mEVmax%d\n",ctl.value, qc.minimum, qc.maximum);
690 return ret ;
691}
692
// Enumerate the driver's power-line-frequency menu and translate the
// entries into Android antibanding modes, writing up to maxCont values
// into antiBanding (index 0 is always OFF). Returns the number of modes
// written, or -1 when the control is unsupported.
// NOTE(review): this checks qc.type against V4L2_CTRL_TYPE_INTEGER while
// the sibling getAutoFocus()/getAWB() check V4L2_CTRL_TYPE_MENU — confirm
// which the driver actually reports before unifying.
int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
        DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
    } else if ( qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        // Walk every menu index in the control's range; unsupported
        // indices simply fail VIDIOC_QUERYMENU and are skipped.
        for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
                continue;
            } else {
                // Match on the driver-reported menu item name.
                if (strcmp((char*)qm.name,"50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }

            }
        }
    }

    return mode_count;
}
741
742status_t Sensor::setAntiBanding(uint8_t antiBanding)
743{
744 int ret = 0;
745 struct v4l2_control ctl;
746 ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;
747
748 switch (antiBanding) {
749 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
750 ctl.value= CAM_ANTIBANDING_OFF;
751 break;
752 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
753 ctl.value= CAM_ANTIBANDING_50HZ;
754 break;
755 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
756 ctl.value= CAM_ANTIBANDING_60HZ;
757 break;
758 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
759 ctl.value= CAM_ANTIBANDING_AUTO;
760 break;
761 default:
762 ALOGE("%s: Doesn't support ANTIBANDING mode %d",
763 __FUNCTION__, antiBanding);
764 return BAD_VALUE;
765 }
766
767 DBG_LOGB("anti banding mode:%d", antiBanding);
768 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
769 if ( ret < 0) {
770 CAMHAL_LOGDA("failed to set anti banding mode!\n");
771 return BAD_VALUE;
772 }
773 return ret;
774}
775
776status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
777{
778 int ret = 0;
779 struct v4l2_control ctl;
780 ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
781 ctl.value = ((x0 + x1) / 2 + 1000) << 16;
782 ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;
783
784 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
785 return ret;
786}
787
788
// Enumerate the driver's V4L2_CID_FOCUS_AUTO menu and translate the
// entries into Android AF modes, writing up to maxCount values into
// afMode (index 0 is always OFF). Returns the number of modes written,
// or -1 when the control is unsupported.
int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
        DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
    }else if( qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
    }else{
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        // Walk every menu index in range; indices the driver rejects via
        // VIDIOC_QUERYMENU are skipped.
        for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
                continue;
            } else {
                // Match on the driver-reported menu item name.
                if (strcmp((char*)qm.name,"auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }

            }
        }
    }

    return mode_count;
}
837
838status_t Sensor::setAutoFocuas(uint8_t afMode)
839{
840 struct v4l2_control ctl;
841 ctl.id = V4L2_CID_FOCUS_AUTO;
842
843 switch (afMode) {
844 case ANDROID_CONTROL_AF_MODE_AUTO:
845 ctl.value = CAM_FOCUS_MODE_AUTO;
846 break;
847 case ANDROID_CONTROL_AF_MODE_MACRO:
848 ctl.value = CAM_FOCUS_MODE_MACRO;
849 break;
850 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
851 ctl.value = CAM_FOCUS_MODE_CONTI_VID;
852 break;
853 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
854 ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
855 break;
856 default:
857 ALOGE("%s: Emulator doesn't support AF mode %d",
858 __FUNCTION__, afMode);
859 return BAD_VALUE;
860 }
861
862 if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
863 CAMHAL_LOGDA("failed to set camera focuas mode!\n");
864 return BAD_VALUE;
865 }
866
867 return OK;
868}
869
// Enumerate the driver's V4L2_CID_DO_WHITE_BALANCE menu and translate the
// entries into Android AWB modes, writing up to maxCount values into
// awbMode (index 0 is always OFF). Returns the number of modes written,
// or -1 when the control is unsupported.
int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
        DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
    }else if( qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
    }else{
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        // Walk every menu index in range; indices the driver rejects via
        // VIDIOC_QUERYMENU are skipped.
        for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
                continue;
            } else {
                // Match on the driver-reported menu item name.
                if (strcmp((char*)qm.name,"auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }

            }
        }
    }

    return mode_count;
}
933
934status_t Sensor::setAWB(uint8_t awbMode)
935{
936 int ret = 0;
937 struct v4l2_control ctl;
938 ctl.id = V4L2_CID_DO_WHITE_BALANCE;
939
940 switch (awbMode) {
941 case ANDROID_CONTROL_AWB_MODE_AUTO:
942 ctl.value = CAM_WB_AUTO;
943 break;
944 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
945 ctl.value = CAM_WB_INCANDESCENCE;
946 break;
947 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
948 ctl.value = CAM_WB_FLUORESCENT;
949 break;
950 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
951 ctl.value = CAM_WB_DAYLIGHT;
952 break;
953 case ANDROID_CONTROL_AWB_MODE_SHADE:
954 ctl.value = CAM_WB_SHADE;
955 break;
956 default:
957 ALOGE("%s: Emulator doesn't support AWB mode %d",
958 __FUNCTION__, awbMode);
959 return BAD_VALUE;
960 }
961 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
962 return ret;
963}
964
// Set the simulated exposure time (ns) for the next frame; picked up by
// the capture thread under mControlMutex.
void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}
970
// Set the simulated frame duration (ns) for the next frame; picked up by
// the capture thread under mControlMutex.
void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}
976
// Set the sensitivity (ISO gain) for the next frame; picked up by the
// capture thread under mControlMutex.
void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}
982
// Hand the capture thread the buffer set to fill for the next frame; the
// thread takes ownership of the pointer and clears it after consuming it.
void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}
987
// Record the framework frame number to report with the next capture.
void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}
992
// Mark that a flush is in progress so waitForNewFrame() returns
// immediately instead of blocking.
// NOTE(review): written without holding mReadoutMutex while readers check
// it under that lock — looks like a benign-by-intent race; confirm.
void Sensor::setFlushFlag(bool flushFlag) {
    mFlushFlag = flushFlag;
}
996
// Block until the capture thread signals the start of the next frame
// (VSync) or `reltime` ns elapse. Returns whether a VSync was observed
// (mGotVSync), -1 when the sensor thread is exiting, or false on a wait
// error. Note the status_t return actually carries a boolean/-1.
status_t Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);
    CAMHAL_LOGVB("%s , E mControlMutex" , __FUNCTION__);
    if (mExitSensorThread) {
        return -1;
    }

    mGotVSync = false;
    // A TIMED_OUT result is not an error: it simply leaves mGotVSync false.
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    CAMHAL_LOGVB("%s , X mControlMutex , mGotVSync = %d " , __FUNCTION__ , mGotVSync);
    return mGotVSync;
}
1014
// Block until the capture thread publishes a completed frame (or reltime
// ns elapse). On success, stores the frame's capture timestamp in
// *captureTime, consumes mCapturedBuffers, and wakes the capture thread if
// it was stalled waiting for the readout side (mWait). Returns true on a
// frame, false on timeout/error, -1 when exiting, and -2 when a flush is
// in progress.
status_t Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mExitSensorThread) {
        return -1;
    }

    if (mCapturedBuffers == NULL) {
        int res;
        CAMHAL_LOGVB("%s , E mReadoutMutex , reltime = %d" , __FUNCTION__, reltime);
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            if (mFlushFlag) {
                // Flush in progress: consume whatever state exists and
                // return -2 so the caller can bail out quickly.
                ALOGE("%s , return immediately , mWait = %d", __FUNCTION__, mWait);
                if (mWait) {
                    // Capture thread is blocked on mReadoutComplete; release it.
                    mWait = false;
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                    mReadoutComplete.signal();
                } else {
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                }
                return -2;
            } else {
                ALOGE("Error waiting for sensor readout signal: %d", res);
                return false;
            }
        }
    }
    // Frame available: hand the timestamp back and release the capture
    // thread if it was waiting for this readout to complete.
    if (mWait) {
        mWait = false;
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
        mReadoutComplete.signal();
    } else {
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X" , __FUNCTION__);
    return true;
}
1059
// Out-of-line virtual destructor anchor for the listener interface.
Sensor::SensorListener::~SensorListener() {
}
1062
// Install the listener notified of sensor events (e.g. exposure start).
// Not owned; caller manages the listener's lifetime.
void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}
1067
// Thread hook called once before threadLoop(): record the start time and
// clear the pending-capture state.
status_t Sensor::readyToRun() {
    int res;
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}
1079
1080bool Sensor::threadLoop() {
1081 /**
1082 * Sensor capture operation main loop.
1083 *
1084 * Stages are out-of-order relative to a single frame's processing, but
1085 * in-order in time.
1086 */
1087
1088 if (mExitSensorThread) {
1089 return false;
1090 }
1091
1092 /**
1093 * Stage 1: Read in latest control parameters
1094 */
1095 uint64_t exposureDuration;
1096 uint64_t frameDuration;
1097 uint32_t gain;
1098 Buffers *nextBuffers;
1099 uint32_t frameNumber;
1100 SensorListener *listener = NULL;
1101 {
1102 Mutex::Autolock lock(mControlMutex);
1103 CAMHAL_LOGVB("%s , E mControlMutex" , __FUNCTION__);
1104 exposureDuration = mExposureTime;
1105 frameDuration = mFrameDuration;
1106 gain = mGainFactor;
1107 nextBuffers = mNextBuffers;
1108 frameNumber = mFrameNumber;
1109 listener = mListener;
1110 // Don't reuse a buffer set
1111 mNextBuffers = NULL;
1112
1113 // Signal VSync for start of readout
1114 ALOGVV("Sensor VSync");
1115 mGotVSync = true;
1116 mVSync.signal();
1117 }
1118
1119 /**
1120 * Stage 3: Read out latest captured image
1121 */
1122
1123 Buffers *capturedBuffers = NULL;
1124 nsecs_t captureTime = 0;
1125
1126 nsecs_t startRealTime = systemTime();
1127 // Stagefright cares about system time for timestamps, so base simulated
1128 // time on that.
1129 nsecs_t simulatedTime = startRealTime;
1130 nsecs_t frameEndRealTime = startRealTime + frameDuration;
1131 nsecs_t frameReadoutEndRealTime = startRealTime +
1132 kRowReadoutTime * kResolution[1];
1133
1134 if (mNextCapturedBuffers != NULL) {
1135 ALOGVV("Sensor starting readout");
1136 // Pretend we're doing readout now; will signal once enough time has elapsed
1137 capturedBuffers = mNextCapturedBuffers;
1138 captureTime = mNextCaptureTime;
1139 }
1140 simulatedTime += kRowReadoutTime + kMinVerticalBlank;
1141
1142 // TODO: Move this signal to another thread to simulate readout
1143 // time properly
1144 if (capturedBuffers != NULL) {
1145 ALOGVV("Sensor readout complete");
1146 Mutex::Autolock lock(mReadoutMutex);
1147 CAMHAL_LOGVB("%s , E mReadoutMutex" , __FUNCTION__);
1148 if (mCapturedBuffers != NULL) {
1149 ALOGE("Waiting for readout thread to catch up!");
1150 mWait = true;
1151 mReadoutComplete.wait(mReadoutMutex);
1152 }
1153
1154 mCapturedBuffers = capturedBuffers;
1155 mCaptureTime = captureTime;
1156 mReadoutAvailable.signal();
1157 capturedBuffers = NULL;
1158 }
1159 CAMHAL_LOGVB("%s , X mReadoutMutex" , __FUNCTION__);
1160
1161 if (mExitSensorThread) {
1162 return false;
1163 }
1164 /**
1165 * Stage 2: Capture new image
1166 */
1167 mNextCaptureTime = simulatedTime;
1168 mNextCapturedBuffers = nextBuffers;
1169
1170 if (mNextCapturedBuffers != NULL) {
1171 if (listener != NULL) {
1172#if 0
1173 if (get_device_status(vinfo)) {
1174 listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
1175 }
1176#endif
1177 listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
1178 mNextCaptureTime);
1179 }
1180
1181 ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
1182 (float)exposureDuration/1e6, gain);
1183 mScene.setExposureDuration((float)exposureDuration/1e9);
1184 mScene.calculateScene(mNextCaptureTime);
1185
1186 if ( mSensorType == SENSOR_SHARE_FD) {
1187 captureNewImageWithGe2d();
1188 } else {
1189 captureNewImage();
1190 }
1191 mFramecount ++;
1192 }
1193
1194 if (mExitSensorThread) {
1195 return false;
1196 }
1197
1198 if (mFramecount == 100) {
1199 gettimeofday(&mTimeEnd, NULL);
1200 int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
1201 mCurFps = mFramecount/(interval/1000000.0f);
1202 memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
1203 mFramecount = 0;
1204 CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval, interval/1000000.0f, mCurFps);
1205 }
1206 ALOGVV("Sensor vertical blanking interval");
1207 nsecs_t workDoneRealTime = systemTime();
1208 const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
1209 if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
1210 timespec t;
1211 t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
1212 t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;
1213
1214 int ret;
1215 do {
1216 ret = nanosleep(&t, &t);
1217 } while (ret != 0);
1218 }
1219 nsecs_t endRealTime = systemTime();
1220 ALOGVV("Frame cycle took %d ms, target %d ms",
1221 (int)((endRealTime - startRealTime)/1000000),
1222 (int)(frameDuration / 1000000));
1223 CAMHAL_LOGVB("%s , X" , __FUNCTION__);
1224 return true;
1225};
1226
1227int Sensor::captureNewImageWithGe2d() {
1228
1229 uint32_t gain = mGainFactor;
1230 mKernelPhysAddr = 0;
1231
1232
1233 while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
1234 usleep(5000);
1235 }
1236
1237 // Might be adding more buffers, so size isn't constant
1238 for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
1239 const StreamBuffer &b = (*mNextCapturedBuffers)[i];
1240 fillStream(vinfo, mKernelPhysAddr, b);
1241 }
1242 putback_frame(vinfo);
1243 mKernelPhysAddr = 0;
1244
1245 return 0;
1246
1247}
1248
/*
 * Captures the current frame into every buffer queued in
 * mNextCapturedBuffers (memcpy path, used when GE2D is not in use),
 * dispatching on each buffer's HAL pixel format.
 *
 * For BLOB (JPEG) outputs no pixels are produced here; instead an auxiliary
 * source buffer (bAux) is appended to mNextCapturedBuffers for the JPEG
 * compressor to consume later.
 *
 * Returns 0; unknown formats are only logged.
 */
int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    ALOGVV("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
#if PLATFORM_SDK_VERSION <= 22
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
#endif
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add auxillary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d",orientation);
                uint32_t pixelfmt;
                // If the JPEG source matches the preview size and needs no
                // rotation, the aux buffer keeps the preview's own YUV
                // layout; otherwise it is captured as RGB_888 (isjpeg path).
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    // Map the V4L2 preview format onto the HAL format the
                    // capture helpers understand.
                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                // With rotation support, 90/270 degrees swap width/height
                // (and stride) of the aux buffer.
                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                // 3 bytes/px covers the largest case (RGB_888); the loop
                // above will visit this new entry on a later iteration.
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    if ((!isjpeg)&&(mKernelBuffer)) { //jpeg buffer that is rgb888 has been save in the different buffer struct;
        // whose buffer putback separately.
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}
1353
/*
 * Enumerates the driver's supported frame sizes (VIDIOC_ENUM_FRAMESIZES)
 * and fills picSizes[] with stream-configuration tuples of 4 ints each:
 *   { HAL format, width, height, ANDROID_SCALER_..._OUTPUT }.
 * Three passes: IMPLEMENTATION_DEFINED, YCbCr_420_888, then BLOB (JPEG)
 * over the first JPEG-capable source format found. Each pass keeps its own
 * section of picSizes[] sorted by area, largest first, via the
 * insertion-shift loops below.
 *
 * picSizes[]: output array; size: its capacity in ints.
 * kAvailableFormats[]: unused here.
 * Returns the number of ints written.
 *
 * NOTE(review): the capacity guard is `count >= size` before writing 4
 * entries — if size is not a multiple of 4 this can write past the end;
 * confirm callers always pass a multiple of 4.
 */
int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    int pixelfmt;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w,support_h;

    // Optional property cap on enumerated sizes; 10000x10000 = effectively
    // uncapped when the property is absent or malformed.
    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if(property_get("ro.camera.preview.MaxSize", property, NULL) > 0){
        CAMHAL_LOGDB("support Max Preview Size :%s",property);
        if(sscanf(property,"%dx%d",&support_w,&support_h)!=2){
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize,0,sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    // Pass 1: preview format, published as IMPLEMENTATION_DEFINED.
    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0){
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type

            // Widths must be 16-aligned for downstream buffer handling.
            if (0 != (frmsize.discrete.width%16))
                continue;

            if ((frmsize.discrete.width * frmsize.discrete.height) > (support_w * support_h))
                continue;
            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            // Insertion step: shift smaller-area entries down so this
            // pass's section stays sorted largest-area first. Only the
            // width/height slots move (format/type are constant here).
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];

                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    // Pass 2: same sizes again, published as YCbCr_420_888.
    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0){
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type

            if (0 != (frmsize.discrete.width%16))
                continue;

            if ((frmsize.discrete.width * frmsize.discrete.height) > (support_w * support_h))
                continue;
            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format =\
                HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                    frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            // Keep this section sorted largest-area first (see pass 1).
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];

                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    // Disabled pass for YCbCr_422_I; kept for reference.
#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0){
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type

                if (0 != (frmsize.discrete.width%16))
                    continue;

                if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format =\
                    HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                        frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];

                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    // Pass 3: JPEG (BLOB). Try each candidate source format in order and
    // keep the first one the driver can enumerate sizes for.
    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j<(int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize,0,sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0){
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type

                if (0 != (frmsize.discrete.width%16))
                    continue;

                //if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
                //    continue;

                if (count >= size)
                    break;

                // USB sources: restrict JPEG sizes to the known-good list.
                if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                    if (!IsUsbAvailablePictureSize(kUsbAvailablePictureSize, frmsize.discrete.width, frmsize.discrete.height))
                        continue;
                }

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                //TODO insert in descend order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];

                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        // A non-zero index means this source format produced sizes; stop
        // trying the remaining candidates.
        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no support pixel fmt for jpeg");

    return count;

}
1602
1603int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size, bool flag)
1604{
1605 int ret=0; int framerate=0; int temp_rate=0;
1606 struct v4l2_frmivalenum fival;
1607 int i,j=0;
1608 int count = 0;
1609 int tmp_size = size;
1610 memset(duration, 0 ,sizeof(int64_t)*ARRAY_SIZE(duration));
1611 int pixelfmt_tbl[] = {
1612 V4L2_PIX_FMT_MJPEG,
1613 V4L2_PIX_FMT_YVU420,
1614 V4L2_PIX_FMT_NV21,
1615 V4L2_PIX_FMT_RGB24,
1616 V4L2_PIX_FMT_YUYV,
1617 //V4L2_PIX_FMT_YVU420
1618 };
1619
1620 for( i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++)
1621 {
1622 /* we got all duration for each resolution for prev format*/
1623 if (count >= tmp_size)
1624 break;
1625
1626 for( ; size > 0; size-=4)
1627 {
1628 memset(&fival, 0, sizeof(fival));
1629
1630 for (fival.index = 0;;fival.index++)
1631 {
1632 fival.pixel_format = pixelfmt_tbl[i];
1633 fival.width = picSizes[size-3];
1634 fival.height = picSizes[size-2];
1635 if((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
1636 if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE){
1637 temp_rate = fival.discrete.denominator/fival.discrete.numerator;
1638 if(framerate < temp_rate)
1639 framerate = temp_rate;
1640 duration[count+0] = (int64_t)(picSizes[size-4]);
1641 duration[count+1] = (int64_t)(picSizes[size-3]);
1642 duration[count+2] = (int64_t)(picSizes[size-2]);
1643 duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
1644 j++;
1645 } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS){
1646 temp_rate = fival.discrete.denominator/fival.discrete.numerator;
1647 if(framerate < temp_rate)
1648 framerate = temp_rate;
1649 duration[count+0] = (int64_t)picSizes[size-4];
1650 duration[count+1] = (int64_t)picSizes[size-3];
1651 duration[count+2] = (int64_t)picSizes[size-2];
1652 duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
1653 j++;
1654 } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE){
1655 temp_rate = fival.discrete.denominator/fival.discrete.numerator;
1656 if(framerate < temp_rate)
1657 framerate = temp_rate;
1658 duration[count+0] = (int64_t)picSizes[size-4];
1659 duration[count+1] = (int64_t)picSizes[size-3];
1660 duration[count+2] = (int64_t)picSizes[size-2];
1661 duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
1662 j++;
1663 }
1664 } else {
1665 if (j > 0) {
1666 if (count >= tmp_size)
1667 break;
1668 duration[count+0] = (int64_t)(picSizes[size-4]);
1669 duration[count+1] = (int64_t)(picSizes[size-3]);
1670 duration[count+2] = (int64_t)(picSizes[size-2]);
1671 if (framerate == 5) {
1672 if ((!flag) && ((duration[count+0] == HAL_PIXEL_FORMAT_YCbCr_420_888)
1673 || (duration[count+0] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)))
1674 duration[count+3] = 0;
1675 else
1676 duration[count+3] = (int64_t)200000000L;
1677 } else if (framerate == 10) {
1678 if ((!flag) && ((duration[count+0] == HAL_PIXEL_FORMAT_YCbCr_420_888)
1679 || (duration[count+0] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)))
1680 duration[count+3] = 0;
1681 else
1682 duration[count+3] = (int64_t)100000000L;
1683 } else if (framerate == 15) {
1684 if ((!flag) && ((duration[count+0] == HAL_PIXEL_FORMAT_YCbCr_420_888)
1685 || (duration[count+0] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)))
1686 duration[count+3] = 0;
1687 else
1688 duration[count+3] = (int64_t)66666666L;
1689 } else if (framerate == 30) {
1690 if ((!flag) && ((duration[count+0] == HAL_PIXEL_FORMAT_YCbCr_420_888)
1691 || (duration[count+0] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)))
1692 duration[count+3] = 0;
1693 else
1694 duration[count+3] = (int64_t)33333333L;
1695 } else {
1696 if ((!flag) && ((duration[count+0] == HAL_PIXEL_FORMAT_YCbCr_420_888)
1697 || (duration[count+0] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)))
1698 duration[count+3] = 0;
1699 else
1700 duration[count+3] = (int64_t)66666666L;
1701 }
1702 count += 4;
1703 break;
1704 } else {
1705 break;
1706 }
1707 }
1708 }
1709 j=0;
1710 }
1711 size = tmp_size;
1712 }
1713
1714 return count;
1715
1716}
1717
1718int64_t Sensor::getMinFrameDuration()
1719{
1720 int64_t tmpDuration = 66666666L; // 1/15 s
1721 int64_t frameDuration = 66666666L; // 1/15 s
1722 struct v4l2_frmivalenum fival;
1723 int i,j;
1724
1725 uint32_t pixelfmt_tbl[]={
1726 V4L2_PIX_FMT_MJPEG,
1727 V4L2_PIX_FMT_YUYV,
1728 V4L2_PIX_FMT_NV21,
1729 };
1730 struct v4l2_frmsize_discrete resolution_tbl[]={
1731 {1920, 1080},
1732 {1280, 960},
1733 {640, 480},
1734 {320, 240},
1735 };
1736
1737 for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
1738 for (j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++) {
1739 memset(&fival, 0, sizeof(fival));
1740 fival.index = 0;
1741 fival.pixel_format = pixelfmt_tbl[i];
1742 fival.width = resolution_tbl[j].width;
1743 fival.height = resolution_tbl[j].height;
1744
1745 while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
1746 if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
1747 tmpDuration =
1748 fival.discrete.numerator * 1000000000L / fival.discrete.denominator;
1749
1750 if (frameDuration > tmpDuration)
1751 frameDuration = tmpDuration;
1752 } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
1753 frameDuration =
1754 fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
1755 break;
1756 } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
1757 frameDuration =
1758 fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
1759 break;
1760 }
1761 fival.index++;
1762 }
1763 }
1764
1765 if (fival.index > 0) {
1766 break;
1767 }
1768 }
1769
1770 CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
1771 return frameDuration;
1772}
1773
/*
 * Enumerates supported frame sizes for the preview (or still-capture)
 * pixel format and writes them into picSizes[] as width/height pairs,
 * kept roughly sorted largest-area first.
 *
 * picSizes[]: output array of int32 pairs; size: its capacity in ints.
 * preview:   true -> use the preview YUV format; false -> use RGB24 (the
 *            still-capture source) where applicable.
 * Returns the number of ints written (2 per size).
 */
int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w,support_h;
    int preview_fmt;

    // Optional property cap on sizes; 10000x10000 = effectively uncapped.
    // NOTE(review): sscanf uses %d with unsigned int targets — works in
    // practice but is a format mismatch; confirm/clean up.
    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if(property_get("ro.camera.preview.MaxSize", property, NULL) > 0){
        CAMHAL_LOGDB("support Max Preview Size :%s",property);
        if(sscanf(property,"%dx%d",&support_w,&support_h)!=2){
            support_w = 10000;
            support_h = 10000;
        }
    }


    memset(&frmsize,0,sizeof(frmsize));
    // Hard-coded to NV21 rather than the live output format (see comment).
    preview_fmt = V4L2_PIX_FMT_NV21;//getOutputFormat();

    // Pick the V4L2 format to enumerate: the preview format itself for
    // preview sizes, RGB24 for still-capture sizes.
    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0){
            DBG_LOGB("index=%d, break\n", i);
            break;
        }


        if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type

            // Widths must be 16-aligned for downstream buffer handling.
            if (0 != (frmsize.discrete.width%16))
                continue;

            if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            //TODO insert in descend order
            // Single-step bubble: only swaps with the immediately preceding
            // pair, so the list is not guaranteed fully sorted (hence TODO).
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;

}
1857
1858bool Sensor::get_sensor_status() {
1859 return mSensorWorkFlag;
1860}
1861
/*
 * Synthesizes a Bayer RAW16 frame from the procedural scene model.
 *
 * img:    destination buffer, 16 bits per pixel.
 * gain:   sensor gain in 1/100 units (100 = 1.0x), scaled by
 *         kBaseGainFactor.
 * stride: row stride of img in pixels.
 *
 * Adds simulated photon/read noise (uniform, scaled to approximate the
 * Gaussian stddev) and the black level, clamping to the saturation limits.
 */
void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0,0);
    for (unsigned int y = 0; y < kResolution[1]; y++ ) {
        // Even rows read R/Gr, odd rows Gb/B (2x2 Bayer tile).
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}
1902
1903void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
1904 float totalGain = gain/100.0 * kBaseGainFactor;
1905 // In fixed-point math, calculate total scaling from electrons to 8bpp
1906 int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1907 uint32_t inc = kResolution[0] / stride;
1908
1909 for (unsigned int y = 0, outY = 0; y < kResolution[1]; y+=inc, outY++ ) {
1910 uint8_t *px = img + outY * stride * 4;
1911 mScene.setReadoutPixel(0, y);
1912 for (unsigned int x = 0; x < kResolution[0]; x+=inc) {
1913 uint32_t rCount, gCount, bCount;
1914 // TODO: Perfect demosaicing is a cheat
1915 const uint32_t *pixel = mScene.getPixelElectrons();
1916 rCount = pixel[Scene::R] * scale64x;
1917 gCount = pixel[Scene::Gr] * scale64x;
1918 bCount = pixel[Scene::B] * scale64x;
1919
1920 *px++ = rCount < 255*64 ? rCount / 64 : 255;
1921 *px++ = gCount < 255*64 ? gCount / 64 : 255;
1922 *px++ = bCount < 255*64 ? bCount / 64 : 255;
1923 *px++ = 255;
1924 for (unsigned int j = 1; j < inc; j++)
1925 mScene.getPixelElectrons();
1926 }
1927 // TODO: Handle this better
1928 //simulatedTime += kRowReadoutTime;
1929 }
1930 ALOGVV("RGBA sensor image captured");
1931}
1932
1933void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
1934#if 0
1935 float totalGain = gain/100.0 * kBaseGainFactor;
1936 // In fixed-point math, calculate total scaling from electrons to 8bpp
1937 int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1938 uint32_t inc = kResolution[0] / stride;
1939
1940 for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
1941 mScene.setReadoutPixel(0, y);
1942 uint8_t *px = img + outY * stride * 3;
1943 for (unsigned int x = 0; x < kResolution[0]; x += inc) {
1944 uint32_t rCount, gCount, bCount;
1945 // TODO: Perfect demosaicing is a cheat
1946 const uint32_t *pixel = mScene.getPixelElectrons();
1947 rCount = pixel[Scene::R] * scale64x;
1948 gCount = pixel[Scene::Gr] * scale64x;
1949 bCount = pixel[Scene::B] * scale64x;
1950
1951 *px++ = rCount < 255*64 ? rCount / 64 : 255;
1952 *px++ = gCount < 255*64 ? gCount / 64 : 255;
1953 *px++ = bCount < 255*64 ? bCount / 64 : 255;
1954 for (unsigned int j = 1; j < inc; j++)
1955 mScene.getPixelElectrons();
1956 }
1957 // TODO: Handle this better
1958 //simulatedTime += kRowReadoutTime;
1959 }
1960#else
1961 uint8_t *src = NULL;
1962 int ret = 0, rotate = 0;
1963 uint32_t width = 0, height = 0;
1964 int dqTryNum = 3;
1965
1966 rotate = getPictureRotate();
1967 width = vinfo->picture.format.fmt.pix.width;
1968 height = vinfo->picture.format.fmt.pix.height;
1969
1970 if (mSensorType == SENSOR_USB) {
1971 releasebuf_and_stop_capturing(vinfo);
1972 } else {
1973 stop_capturing(vinfo);
1974 }
1975
1976 ret = start_picture(vinfo,rotate);
1977 if (ret < 0)
1978 {
1979 ALOGD("start picture failed!");
1980 }
1981 while(1)
1982 {
1983 src = (uint8_t *)get_picture(vinfo);
1984 if (NULL == src) {
1985 usleep(10000);
1986 continue;
1987 }
1988 if ((NULL != src) && (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)) {
1989 while (dqTryNum > 0) {
1990 if (NULL != src) {
1991 putback_picture_frame(vinfo);
1992 }
1993 usleep(10000);
1994 dqTryNum --;
1995 src = (uint8_t *)get_picture(vinfo);
1996 }
1997 }
1998
1999 if (NULL != src) {
2000 mSensorWorkFlag = true;
2001 if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2002 uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
2003 if ( tmp_buffer == NULL) {
2004 ALOGE("new buffer failed!\n");
2005 return;
2006 }
2007#if ANDROID_PLATFORM_SDK_VERSION > 23
2008 uint8_t *vBuffer = new uint8_t[width * height / 4];
2009 if (vBuffer == NULL)
2010 ALOGE("alloc temperary v buffer failed\n");
2011 uint8_t *uBuffer = new uint8_t[width * height / 4];
2012 if (uBuffer == NULL)
2013 ALOGE("alloc temperary u buffer failed\n");
2014
2015 if (ConvertToI420(src, vinfo->picture.buf.bytesused, tmp_buffer, width, uBuffer, (width + 1) / 2,
2016 vBuffer, (width + 1) / 2, 0, 0, width, height,
2017 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2018 DBG_LOGA("Decode MJPEG frame failed\n");
2019 putback_picture_frame(vinfo);
2020 usleep(5000);
2021 delete vBuffer;
2022 delete uBuffer;
2023 } else {
2024
2025 uint8_t *pUVBuffer = tmp_buffer + width * height;
2026 for (int i = 0; i < width * height / 4; i++) {
2027 *pUVBuffer++ = *(vBuffer + i);
2028 *pUVBuffer++ = *(uBuffer + i);
2029 }
2030
2031 delete vBuffer;
2032 delete uBuffer;
2033 nv21_to_rgb24(tmp_buffer,img,width,height);
2034 if (tmp_buffer != NULL)
2035 delete [] tmp_buffer;
2036 break;
2037 }
2038#else
2039 if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
2040 width, tmp_buffer + width * height, (width + 1) / 2, width,
2041 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2042 DBG_LOGA("Decode MJPEG frame failed\n");
2043 putback_picture_frame(vinfo);
2044 usleep(5000);
2045 } else {
2046 nv21_to_rgb24(tmp_buffer,img,width,height);
2047 if (tmp_buffer != NULL)
2048 delete [] tmp_buffer;
2049 break;
2050 }
2051#endif
2052 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2053 if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
2054 yuyv422_to_rgb24(src,img,width,height);
2055 break;
2056 } else {
2057 putback_picture_frame(vinfo);
2058 usleep(5000);
2059 }
2060 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
2061 if (vinfo->picture.buf.length == width * height * 3) {
2062 memcpy(img, src, vinfo->picture.buf.length);
2063 } else {
2064 rgb24_memcpy(img, src, width, height);
2065 }
2066 break;
2067 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2068 memcpy(img, src, vinfo->picture.buf.length);
2069 break;
2070 }
2071 }
2072 }
2073 ALOGD("get picture success !");
2074
2075 if (mSensorType == SENSOR_USB) {
2076 releasebuf_and_stop_picture(vinfo);
2077 } else {
2078 stop_picture(vinfo);
2079 }
2080
2081#endif
2082}
2083
2084void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
2085{
2086 for (int i = 0; i < width * height * 2; i += 2) {
2087 *dst++ = *(src + i);
2088 }
2089
2090 for (int y = 0; y < height - 1; y +=2) {
2091 for (int j = 0; j < width * 2; j += 4) {
2092 *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
2093 *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
2094 }
2095 src += width * 2 * 2;
2096 }
2097
2098 if (height & 1)
2099 for (int j = 0; j < width * 2; j += 4) {
2100 *dst++ = *(src + 3 + j); //v
2101 *dst++ = *(src + 1 + j); //u
2102 }
2103}
2104
/*
 * Converts packed YUYV (YUY2) into YV12: full-resolution Y plane, then a
 * Cr (V) plane and a Cb (U) plane, each at half resolution with rows
 * aligned via the project ALIGN macro. Chroma is averaged over each
 * vertical pair of source lines.
 *
 * NOTE(review): local `stride` is declared but never used.
 */
void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    //width should be an even number.
    //uv ALIGN 32.
    int i,j,stride,c_stride,c_size,y_size,cb_offset,cr_offset;
    unsigned char *dst_copy,*src_copy;

    // Remember the plane bases; the copy loops advance the pointers.
    dst_copy = dst;
    src_copy = src;

    // Plane geometry: Y plane, then V (cr) at y_size, then U (cb) after it.
    y_size = width*height;
    c_stride = ALIGN(width/2, 16);
    c_size = c_stride * height/2;
    cr_offset = y_size;
    cb_offset = y_size+c_size;

    // Pass 1: every even source byte is luma — copy into the Y plane.
    for(i=0;i< y_size;i++){
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    // Pass 2: average each vertical pair of lines into the V and U planes.
    for(i=0;i<height;i+=2){
        for(j=1;j<width*2;j+=4){//one line has 2*width bytes for yuyv.
            //ceil(u1+u2)/2
            *(dst+cr_offset+j/4)= (*(src+j+2) + *(src+j+2+width*2) + 1)/2;
            *(dst+cb_offset+j/4)= (*(src+j) + *(src+j+width*2) + 1)/2;
        }
        dst += c_stride;
        src += width*4;
    }
}
2139
2140status_t Sensor::force_reset_sensor() {
2141 DBG_LOGA("force_reset_sensor");
2142 status_t ret;
2143 mTimeOutCount = 0;
2144 ret = streamOff();
2145 ret = setBuffersFormat(vinfo);
2146 ret = streamOn();
2147 DBG_LOGB("%s , ret = %d", __FUNCTION__, ret);
2148 return ret;
2149}
2150
2151void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
2152#if 0
2153 float totalGain = gain/100.0 * kBaseGainFactor;
2154 // Using fixed-point math with 6 bits of fractional precision.
2155 // In fixed-point math, calculate total scaling from electrons to 8bpp
2156 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2157 // In fixed-point math, saturation point of sensor after gain
2158 const int saturationPoint = 64 * 255;
2159 // Fixed-point coefficients for RGB-YUV transform
2160 // Based on JFIF RGB->YUV transform.
2161 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2162 const int rgbToY[] = {19, 37, 7};
2163 const int rgbToCb[] = {-10,-21, 32, 524288};
2164 const int rgbToCr[] = {32,-26, -5, 524288};
2165 // Scale back to 8bpp non-fixed-point
2166 const int scaleOut = 64;
2167 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2168
2169 uint32_t inc = kResolution[0] / stride;
2170 uint32_t outH = kResolution[1] / inc;
2171 for (unsigned int y = 0, outY = 0;
2172 y < kResolution[1]; y+=inc, outY++) {
2173 uint8_t *pxY = img + outY * stride;
2174 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2175 mScene.setReadoutPixel(0,y);
2176 for (unsigned int outX = 0; outX < stride; outX++) {
2177 int32_t rCount, gCount, bCount;
2178 // TODO: Perfect demosaicing is a cheat
2179 const uint32_t *pixel = mScene.getPixelElectrons();
2180 rCount = pixel[Scene::R] * scale64x;
2181 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2182 gCount = pixel[Scene::Gr] * scale64x;
2183 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2184 bCount = pixel[Scene::B] * scale64x;
2185 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2186
2187 *pxY++ = (rgbToY[0] * rCount +
2188 rgbToY[1] * gCount +
2189 rgbToY[2] * bCount) / scaleOutSq;
2190 if (outY % 2 == 0 && outX % 2 == 0) {
2191 *pxVU++ = (rgbToCr[0] * rCount +
2192 rgbToCr[1] * gCount +
2193 rgbToCr[2] * bCount +
2194 rgbToCr[3]) / scaleOutSq;
2195 *pxVU++ = (rgbToCb[0] * rCount +
2196 rgbToCb[1] * gCount +
2197 rgbToCb[2] * bCount +
2198 rgbToCb[3]) / scaleOutSq;
2199 }
2200 for (unsigned int j = 1; j < inc; j++)
2201 mScene.getPixelElectrons();
2202 }
2203 }
2204#else
2205 uint8_t *src;
2206
2207 if (mKernelBuffer) {
2208 src = mKernelBuffer;
2209 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2210 uint32_t width = vinfo->preview.format.fmt.pix.width;
2211 uint32_t height = vinfo->preview.format.fmt.pix.height;
2212 if ((width == b.width) && (height == b.height)) {
2213 memcpy(b.img, src, b.width * b.height * 3/2);
2214 } else {
2215 ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2216 }
2217 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2218 uint32_t width = vinfo->preview.format.fmt.pix.width;
2219 uint32_t height = vinfo->preview.format.fmt.pix.height;
2220
2221 if ((width == b.width) && (height == b.height)) {
2222 memcpy(b.img, src, b.width * b.height * 3/2);
2223 } else {
2224 ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2225 }
2226 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2227 uint32_t width = vinfo->preview.format.fmt.pix.width;
2228 uint32_t height = vinfo->preview.format.fmt.pix.height;
2229
2230 if ((width == b.width) && (height == b.height)) {
2231 memcpy(b.img, src, b.width * b.height * 3/2);
2232 } else {
2233 ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2234 }
2235 } else {
2236 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2237 }
2238 return ;
2239 }
2240 while(1){
2241 if (mFlushFlag) {
2242 break;
2243 }
2244
2245 if (mExitSensorThread) {
2246 break;
2247 }
2248
2249 src = (uint8_t *)get_frame(vinfo);
2250 if (NULL == src) {
2251 if (get_device_status(vinfo)) {
2252 break;
2253 }
2254 ALOGVV("get frame NULL, sleep 5ms");
2255 usleep(5000);
2256 mTimeOutCount++;
2257 if (mTimeOutCount > 300) {
2258 DBG_LOGA("force sensor reset.\n");
2259 force_reset_sensor();
2260 }
2261 continue;
2262 }
2263 mTimeOutCount = 0;
2264 if (mSensorType == SENSOR_USB) {
2265 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2266 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2267 DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2268 putback_frame(vinfo);
2269 continue;
2270 }
2271 }
2272 }
2273 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2274 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2275 memcpy(b.img, src, vinfo->preview.buf.length);
2276 } else {
2277 nv21_memcpy_align32 (b.img, src, b.width, b.height);
2278 }
2279 mKernelBuffer = b.img;
2280 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2281 uint32_t width = vinfo->preview.format.fmt.pix.width;
2282 uint32_t height = vinfo->preview.format.fmt.pix.height;
2283 memset(mTemp_buffer, 0 , width * height * 3/2);
2284 YUYVToNV21(src, mTemp_buffer, width, height);
2285 if ((width == b.width) && (height == b.height)) {
2286 memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
2287 mKernelBuffer = b.img;
2288 } else {
2289 if ((b.height % 2) != 0) {
2290 DBG_LOGB("%d , b.height = %d", __LINE__, b.height);
2291 b.height = b.height - 1;
2292 }
2293 ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
2294 mKernelBuffer = mTemp_buffer;
2295 }
2296 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2297 uint32_t width = vinfo->preview.format.fmt.pix.width;
2298 uint32_t height = vinfo->preview.format.fmt.pix.height;
2299 memset(mTemp_buffer, 0 , width * height * 3/2);
2300#if ANDROID_PLATFORM_SDK_VERSION > 23
2301 uint8_t *vBuffer = new uint8_t[width * height / 4];
2302 if (vBuffer == NULL)
2303 ALOGE("alloc temperary v buffer failed\n");
2304 uint8_t *uBuffer = new uint8_t[width * height / 4];
2305 if (uBuffer == NULL)
2306 ALOGE("alloc temperary u buffer failed\n");
2307
2308 if (ConvertToI420(src, vinfo->preview.buf.bytesused, mTemp_buffer, width, uBuffer, (width + 1) / 2,
2309 vBuffer, (width + 1) / 2, 0, 0, width, height,
2310 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2311 DBG_LOGA("Decode MJPEG frame failed\n");
2312 putback_frame(vinfo);
2313 ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__ , __LINE__);
2314 continue;
2315 }
2316 uint8_t *pUVBuffer = mTemp_buffer + width * height;
2317 for (int i = 0; i < width * height / 4; i++) {
2318 *pUVBuffer++ = *(vBuffer + i);
2319 *pUVBuffer++ = *(uBuffer + i);
2320 }
2321 delete vBuffer;
2322 delete uBuffer;
2323#else
2324 if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, mTemp_buffer,
2325 width, mTemp_buffer + width * height, (width + 1) / 2, width,
2326 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2327 putback_frame(vinfo);
2328 ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__ , __LINE__);
2329 continue;
2330 }
2331#endif
2332 if ((width == b.width) && (height == b.height)) {
2333 memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
2334 mKernelBuffer = b.img;
2335 } else {
2336 if ((b.height % 2) != 0) {
2337 DBG_LOGB("%d, b.height = %d", __LINE__, b.height);
2338 b.height = b.height - 1;
2339 }
2340 ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
2341 mKernelBuffer = mTemp_buffer;
2342 }
2343 }
2344 mSensorWorkFlag = true;
2345 break;
2346 }
2347#endif
2348
2349 ALOGVV("NV21 sensor image captured");
2350}
2351
// Fills StreamBuffer |b| with a YV12 (YCrCb 4:2:0 planar, Y/Cr/Cb order)
// frame.
//
// Two modes:
//  - mKernelBuffer != NULL: a frame for this request was already captured
//    (cached below); scale/convert the cached frame into |b| and return.
//  - mKernelBuffer == NULL: block until a V4L2 frame is dequeued, convert
//    it to YV12 directly into |b|, and cache b.img in mKernelBuffer.
//
// |gain| is only consumed by the disabled scene-simulation path below.
// NOTE(review): libyuv::I420Scale is called with the Cr/Cb plane pointers in
// "U/V" argument positions, but consistently for both source and destination,
// so the plane mapping is preserved for YV12 data.
void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
#if 0
    // Disabled reference implementation: synthesizes the frame from the
    // simulated scene instead of reading a real V4L2 device.
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10,-21, 32, 524288};
    const int rgbToCr[] = {32,-26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y+=inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0,y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                    rgbToY[1] * gCount +
                    rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                        rgbToCr[1] * gCount +
                        rgbToCr[2] * bCount +
                        rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                        rgbToCb[1] * gCount +
                        rgbToCb[2] * bCount +
                        rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        // Fast path: re-use the cached frame for this request.
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            // Cached data is already YV12: just scale to the output size.
            //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
            ALOGI("Sclale YV12 frame down \n");

            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            int ret = libyuv::I420Scale(src, width,
                    src + width * height, width / 2,
                    src + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Sclale YV12 frame down failed!\n");
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            // Cached YUYV: convert to YV12 in a temporary, then scale.
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if ( tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            YUYVToYV12(src, tmp_buffer, width, height);

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Sclale YV12 frame down failed!\n");
            delete [] tmp_buffer;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            // Cached MJPEG: decode to YV12 (U plane written after V, giving
            // Y/Cr/Cb order) in a temporary, then scale to the output size.
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if ( tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
            }

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Sclale YV12 frame down failed!\n");

            delete [] tmp_buffer;
        } else {
            ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return ;
    }
    // Slow path: dequeue a fresh frame from the V4L2 device.
    while(1){
        if (mFlushFlag) {
            break;
        }
        if (mExitSensorThread) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);

        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            // No frame yet: back off 5ms; after ~1.5s of failures force a
            // full stream restart.
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            // USB webcams can deliver truncated buffers for non-MJPEG
            // formats; drop those and retry.
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            // Already YV12: straight copy (or 32-byte-stride-aware copy).
            if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                yv12_memcpy_align32 (b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            // Convert packed YUYV to planar YV12 directly into the output.
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            YUYVToYV12(src, b.img, width, height);
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            // Decode MJPEG to YV12 directly into the output (U plane written
            // after V, giving Y/Cr/Cb order); on failure requeue and retry.
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
                    b.img + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                DBG_LOGA("Decode MJPEG frame failed\n");
                continue;
            }
            mKernelBuffer = b.img;
        } else {
            ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        mSensorWorkFlag = true;
        break;
    }
#endif
    //mKernelBuffer = src;
    ALOGVV("YV12 sensor image captured");
}
2548
// Fills |img| with a packed YUYV (YUY2 4:2:2) frame read from the V4L2
// device.
//
// Two modes:
//  - mKernelBuffer != NULL: a frame was already captured for this request.
//    Scaling of cached YUYV is not implemented (TODO below), so nothing is
//    written to |img| on this path.
//  - mKernelBuffer == NULL: block until a V4L2 YUYV frame is dequeued and
//    copy it into |img| unmodified.
//
// |gain| and |stride| are only consumed by the disabled scene-simulation
// path below.
void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    // Disabled reference implementation: synthesizes the frame from the
    // simulated scene instead of reading a real V4L2 device.
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10,-21, 32, 524288};
    const int rgbToCr[] = {32,-26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y+=inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0,y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                    rgbToY[1] * gCount +
                    rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                        rgbToCr[1] * gCount +
                        rgbToCr[2] * bCount +
                        rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                        rgbToCb[1] * gCount +
                        rgbToCb[2] * bCount +
                        rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        // Fast path: cached frame exists, but YUYV rescaling is not
        // implemented, so this path intentionally leaves |img| untouched.
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            //TODO YUYV scale
            //memcpy(img, src, vinfo->preview.buf.length);

        } else
            ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);

        return ;
    }

    // Slow path: dequeue a fresh frame from the V4L2 device.
    while(1) {
        if (mFlushFlag) {
            break;
        }
        if (mExitSensorThread) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            // No frame yet: back off 5ms; after ~1.5s of failures force a
            // full stream restart.
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            // USB webcams can deliver truncated buffers for non-MJPEG
            // formats; drop those and retry.
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            memcpy(img, src, vinfo->preview.buf.length);
            // NOTE(review): unlike the NV21/YV12 paths (which cache the
            // converted output buffer), this caches the raw driver buffer
            // pointer — assumed valid until the frame is requeued; confirm.
            mKernelBuffer = src;
        } else {
            ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        mSensorWorkFlag = true;
        break;
    }
#endif
    //mKernelBuffer = src;
    ALOGVV("YUYV sensor image captured");
}
2659
2660void Sensor::dump(int fd) {
2661 String8 result;
2662 result = String8::format("%s, sensor preview information: \n", __FILE__);
2663 result.appendFormat("camera preview fps: %.2f\n", mCurFps);
2664 result.appendFormat("camera preview width: %d , height =%d\n",
2665 vinfo->preview.format.fmt.pix.width,vinfo->preview.format.fmt.pix.height);
2666
2667 result.appendFormat("camera preview format: %.4s\n\n",
2668 (char *) &vinfo->preview.format.fmt.pix.pixelformat);
2669
2670 write(fd, result.string(), result.size());
2671}
2672
2673} // namespace android
2674
2675