path: root/v3/fake-pipeline2/Sensor.cpp
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_NDEBUG 0
18//#define LOG_NNDEBUG 0
19#define LOG_TAG "EmulatedCamera3_Sensor"
20
21#ifdef LOG_NNDEBUG
22#define ALOGVV(...) ALOGV(__VA_ARGS__)
23#else
24#define ALOGVV(...) ((void)0)
25#endif
26
27#include <utils/Log.h>
28#include <cutils/properties.h>
29
30#include "../EmulatedFakeCamera2.h"
31#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <cstring>
34#include <hardware/camera3.h>
35#include "system/camera_metadata.h"
36#include "libyuv.h"
37#include "NV12_resize.h"
38#include "libyuv/scale.h"
39#include "ge2d_stream.h"
40#include "util.h"
41#include <sys/time.h>
42
43
44#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))
45
46namespace android {
47
48const unsigned int Sensor::kResolution[2] = {1600, 1200};
49
50const nsecs_t Sensor::kExposureTimeRange[2] =
51 {1000L, 30000000000L} ; // 1 us - 30 sec
52const nsecs_t Sensor::kFrameDurationRange[2] =
53 {33331760L, 30000000000L}; // ~1/30 s - 30 sec
54
55const nsecs_t Sensor::kMinVerticalBlank = 10000L;
56
57const uint8_t Sensor::kColorFilterArrangement =
58 ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
59
60// Output image data characteristics
61const uint32_t Sensor::kMaxRawValue = 4000;
62const uint32_t Sensor::kBlackLevel = 1000;
63
64// Sensor sensitivity
65const float Sensor::kSaturationVoltage = 0.520f;
66const uint32_t Sensor::kSaturationElectrons = 2000;
67const float Sensor::kVoltsPerLuxSecond = 0.100f;
68
69const float Sensor::kElectronsPerLuxSecond =
70 Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
71 * Sensor::kVoltsPerLuxSecond;
72
73const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
74 Sensor::kSaturationElectrons;
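// For reference with the constants above: kElectronsPerLuxSecond works out
// to 2000 / 0.520 * 0.100 ~= 384.6 electrons per lux-second, and
// kBaseGainFactor to 4000 / 2000 = 2.0 digital counts per electron.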
75
76const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
77const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
78const float Sensor::kReadNoiseVarBeforeGain =
79 Sensor::kReadNoiseStddevBeforeGain *
80 Sensor::kReadNoiseStddevBeforeGain;
81const float Sensor::kReadNoiseVarAfterGain =
82 Sensor::kReadNoiseStddevAfterGain *
83 Sensor::kReadNoiseStddevAfterGain;
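// captureRaw() combines these as
//   readNoiseVar = kReadNoiseVarBeforeGain * totalGain^2 + kReadNoiseVarAfterGain
// and adds per-pixel shot noise of electronCount * totalGain^2 before taking
// the square root to get the sampling stddev.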
84
85// While each row has to read out, reset, and then expose, the (reset +
86// expose) sequence can be overlapped by other row readouts, so the final
87// minimum frame duration is purely a function of row readout time, at least
88// if there's a reasonable number of rows.
89const nsecs_t Sensor::kRowReadoutTime =
90 Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
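// With the values above: 33331760 ns / 1200 rows ~= 27776 ns per row, so a
// full 1200-row readout takes ~33.3 ms, matching the minimum frame duration.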
91
92const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
93const uint32_t Sensor::kDefaultSensitivity = 100;
94
95const usb_frmsize_discrete_t kUsbAvailablePictureSize[] = {
96 {4128, 3096},
97 {3264, 2448},
98 {2592, 1944},
99 {2592, 1936},
100 {2560, 1920},
101 {2688, 1520},
102 {2048, 1536},
103 {1600, 1200},
104 {1920, 1088},
105 {1920, 1080},
106 {1440, 1080},
107 {1280, 960},
108 {1280, 720},
109 {1024, 768},
110 {960, 720},
111 {720, 480},
112 {640, 480},
113 {352, 288},
114 {320, 240},
115};
116
117/** A few utility functions for math, normal distributions */
118
119// Take advantage of IEEE floating-point format to calculate an approximate
120// square root. Accurate to within +-3.6%
121float sqrtf_approx(float r) {
122 // Modifier is based on IEEE floating-point representation; the
123 // manipulations boil down to finding approximate log2, dividing by two, and
124 // then inverting the log2. A bias is added to make the relative error
125 // symmetric about the real answer.
126 const int32_t modifier = 0x1FBB4000;
127
    // Use memcpy for the int<->float bit casts to stay clear of
    // strict-aliasing undefined behavior.
    int32_t r_i;
    memcpy(&r_i, &r, sizeof(r_i));
    r_i = (r_i >> 1) + modifier;

    float r_out;
    memcpy(&r_out, &r_i, sizeof(r_out));
    return r_out;
132}
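// The shift halves the biased exponent (approximate log2 divided by two) and
// the modifier re-centers the result; e.g. sqrtf_approx(4.0f) lands within
// the stated +-3.6% of 2.0f.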
133
void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & ~31; // source rows are padded to a 32-pixel boundary
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}
145
146static int ALIGN(int x, int y) {
147 // y must be a power of 2.
148 return (x + y - 1) & ~(y - 1);
149}
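// Rounds x up to the next multiple of y; e.g. ALIGN(1000, 32) == 1024.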
150
bool IsUsbAvailablePictureSize(const usb_frmsize_discrete_t AvailablePictureSize[], uint32_t width, uint32_t height)
{
    int i;
    int count = ARRAY_SIZE(kUsbAvailablePictureSize);
    for (i = 0; i < count; i++) {
        if ((width == AvailablePictureSize[i].width) && (height == AvailablePictureSize[i].height)) {
            return true;
        }
    }
    return false;
}
165
166void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
167{
168 structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
169 (mmInt32)vinfo->preview.format.fmt.pix.height,
170 (mmInt32)vinfo->preview.format.fmt.pix.width,
171 IC_FORMAT_YCbCr420_lp,
172 (mmByte *) src,
173 (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
174 0};
175
176 structConvImage output = {(mmInt32)width,
177 (mmInt32)height,
178 (mmInt32)width,
179 IC_FORMAT_YCbCr420_lp,
180 (mmByte *) img,
181 (mmByte *) img + width * height,
182 0};
183
    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
186}
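// NV21 is semi-planar: a full-resolution Y plane followed by an interleaved
// VU plane with 2x2-subsampled chroma, which is why both chroma pointers
// above are computed as base + width * height.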
187
188Sensor::Sensor():
189 Thread(false),
190 mGotVSync(false),
191 mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
192 mFrameDuration(kFrameDurationRange[0]),
193 mGainFactor(kDefaultSensitivity),
194 mNextBuffers(NULL),
195 mFrameNumber(0),
196 mCapturedBuffers(NULL),
197 mListener(NULL),
198 mTemp_buffer(NULL),
199 mExitSensorThread(false),
200 mIoctlSupport(0),
201 msupportrotate(0),
202 mTimeOutCount(0),
203 mWait(false),
204 mPre_width(0),
205 mPre_height(0),
206 mFlushFlag(false),
207 mSensorWorkFlag(false),
208 mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
209{
210
211}
212
213Sensor::~Sensor() {
214 //shutDown();
215}
216
217status_t Sensor::startUp(int idx) {
218 ALOGV("%s: E", __FUNCTION__);
219 DBG_LOGA("ddd");
220
221 int res;
222 mCapturedBuffers = NULL;
223 res = run("EmulatedFakeCamera3::Sensor",
224 ANDROID_PRIORITY_URGENT_DISPLAY);
225
226 if (res != OK) {
227 ALOGE("Unable to start up sensor capture thread: %d", res);
228 }
229
    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    if (vinfo == NULL) {
        ALOGE("Unable to allocate VideoInfo");
        return NO_MEMORY;
    }
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }
237
238 mSensorType = SENSOR_MMAP;
239 if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
240 mSensorType = SENSOR_USB;
241 }
242
243 if (strstr((const char *)vinfo->cap.card, "share_fd")) {
244 mSensorType = SENSOR_SHARE_FD;
245 }
246
247 if (strstr((const char *)vinfo->cap.card, "front"))
248 mSensorFace = SENSOR_FACE_FRONT;
249 else if (strstr((const char *)vinfo->cap.card, "back"))
250 mSensorFace = SENSOR_FACE_BACK;
251 else
252 mSensorFace = SENSOR_FACE_NONE;
253
254 return res;
255}
256
257sensor_type_e Sensor::getSensorType(void)
258{
259 return mSensorType;
260}
261status_t Sensor::IoctlStateProbe(void) {
262 struct v4l2_queryctrl qc;
263 int ret = 0;
264 mIoctlSupport = 0;
265 memset(&qc, 0, sizeof(struct v4l2_queryctrl));
266 qc.id = V4L2_ROTATE_ID;
267 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
268 if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)|| (qc.type != V4L2_CTRL_TYPE_INTEGER)){
269 mIoctlSupport &= ~IOCTL_MASK_ROTATE;
270 }else{
271 mIoctlSupport |= IOCTL_MASK_ROTATE;
272 }
273
274 if(mIoctlSupport & IOCTL_MASK_ROTATE){
275 msupportrotate = true;
276 DBG_LOGA("camera support capture rotate");
277 }
278 return mIoctlSupport;
279}
280
281uint32_t Sensor::getStreamUsage(int stream_type)
282{
283 uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
284
285 switch (stream_type) {
286 case CAMERA3_STREAM_OUTPUT:
287 usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
288 break;
289 case CAMERA3_STREAM_INPUT:
290 usage = GRALLOC_USAGE_HW_CAMERA_READ;
291 break;
292 case CAMERA3_STREAM_BIDIRECTIONAL:
293 usage = GRALLOC_USAGE_HW_CAMERA_READ |
294 GRALLOC_USAGE_HW_CAMERA_WRITE;
295 break;
296 }
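    // MMAP and USB sensors fill their buffers with the CPU, so software
    // read/write access is requested in addition to texture/render usage.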
297 if ((mSensorType == SENSOR_MMAP)
298 || (mSensorType == SENSOR_USB)) {
299 usage = (GRALLOC_USAGE_HW_TEXTURE
300 | GRALLOC_USAGE_HW_RENDER
301 | GRALLOC_USAGE_SW_READ_MASK
302 | GRALLOC_USAGE_SW_WRITE_MASK
303 );
304 }
305
306 return usage;
307}
308
309status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
310{
311 int res;
312
313 mFramecount = 0;
314 mCurFps = 0;
315 gettimeofday(&mTimeStart, NULL);
316
317 if (isjpeg) {
318 vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
319 vinfo->picture.format.fmt.pix.width = width;
320 vinfo->picture.format.fmt.pix.height = height;
321 vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
322 } else {
323 vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
324 vinfo->preview.format.fmt.pix.width = width;
325 vinfo->preview.format.fmt.pix.height = height;
326 vinfo->preview.format.fmt.pix.pixelformat = pixelformat;
327
328 res = setBuffersFormat(vinfo);
329 if (res < 0) {
330 ALOGE("set buffer failed\n");
331 return res;
332 }
333 }
334
335 if (NULL == mTemp_buffer) {
336 mPre_width = vinfo->preview.format.fmt.pix.width;
337 mPre_height = vinfo->preview.format.fmt.pix.height;
338 DBG_LOGB("setOutputFormat :: pre_width = %d, pre_height = %d \n" , mPre_width , mPre_height);
339 mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2];
340 if (mTemp_buffer == NULL) {
341 ALOGE("first time allocate mTemp_buffer failed !");
342 return -1;
343 }
344 }
345
    // Reallocate the temp buffer if either dimension changed.
    if ((mPre_width != vinfo->preview.format.fmt.pix.width) || (mPre_height != vinfo->preview.format.fmt.pix.height)) {
347 if (mTemp_buffer) {
348 delete [] mTemp_buffer;
349 mTemp_buffer = NULL;
350 }
351 mPre_width = vinfo->preview.format.fmt.pix.width;
352 mPre_height = vinfo->preview.format.fmt.pix.height;
353 mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2];
354 if (mTemp_buffer == NULL) {
355 ALOGE("allocate mTemp_buffer failed !");
356 return -1;
357 }
358 }
359
360 return OK;
361
362}
363
364status_t Sensor::streamOn() {
365
366 return start_capturing(vinfo);
367}
368
369bool Sensor::isStreaming() {
370
371 return vinfo->isStreaming;
372}
373
bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
        || (vinfo->preview.format.fmt.pix.height != height)
        //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
        ) {
        return true;
    }

    return false;
}
387status_t Sensor::streamOff() {
388 if (mSensorType == SENSOR_USB) {
389 return releasebuf_and_stop_capturing(vinfo);
390 } else {
391 return stop_capturing(vinfo);
392 }
393}
394
395int Sensor::getOutputFormat()
396{
397 struct v4l2_fmtdesc fmt;
398 int ret;
399 memset(&fmt,0,sizeof(fmt));
400 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
401
402 fmt.index = 0;
403 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
404 if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
405 return V4L2_PIX_FMT_MJPEG;
406 fmt.index++;
407 }
408
409 fmt.index = 0;
410 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
411 if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
412 return V4L2_PIX_FMT_NV21;
413 fmt.index++;
414 }
415
416 fmt.index = 0;
417 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
418 if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
419 return V4L2_PIX_FMT_YUYV;
420 fmt.index++;
421 }
422
423 ALOGE("Unable to find a supported sensor format!");
424 return BAD_VALUE;
425}
426
/* If the sensor supports MJPEG, return that first; otherwise
 * translate the HAL format to a V4L2 format and check whether
 * it is supported.
 */
431int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
432{
433 struct v4l2_fmtdesc fmt;
434 int ret;
435 memset(&fmt,0,sizeof(fmt));
436 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
437
438 if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
439 pixelfmt = V4L2_PIX_FMT_YVU420;
440 } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
441 pixelfmt = V4L2_PIX_FMT_NV21;
442 } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
443 pixelfmt = V4L2_PIX_FMT_YUYV;
444 } else {
445 pixelfmt = V4L2_PIX_FMT_NV21;
446 }
447
448 fmt.index = 0;
449 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
450 if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
451 return V4L2_PIX_FMT_MJPEG;
452 fmt.index++;
453 }
454
455 fmt.index = 0;
456 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
457 if (fmt.pixelformat == pixelfmt)
458 return pixelfmt;
459 fmt.index++;
460 }
461
462 fmt.index = 0;
463 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
464 if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
465 return V4L2_PIX_FMT_YUYV;
466 fmt.index++;
467 }
468 ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
469 return BAD_VALUE;
470}
471
472void Sensor::setPictureRotate(int rotate)
473{
474 mRotateValue = rotate;
475}
476int Sensor::getPictureRotate()
477{
478 return mRotateValue;
479}
480status_t Sensor::shutDown() {
481 ALOGV("%s: E", __FUNCTION__);
482
483 int res;
484
485 mTimeOutCount = 0;
486
487 res = requestExitAndWait();
488 if (res != OK) {
489 ALOGE("Unable to shut down sensor capture thread: %d", res);
490 }
491
492 if (vinfo != NULL) {
493 if (mSensorType == SENSOR_USB) {
494 releasebuf_and_stop_capturing(vinfo);
495 } else {
496 stop_capturing(vinfo);
497 }
498 }
499
500 camera_close(vinfo);
501
502 if (vinfo){
503 free(vinfo);
504 vinfo = NULL;
505 }
506
507 if (mTemp_buffer) {
508 delete [] mTemp_buffer;
509 mTemp_buffer = NULL;
510 }
511
512 mSensorWorkFlag = false;
513
514 ALOGD("%s: Exit", __FUNCTION__);
515 return res;
516}
517
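// Wake every condition variable the capture and readout paths can block on,
// so the sensor thread and any waiters observe mExitSensorThread promptly.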
518void Sensor::sendExitSingalToSensor() {
519 {
520 Mutex::Autolock lock(mReadoutMutex);
521 mExitSensorThread = true;
522 mReadoutComplete.signal();
523 }
524
525 {
526 Mutex::Autolock lock(mControlMutex);
527 mVSync.signal();
528 }
529
530 {
531 Mutex::Autolock lock(mReadoutMutex);
532 mReadoutAvailable.signal();
533 }
534}
535
536Scene &Sensor::getScene() {
537 return mScene;
538}
539
540int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
541{
542 int ret = 0;
543 struct v4l2_queryctrl qc;
544
545 memset(&qc, 0, sizeof(qc));
546 qc.id = V4L2_CID_ZOOM_ABSOLUTE;
547 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
548
549 if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || ( ret < 0)
550 || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
551 ret = -1;
552 *zoomMin = 0;
553 *zoomMax = 0;
554 *zoomStep = 1;
555 CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
556 } else {
557 if ((qc.step != 0) && (qc.minimum != 0) &&
558 ((qc.minimum/qc.step) > (qc.maximum/qc.minimum))) {
559 DBG_LOGA("adjust zoom step. \n");
560 qc.step = (qc.minimum * qc.step);
561 }
562 *zoomMin = qc.minimum;
563 *zoomMax = qc.maximum;
564 *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
566 }
567
568 return ret ;
569}
570
571int Sensor::setZoom(int zoomValue)
572{
573 int ret = 0;
574 struct v4l2_control ctl;
575
576 memset( &ctl, 0, sizeof(ctl));
577 ctl.value = zoomValue;
578 ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
579 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
580 if (ret < 0) {
581 ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
582 }
583 return ret ;
584}
585
586status_t Sensor::setEffect(uint8_t effect)
587{
588 int ret = 0;
589 struct v4l2_control ctl;
590 ctl.id = V4L2_CID_COLORFX;
591
592 switch (effect) {
593 case ANDROID_CONTROL_EFFECT_MODE_OFF:
594 ctl.value= CAM_EFFECT_ENC_NORMAL;
595 break;
596 case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
597 ctl.value= CAM_EFFECT_ENC_COLORINV;
598 break;
599 case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
600 ctl.value= CAM_EFFECT_ENC_SEPIA;
601 break;
602 default:
603 ALOGE("%s: Doesn't support effect mode %d",
604 __FUNCTION__, effect);
605 return BAD_VALUE;
606 }
607
608 DBG_LOGB("set effect mode:%d", effect);
609 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
610 if (ret < 0) {
611 CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno),ret);
612 }
613 return ret ;
614}
615
616#define MAX_LEVEL_FOR_EXPOSURE 16
617#define MIN_LEVEL_FOR_EXPOSURE 3
618
619int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
620{
621 struct v4l2_queryctrl qc;
622 int ret=0;
623 int level = 0;
624 int middle = 0;
625
626 memset( &qc, 0, sizeof(qc));
627
628 DBG_LOGA("getExposure\n");
629 qc.id = V4L2_CID_EXPOSURE;
630 ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
631 if(ret < 0) {
632 CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
633 *minExp = -4;
634 *maxExp = 4;
635 *def = 0;
636 step->numerator = 1;
637 step->denominator = 1;
638 return ret;
639 }
640
641 if(0 < qc.step)
642 level = ( qc.maximum - qc.minimum + 1 )/qc.step;
643
644 if((level > MAX_LEVEL_FOR_EXPOSURE)
645 || (level < MIN_LEVEL_FOR_EXPOSURE)){
646 *minExp = -4;
647 *maxExp = 4;
648 *def = 0;
649 step->numerator = 1;
650 step->denominator = 1;
651 DBG_LOGB("not in[min,max], min=%d, max=%d, def=%d\n",
652 *minExp, *maxExp, *def);
653 return true;
654 }
655
656 middle = (qc.minimum+qc.maximum)/2;
657 *minExp = qc.minimum - middle;
658 *maxExp = qc.maximum - middle;
659 *def = qc.default_value - middle;
660 step->numerator = 1;
661 step->denominator = 2;//qc.step;
662 DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
663 return ret;
664}
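// Example of the remapping above: a control with minimum=0, maximum=8,
// default=4 is re-centered around middle=4 and reported to the framework
// as [-4, +4] with a default of 0.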
665
666status_t Sensor::setExposure(int expCmp)
667{
668 int ret = 0;
669 struct v4l2_control ctl;
670 struct v4l2_queryctrl qc;
671
672 if(mEV == expCmp){
673 return 0;
674 }else{
675 mEV = expCmp;
676 }
677 memset(&ctl, 0, sizeof(ctl));
678 memset(&qc, 0, sizeof(qc));
679
680 qc.id = V4L2_CID_EXPOSURE;
681
682 ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
683 if (ret < 0) {
684 CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno),ret);
685 }
686
687 ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum + qc.minimum) / 2; // re-center around the control's middle value
689
690 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
691 if (ret < 0) {
692 CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno),ret);
693 }
694 DBG_LOGB("setExposure value%d mEVmin%d mEVmax%d\n",ctl.value, qc.minimum, qc.maximum);
695 return ret ;
696}
697
698int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
699{
700 struct v4l2_queryctrl qc;
701 struct v4l2_querymenu qm;
702 int ret;
703 int mode_count = -1;
704
705 memset(&qc, 0, sizeof(struct v4l2_queryctrl));
706 qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
707 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
708 if ( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
709 DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
710 } else if ( qc.type != V4L2_CTRL_TYPE_INTEGER) {
711 DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
712 } else {
713 memset(&qm, 0, sizeof(qm));
714
715 int index = 0;
716 mode_count = 1;
717 antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;
718
719 for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
720 if (mode_count >= maxCont)
721 break;
722
723 memset(&qm, 0, sizeof(struct v4l2_querymenu));
724 qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
725 qm.index = index;
726 if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
727 continue;
728 } else {
729 if (strcmp((char*)qm.name,"50hz") == 0) {
730 antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
731 mode_count++;
732 } else if (strcmp((char*)qm.name,"60hz") == 0) {
733 antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
734 mode_count++;
735 } else if (strcmp((char*)qm.name,"auto") == 0) {
736 antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
737 mode_count++;
738 }
739
740 }
741 }
742 }
743
744 return mode_count;
745}
746
747status_t Sensor::setAntiBanding(uint8_t antiBanding)
748{
749 int ret = 0;
750 struct v4l2_control ctl;
751 ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;
752
753 switch (antiBanding) {
754 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
755 ctl.value= CAM_ANTIBANDING_OFF;
756 break;
757 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
758 ctl.value= CAM_ANTIBANDING_50HZ;
759 break;
760 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
761 ctl.value= CAM_ANTIBANDING_60HZ;
762 break;
763 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
764 ctl.value= CAM_ANTIBANDING_AUTO;
765 break;
766 default:
767 ALOGE("%s: Doesn't support ANTIBANDING mode %d",
768 __FUNCTION__, antiBanding);
769 return BAD_VALUE;
770 }
771
772 DBG_LOGB("anti banding mode:%d", antiBanding);
773 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
774 if ( ret < 0) {
775 CAMHAL_LOGDA("failed to set anti banding mode!\n");
776 return BAD_VALUE;
777 }
778 return ret;
779}
780
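#if 0
// Sketch of the packing done by setFocuasArea() below, assuming the legacy
// [-1000, 1000] focus-area coordinate space (helper name is illustrative).
static int32_t packFocusCenter(int32_t x0, int32_t y0, int32_t x1, int32_t y1) {
    int32_t v = ((x0 + x1) / 2 + 1000) << 16;   // x center in the high 16 bits
    v |= ((y0 + y1) / 2 + 1000) & 0xffff;       // y center in the low 16 bits
    return v;                                   // center (0, 0) -> 0x03E803E8
}
#endif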
781status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
782{
783 int ret = 0;
784 struct v4l2_control ctl;
785 ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
786 ctl.value = ((x0 + x1) / 2 + 1000) << 16;
787 ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;
788
789 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
790 return ret;
791}
792
793
794int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
795{
796 struct v4l2_queryctrl qc;
797 struct v4l2_querymenu qm;
798 int ret;
799 int mode_count = -1;
800
801 memset(&qc, 0, sizeof(struct v4l2_queryctrl));
802 qc.id = V4L2_CID_FOCUS_AUTO;
803 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
804 if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
805 DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
806 }else if( qc.type != V4L2_CTRL_TYPE_MENU) {
807 DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
808 }else{
809 memset(&qm, 0, sizeof(qm));
810
811 int index = 0;
812 mode_count = 1;
813 afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;
814
815 for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
816 if (mode_count >= maxCount)
817 break;
818
819 memset(&qm, 0, sizeof(struct v4l2_querymenu));
820 qm.id = V4L2_CID_FOCUS_AUTO;
821 qm.index = index;
822 if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
823 continue;
824 } else {
825 if (strcmp((char*)qm.name,"auto") == 0) {
826 afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
827 mode_count++;
828 } else if (strcmp((char*)qm.name,"continuous-video") == 0) {
829 afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
830 mode_count++;
831 } else if (strcmp((char*)qm.name,"continuous-picture") == 0) {
832 afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
833 mode_count++;
834 }
835
836 }
837 }
838 }
839
840 return mode_count;
841}
842
843status_t Sensor::setAutoFocuas(uint8_t afMode)
844{
845 struct v4l2_control ctl;
846 ctl.id = V4L2_CID_FOCUS_AUTO;
847
848 switch (afMode) {
849 case ANDROID_CONTROL_AF_MODE_AUTO:
850 ctl.value = CAM_FOCUS_MODE_AUTO;
851 break;
852 case ANDROID_CONTROL_AF_MODE_MACRO:
853 ctl.value = CAM_FOCUS_MODE_MACRO;
854 break;
855 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
856 ctl.value = CAM_FOCUS_MODE_CONTI_VID;
857 break;
858 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
859 ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
860 break;
861 default:
862 ALOGE("%s: Emulator doesn't support AF mode %d",
863 __FUNCTION__, afMode);
864 return BAD_VALUE;
865 }
866
867 if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
869 return BAD_VALUE;
870 }
871
872 return OK;
873}
874
875int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
876{
877 struct v4l2_queryctrl qc;
878 struct v4l2_querymenu qm;
879 int ret;
880 int mode_count = -1;
881
882 memset(&qc, 0, sizeof(struct v4l2_queryctrl));
883 qc.id = V4L2_CID_DO_WHITE_BALANCE;
884 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
885 if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
886 DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
887 }else if( qc.type != V4L2_CTRL_TYPE_MENU) {
888 DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
889 }else{
890 memset(&qm, 0, sizeof(qm));
891
892 int index = 0;
893 mode_count = 1;
894 awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;
895
896 for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
897 if (mode_count >= maxCount)
898 break;
899
900 memset(&qm, 0, sizeof(struct v4l2_querymenu));
901 qm.id = V4L2_CID_DO_WHITE_BALANCE;
902 qm.index = index;
903 if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
904 continue;
905 } else {
906 if (strcmp((char*)qm.name,"auto") == 0) {
907 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
908 mode_count++;
909 } else if (strcmp((char*)qm.name,"daylight") == 0) {
910 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
911 mode_count++;
912 } else if (strcmp((char*)qm.name,"incandescent") == 0) {
913 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
914 mode_count++;
915 } else if (strcmp((char*)qm.name,"fluorescent") == 0) {
916 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
917 mode_count++;
918 } else if (strcmp((char*)qm.name,"warm-fluorescent") == 0) {
919 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
920 mode_count++;
921 } else if (strcmp((char*)qm.name,"cloudy-daylight") == 0) {
922 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
923 mode_count++;
924 } else if (strcmp((char*)qm.name,"twilight") == 0) {
925 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
926 mode_count++;
927 } else if (strcmp((char*)qm.name,"shade") == 0) {
928 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
929 mode_count++;
930 }
931
932 }
933 }
934 }
935
936 return mode_count;
937}
938
939status_t Sensor::setAWB(uint8_t awbMode)
940{
941 int ret = 0;
942 struct v4l2_control ctl;
943 ctl.id = V4L2_CID_DO_WHITE_BALANCE;
944
945 switch (awbMode) {
946 case ANDROID_CONTROL_AWB_MODE_AUTO:
947 ctl.value = CAM_WB_AUTO;
948 break;
949 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
950 ctl.value = CAM_WB_INCANDESCENCE;
951 break;
952 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
953 ctl.value = CAM_WB_FLUORESCENT;
954 break;
955 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
956 ctl.value = CAM_WB_DAYLIGHT;
957 break;
958 case ANDROID_CONTROL_AWB_MODE_SHADE:
959 ctl.value = CAM_WB_SHADE;
960 break;
961 default:
962 ALOGE("%s: Emulator doesn't support AWB mode %d",
963 __FUNCTION__, awbMode);
964 return BAD_VALUE;
965 }
966 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
967 return ret;
968}
969
970void Sensor::setExposureTime(uint64_t ns) {
971 Mutex::Autolock lock(mControlMutex);
972 ALOGVV("Exposure set to %f", ns/1000000.f);
973 mExposureTime = ns;
974}
975
976void Sensor::setFrameDuration(uint64_t ns) {
977 Mutex::Autolock lock(mControlMutex);
978 ALOGVV("Frame duration set to %f", ns/1000000.f);
979 mFrameDuration = ns;
980}
981
982void Sensor::setSensitivity(uint32_t gain) {
983 Mutex::Autolock lock(mControlMutex);
984 ALOGVV("Gain set to %d", gain);
985 mGainFactor = gain;
986}
987
988void Sensor::setDestinationBuffers(Buffers *buffers) {
989 Mutex::Autolock lock(mControlMutex);
990 mNextBuffers = buffers;
991}
992
993void Sensor::setFrameNumber(uint32_t frameNumber) {
994 Mutex::Autolock lock(mControlMutex);
995 mFrameNumber = frameNumber;
996}
997
998void Sensor::setFlushFlag(bool flushFlag) {
999 mFlushFlag = flushFlag;
1000}
1001
1002status_t Sensor::waitForVSync(nsecs_t reltime) {
1003 int res;
1004 Mutex::Autolock lock(mControlMutex);
1005 CAMHAL_LOGVB("%s , E mControlMutex" , __FUNCTION__);
1006 if (mExitSensorThread) {
1007 return -1;
1008 }
1009
1010 mGotVSync = false;
1011 res = mVSync.waitRelative(mControlMutex, reltime);
1012 if (res != OK && res != TIMED_OUT) {
1013 ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
1014 return false;
1015 }
1016 CAMHAL_LOGVB("%s , X mControlMutex , mGotVSync = %d " , __FUNCTION__ , mGotVSync);
1017 return mGotVSync;
1018}
1019
1020status_t Sensor::waitForNewFrame(nsecs_t reltime,
1021 nsecs_t *captureTime) {
1022 Mutex::Autolock lock(mReadoutMutex);
1023 if (mExitSensorThread) {
1024 return -1;
1025 }
1026
1027 if (mCapturedBuffers == NULL) {
1028 int res;
1029 CAMHAL_LOGVB("%s , E mReadoutMutex , reltime = %d" , __FUNCTION__, reltime);
1030 res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
1031 if (res == TIMED_OUT) {
1032 return false;
1033 } else if (res != OK || mCapturedBuffers == NULL) {
1034 if (mFlushFlag) {
1035 ALOGE("%s , return immediately , mWait = %d", __FUNCTION__, mWait);
1036 if (mWait) {
1037 mWait = false;
1038 *captureTime = mCaptureTime;
1039 mCapturedBuffers = NULL;
1040 mReadoutComplete.signal();
1041 } else {
1042 *captureTime = mCaptureTime;
1043 mCapturedBuffers = NULL;
1044 }
1045 return -2;
1046 } else {
1047 ALOGE("Error waiting for sensor readout signal: %d", res);
1048 return false;
1049 }
1050 }
1051 }
1052 if (mWait) {
1053 mWait = false;
1054 *captureTime = mCaptureTime;
1055 mCapturedBuffers = NULL;
1056 mReadoutComplete.signal();
1057 } else {
1058 *captureTime = mCaptureTime;
1059 mCapturedBuffers = NULL;
1060 }
1061 CAMHAL_LOGVB("%s , X" , __FUNCTION__);
1062 return true;
1063}
1064
1065Sensor::SensorListener::~SensorListener() {
1066}
1067
1068void Sensor::setSensorListener(SensorListener *listener) {
1069 Mutex::Autolock lock(mControlMutex);
1070 mListener = listener;
1071}
1072
1073status_t Sensor::readyToRun() {
1074 int res;
1075 ALOGV("Starting up sensor thread");
1076 mStartupTime = systemTime();
1077 mNextCaptureTime = 0;
1078 mNextCapturedBuffers = NULL;
1079
1080 DBG_LOGA("");
1081
1082 return OK;
1083}
1084
1085bool Sensor::threadLoop() {
1086 /**
1087 * Sensor capture operation main loop.
1088 *
1089 * Stages are out-of-order relative to a single frame's processing, but
1090 * in-order in time.
1091 */
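    // For one frame the logical order is Stage 1 (latch controls), Stage 2
    // (expose/capture), Stage 3 (read out); each loop iteration runs
    // 1 -> 3 -> 2 because Stage 3 drains the previous frame's buffers while
    // Stage 2 starts the next capture.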
1092
1093 if (mExitSensorThread) {
1094 return false;
1095 }
1096
1097 /**
1098 * Stage 1: Read in latest control parameters
1099 */
1100 uint64_t exposureDuration;
1101 uint64_t frameDuration;
1102 uint32_t gain;
1103 Buffers *nextBuffers;
1104 uint32_t frameNumber;
1105 SensorListener *listener = NULL;
1106 {
1107 Mutex::Autolock lock(mControlMutex);
1108 CAMHAL_LOGVB("%s , E mControlMutex" , __FUNCTION__);
1109 exposureDuration = mExposureTime;
1110 frameDuration = mFrameDuration;
1111 gain = mGainFactor;
1112 nextBuffers = mNextBuffers;
1113 frameNumber = mFrameNumber;
1114 listener = mListener;
1115 // Don't reuse a buffer set
1116 mNextBuffers = NULL;
1117
1118 // Signal VSync for start of readout
1119 ALOGVV("Sensor VSync");
1120 mGotVSync = true;
1121 mVSync.signal();
1122 }
1123
1124 /**
1125 * Stage 3: Read out latest captured image
1126 */
1127
1128 Buffers *capturedBuffers = NULL;
1129 nsecs_t captureTime = 0;
1130
1131 nsecs_t startRealTime = systemTime();
1132 // Stagefright cares about system time for timestamps, so base simulated
1133 // time on that.
1134 nsecs_t simulatedTime = startRealTime;
1135 nsecs_t frameEndRealTime = startRealTime + frameDuration;
1136 nsecs_t frameReadoutEndRealTime = startRealTime +
1137 kRowReadoutTime * kResolution[1];
1138
1139 if (mNextCapturedBuffers != NULL) {
1140 ALOGVV("Sensor starting readout");
1141 // Pretend we're doing readout now; will signal once enough time has elapsed
1142 capturedBuffers = mNextCapturedBuffers;
1143 captureTime = mNextCaptureTime;
1144 }
1145 simulatedTime += kRowReadoutTime + kMinVerticalBlank;
1146
1147 // TODO: Move this signal to another thread to simulate readout
1148 // time properly
1149 if (capturedBuffers != NULL) {
1150 ALOGVV("Sensor readout complete");
1151 Mutex::Autolock lock(mReadoutMutex);
1152 CAMHAL_LOGVB("%s , E mReadoutMutex" , __FUNCTION__);
1153 if (mCapturedBuffers != NULL) {
1154 ALOGE("Waiting for readout thread to catch up!");
1155 mWait = true;
1156 mReadoutComplete.wait(mReadoutMutex);
1157 }
1158
1159 mCapturedBuffers = capturedBuffers;
1160 mCaptureTime = captureTime;
1161 mReadoutAvailable.signal();
1162 capturedBuffers = NULL;
1163 }
1164 CAMHAL_LOGVB("%s , X mReadoutMutex" , __FUNCTION__);
1165
1166 if (mExitSensorThread) {
1167 return false;
1168 }
1169 /**
1170 * Stage 2: Capture new image
1171 */
1172 mNextCaptureTime = simulatedTime;
1173 mNextCapturedBuffers = nextBuffers;
1174
1175 if (mNextCapturedBuffers != NULL) {
1176 if (listener != NULL) {
1177#if 0
1178 if (get_device_status(vinfo)) {
1179 listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
1180 }
1181#endif
1182 listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
1183 mNextCaptureTime);
1184 }
1185
1186 ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
1187 (float)exposureDuration/1e6, gain);
1188 mScene.setExposureDuration((float)exposureDuration/1e9);
1189 mScene.calculateScene(mNextCaptureTime);
1190
1191 if ( mSensorType == SENSOR_SHARE_FD) {
1192 captureNewImageWithGe2d();
1193 } else {
1194 captureNewImage();
1195 }
1196 mFramecount ++;
1197 }
1198
1199 if (mExitSensorThread) {
1200 return false;
1201 }
1202
1203 if (mFramecount == 100) {
1204 gettimeofday(&mTimeEnd, NULL);
1205 int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
1206 mCurFps = mFramecount/(interval/1000000.0f);
1207 memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
1208 mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld us (%f s), fps=%f\n", interval, interval/1000000.0f, mCurFps);
1210 }
1211 ALOGVV("Sensor vertical blanking interval");
1212 nsecs_t workDoneRealTime = systemTime();
1213 const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
1214 if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
1215 timespec t;
1216 t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
1217 t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;
1218
1219 int ret;
1220 do {
1221 ret = nanosleep(&t, &t);
1222 } while (ret != 0);
1223 }
1224 nsecs_t endRealTime = systemTime();
1225 ALOGVV("Frame cycle took %d ms, target %d ms",
1226 (int)((endRealTime - startRealTime)/1000000),
1227 (int)(frameDuration / 1000000));
1228 CAMHAL_LOGVB("%s , X" , __FUNCTION__);
1229 return true;
1230};
1231
1232int Sensor::captureNewImageWithGe2d() {
1233
1234 uint32_t gain = mGainFactor;
1235 mKernelPhysAddr = 0;
1236
1237
1238 while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
1239 usleep(5000);
1240 }
1241
1242 // Might be adding more buffers, so size isn't constant
1243 for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
1244 const StreamBuffer &b = (*mNextCapturedBuffers)[i];
1245 fillStream(vinfo, mKernelPhysAddr, b);
1246 }
1247 putback_frame(vinfo);
1248 mKernelPhysAddr = 0;
1249
1250 return 0;
1251
1252}
1253
1254int Sensor::captureNewImage() {
1255 bool isjpeg = false;
1256 uint32_t gain = mGainFactor;
1257 mKernelBuffer = NULL;
1258
1259 // Might be adding more buffers, so size isn't constant
1260 ALOGVV("size=%d\n", mNextCapturedBuffers->size());
1261 for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
1262 const StreamBuffer &b = (*mNextCapturedBuffers)[i];
1263 ALOGVV("Sensor capturing buffer %d: stream %d,"
1264 " %d x %d, format %x, stride %d, buf %p, img %p",
1265 i, b.streamId, b.width, b.height, b.format, b.stride,
1266 b.buffer, b.img);
1267 switch (b.format) {
1268#if PLATFORM_SDK_VERSION <= 22
1269 case HAL_PIXEL_FORMAT_RAW_SENSOR:
1270 captureRaw(b.img, gain, b.stride);
1271 break;
1272#endif
1273 case HAL_PIXEL_FORMAT_RGB_888:
1274 captureRGB(b.img, gain, b.stride);
1275 break;
1276 case HAL_PIXEL_FORMAT_RGBA_8888:
1277 captureRGBA(b.img, gain, b.stride);
1278 break;
1279 case HAL_PIXEL_FORMAT_BLOB:
            // Add an auxiliary buffer of the right size
1281 // Assumes only one BLOB (JPEG) buffer in
1282 // mNextCapturedBuffers
1283 StreamBuffer bAux;
1284 int orientation;
1285 orientation = getPictureRotate();
1286 ALOGD("bAux orientation=%d",orientation);
1287 uint32_t pixelfmt;
1288 if ((b.width == vinfo->preview.format.fmt.pix.width &&
1289 b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {
1290
1291 pixelfmt = getOutputFormat();
1292 if (pixelfmt == V4L2_PIX_FMT_YVU420) {
1293 pixelfmt = HAL_PIXEL_FORMAT_YV12;
1294 } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
1295 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
1296 } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
1297 pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
1298 } else {
1299 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
1300 }
1301 } else {
1302 isjpeg = true;
1303 pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
1304 }
1305
1306 if (!msupportrotate) {
1307 bAux.streamId = 0;
1308 bAux.width = b.width;
1309 bAux.height = b.height;
1310 bAux.format = pixelfmt;
1311 bAux.stride = b.width;
1312 bAux.buffer = NULL;
1313 } else {
1314 if ((orientation == 90) || (orientation == 270)) {
1315 bAux.streamId = 0;
1316 bAux.width = b.height;
1317 bAux.height = b.width;
1318 bAux.format = pixelfmt;
1319 bAux.stride = b.height;
1320 bAux.buffer = NULL;
1321 } else {
1322 bAux.streamId = 0;
1323 bAux.width = b.width;
1324 bAux.height = b.height;
1325 bAux.format = pixelfmt;
1326 bAux.stride = b.width;
1327 bAux.buffer = NULL;
1328 }
1329 }
1330 // TODO: Reuse these
1331 bAux.img = new uint8_t[b.width * b.height * 3];
1332 mNextCapturedBuffers->push_back(bAux);
1333 break;
1334 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
1335 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1336 captureNV21(b, gain);
1337 break;
1338 case HAL_PIXEL_FORMAT_YV12:
1339 captureYV12(b, gain);
1340 break;
1341 case HAL_PIXEL_FORMAT_YCbCr_422_I:
1342 captureYUYV(b.img, gain, b.stride);
1343 break;
1344 default:
1345 ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
1346 b.format);
1347 break;
1348 }
1349 }
    if ((!isjpeg) && (mKernelBuffer)) { // the JPEG (RGB888) capture uses a separate
                                        // buffer struct that is put back separately
1352 putback_frame(vinfo);
1353 }
1354 mKernelBuffer = NULL;
1355
1356 return 0;
1357}
1358
1359int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
1360 int res;
1361 int i, j, k, START;
1362 int count = 0;
1363 int pixelfmt;
1364 struct v4l2_frmsizeenum frmsize;
1365 char property[PROPERTY_VALUE_MAX];
1366 unsigned int support_w,support_h;
1367
1368 support_w = 10000;
1369 support_h = 10000;
1370 memset(property, 0, sizeof(property));
1371 if(property_get("ro.camera.preview.MaxSize", property, NULL) > 0){
1372 CAMHAL_LOGDB("support Max Preview Size :%s",property);
1373 if(sscanf(property,"%dx%d",&support_w,&support_h)!=2){
1374 support_w = 10000;
1375 support_h = 10000;
1376 }
1377 }
1378
1379 memset(&frmsize,0,sizeof(frmsize));
1380 frmsize.pixel_format = getOutputFormat();
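    // picSizes[] is filled with flat 4-int tuples of
    // { HAL pixel format, width, height, direction }; the insertion loops
    // below keep each format's tuples sorted by descending width * height.
#if 0
    // Sketch of how a caller can walk the result, where 'n' is the count
    // returned by getStreamConfigurations():
    for (int idx = 0; idx + 3 < n; idx += 4) {
        uint32_t format    = picSizes[idx + 0];
        uint32_t width     = picSizes[idx + 1];
        uint32_t height    = picSizes[idx + 2];
        uint32_t direction = picSizes[idx + 3];
    }
#endif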
1381
1382 START = 0;
1383 for (i = 0; ; i++) {
1384 frmsize.index = i;
1385 res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
1386 if (res < 0){
1387 DBG_LOGB("index=%d, break\n", i);
1388 break;
1389 }
1390
1391 if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
1392
1393 if (0 != (frmsize.discrete.width%16))
1394 continue;
1395
1396 if ((frmsize.discrete.width * frmsize.discrete.height) > (support_w * support_h))
1397 continue;
1398 if (count >= size)
1399 break;
1400
1401 picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
1402 picSizes[count+1] = frmsize.discrete.width;
1403 picSizes[count+2] = frmsize.discrete.height;
1404 picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
1405
1406 DBG_LOGB("get output width=%d, height=%d, format=%d\n",
1407 frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
1408 if (0 == i) {
1409 count += 4;
1410 continue;
1411 }
1412
1413 for (k = count; k > START; k -= 4) {
1414 if (frmsize.discrete.width * frmsize.discrete.height >
1415 picSizes[k - 3] * picSizes[k - 2]) {
1416 picSizes[k + 1] = picSizes[k - 3];
1417 picSizes[k + 2] = picSizes[k - 2];
1418
1419 } else {
1420 break;
1421 }
1422 }
1423 picSizes[k + 1] = frmsize.discrete.width;
1424 picSizes[k + 2] = frmsize.discrete.height;
1425
1426 count += 4;
1427 }
1428 }
1429
1430 START = count;
1431 for (i = 0; ; i++) {
1432 frmsize.index = i;
1433 res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
1434 if (res < 0){
1435 DBG_LOGB("index=%d, break\n", i);
1436 break;
1437 }
1438
1439 if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
1440
1441 if (0 != (frmsize.discrete.width%16))
1442 continue;
1443
1444 if ((frmsize.discrete.width * frmsize.discrete.height) > (support_w * support_h))
1445 continue;
1446 if (count >= size)
1447 break;
1448
1449 picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
1450 picSizes[count+1] = frmsize.discrete.width;
1451 picSizes[count+2] = frmsize.discrete.height;
1452 picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
1453
1454 DBG_LOGB("get output width=%d, height=%d, format =\
1455 HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
1456 frmsize.discrete.height);
1457 if (0 == i) {
1458 count += 4;
1459 continue;
1460 }
1461
1462 for (k = count; k > START; k -= 4) {
1463 if (frmsize.discrete.width * frmsize.discrete.height >
1464 picSizes[k - 3] * picSizes[k - 2]) {
1465 picSizes[k + 1] = picSizes[k - 3];
1466 picSizes[k + 2] = picSizes[k - 2];
1467
1468 } else {
1469 break;
1470 }
1471 }
1472 picSizes[k + 1] = frmsize.discrete.width;
1473 picSizes[k + 2] = frmsize.discrete.height;
1474
1475 count += 4;
1476 }
1477 }
1478
1479#if 0
1480 if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
1481 START = count;
1482 for (i = 0; ; i++) {
1483 frmsize.index = i;
1484 res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
1485 if (res < 0){
1486 DBG_LOGB("index=%d, break\n", i);
1487 break;
1488 }
1489
1490 if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
1491
1492 if (0 != (frmsize.discrete.width%16))
1493 continue;
1494
1495 if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
1496 continue;
1497
1498 if (count >= size)
1499 break;
1500
1501 picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
1502 picSizes[count+1] = frmsize.discrete.width;
1503 picSizes[count+2] = frmsize.discrete.height;
1504 picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
1505
1506 DBG_LOGB("get output width=%d, height=%d, format =\
1507 HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
1508 frmsize.discrete.height);
1509 if (0 == i) {
1510 count += 4;
1511 continue;
1512 }
1513
1514 for (k = count; k > START; k -= 4) {
1515 if (frmsize.discrete.width * frmsize.discrete.height >
1516 picSizes[k - 3] * picSizes[k - 2]) {
1517 picSizes[k + 1] = picSizes[k - 3];
1518 picSizes[k + 2] = picSizes[k - 2];
1519
1520 } else {
1521 break;
1522 }
1523 }
1524 picSizes[k + 1] = frmsize.discrete.width;
1525 picSizes[k + 2] = frmsize.discrete.height;
1526
1527 count += 4;
1528 }
1529 }
1530 }
1531#endif
1532
1533 uint32_t jpgSrcfmt[] = {
1534 V4L2_PIX_FMT_RGB24,
1535 V4L2_PIX_FMT_MJPEG,
1536 V4L2_PIX_FMT_YUYV,
1537 };
1538
1539 START = count;
1540 for (j = 0; j<(int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
1541 memset(&frmsize,0,sizeof(frmsize));
1542 frmsize.pixel_format = jpgSrcfmt[j];
1543
1544 for (i = 0; ; i++) {
1545 frmsize.index = i;
1546 res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
1547 if (res < 0){
1548 DBG_LOGB("index=%d, break\n", i);
1549 break;
1550 }
1551
1552 if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
1553
1554 if (0 != (frmsize.discrete.width%16))
1555 continue;
1556
1557 //if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
1558 // continue;
1559
1560 if (count >= size)
1561 break;
1562
1563 if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
1564 if (!IsUsbAvailablePictureSize(kUsbAvailablePictureSize, frmsize.discrete.width, frmsize.discrete.height))
1565 continue;
1566 }
1567
1568 picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
1569 picSizes[count+1] = frmsize.discrete.width;
1570 picSizes[count+2] = frmsize.discrete.height;
1571 picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
1572
1573 if (0 == i) {
1574 count += 4;
1575 continue;
1576 }
1577
1578 //TODO insert in descend order
1579 for (k = count; k > START; k -= 4) {
1580 if (frmsize.discrete.width * frmsize.discrete.height >
1581 picSizes[k - 3] * picSizes[k - 2]) {
1582 picSizes[k + 1] = picSizes[k - 3];
1583 picSizes[k + 2] = picSizes[k - 2];
1584
1585 } else {
1586 break;
1587 }
1588 }
1589
1590 picSizes[k + 1] = frmsize.discrete.width;
1591 picSizes[k + 2] = frmsize.discrete.height;
1592
1593 count += 4;
1594 }
1595 }
1596
1597 if (frmsize.index > 0)
1598 break;
1599 }
1600
1601 if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel format for JPEG");
1603
1604 return count;
1605
1606}
1607
1608int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size, bool flag)
1609{
    int ret = 0;
    int framerate = 0;
    int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // 'duration' is a pointer parameter, so its element count must come
    // from 'size'; zero all entries explicitly.
    memset(duration, 0, sizeof(int64_t) * size);
1616 int pixelfmt_tbl[] = {
1617 V4L2_PIX_FMT_MJPEG,
1618 V4L2_PIX_FMT_YVU420,
1619 V4L2_PIX_FMT_NV21,
1620 V4L2_PIX_FMT_RGB24,
1621 V4L2_PIX_FMT_YUYV,
1622 //V4L2_PIX_FMT_YVU420
1623 };
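    // duration[] mirrors picSizes[]: flat 4-entry tuples of
    // { format, width, height, min frame duration in ns }, where the
    // duration is 1e9 / the highest supported rate (e.g. 30 fps ->
    // 33333333 ns).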
1624
1625 for( i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++)
1626 {
1627 /* we got all duration for each resolution for prev format*/
1628 if (count >= tmp_size)
1629 break;
1630
1631 for( ; size > 0; size-=4)
1632 {
1633 memset(&fival, 0, sizeof(fival));
1634
1635 for (fival.index = 0;;fival.index++)
1636 {
1637 fival.pixel_format = pixelfmt_tbl[i];
1638 fival.width = picSizes[size-3];
1639 fival.height = picSizes[size-2];
1640 if((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
1641 if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE){
1642 temp_rate = fival.discrete.denominator/fival.discrete.numerator;
1643 if(framerate < temp_rate)
1644 framerate = temp_rate;
1645 duration[count+0] = (int64_t)(picSizes[size-4]);
1646 duration[count+1] = (int64_t)(picSizes[size-3]);
1647 duration[count+2] = (int64_t)(picSizes[size-2]);
1648 duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
1649 j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS){
                        // 'discrete' aliases 'stepwise.min' in this union; the
                        // shortest supported interval gives the highest rate.
                        temp_rate = fival.stepwise.min.denominator/fival.stepwise.min.numerator;
1652 if(framerate < temp_rate)
1653 framerate = temp_rate;
1654 duration[count+0] = (int64_t)picSizes[size-4];
1655 duration[count+1] = (int64_t)picSizes[size-3];
1656 duration[count+2] = (int64_t)picSizes[size-2];
1657 duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
1658 j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE){
                        temp_rate = fival.stepwise.min.denominator/fival.stepwise.min.numerator; // shortest interval again
1661 if(framerate < temp_rate)
1662 framerate = temp_rate;
1663 duration[count+0] = (int64_t)picSizes[size-4];
1664 duration[count+1] = (int64_t)picSizes[size-3];
1665 duration[count+2] = (int64_t)picSizes[size-2];
1666 duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
1667 j++;
1668 }
1669 } else {
1670 if (j > 0) {
1671 if (count >= tmp_size)
1672 break;
1673 duration[count+0] = (int64_t)(picSizes[size-4]);
1674 duration[count+1] = (int64_t)(picSizes[size-3]);
1675 duration[count+2] = (int64_t)(picSizes[size-2]);
1676 if (framerate == 5) {
1677 if ((!flag) && ((duration[count+0] == HAL_PIXEL_FORMAT_YCbCr_420_888)
1678 || (duration[count+0] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)))
1679 duration[count+3] = 0;
1680 else
1681 duration[count+3] = (int64_t)200000000L;
1682 } else if (framerate == 10) {
1683 if ((!flag) && ((duration[count+0] == HAL_PIXEL_FORMAT_YCbCr_420_888)
1684 || (duration[count+0] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)))
1685 duration[count+3] = 0;
1686 else
1687 duration[count+3] = (int64_t)100000000L;
1688 } else if (framerate == 15) {
1689 if ((!flag) && ((duration[count+0] == HAL_PIXEL_FORMAT_YCbCr_420_888)
1690 || (duration[count+0] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)))
1691 duration[count+3] = 0;
1692 else
1693 duration[count+3] = (int64_t)66666666L;
1694 } else if (framerate == 30) {
1695 if ((!flag) && ((duration[count+0] == HAL_PIXEL_FORMAT_YCbCr_420_888)
1696 || (duration[count+0] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)))
1697 duration[count+3] = 0;
1698 else
1699 duration[count+3] = (int64_t)33333333L;
1700 } else {
1701 if ((!flag) && ((duration[count+0] == HAL_PIXEL_FORMAT_YCbCr_420_888)
1702 || (duration[count+0] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)))
1703 duration[count+3] = 0;
1704 else
1705 duration[count+3] = (int64_t)66666666L;
1706 }
1707 count += 4;
1708 break;
1709 } else {
1710 break;
1711 }
1712 }
1713 }
1714 j=0;
1715 }
1716 size = tmp_size;
1717 }
1718
1719 return count;
1720
1721}
1722
1723int64_t Sensor::getMinFrameDuration()
1724{
1725 int64_t tmpDuration = 66666666L; // 1/15 s
1726 int64_t frameDuration = 66666666L; // 1/15 s
1727 struct v4l2_frmivalenum fival;
1728 int i,j;
1729
1730 uint32_t pixelfmt_tbl[]={
1731 V4L2_PIX_FMT_MJPEG,
1732 V4L2_PIX_FMT_YUYV,
1733 V4L2_PIX_FMT_NV21,
1734 };
1735 struct v4l2_frmsize_discrete resolution_tbl[]={
1736 {1920, 1080},
1737 {1280, 960},
1738 {640, 480},
1739 {320, 240},
1740 };
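    // A frame interval of numerator/denominator seconds converts to
    // nanoseconds as numerator * 1e9 / denominator; e.g. a 1/30 s interval
    // gives 33333333 ns.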
1741
1742 for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
1743 for (j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++) {
1744 memset(&fival, 0, sizeof(fival));
1745 fival.index = 0;
1746 fival.pixel_format = pixelfmt_tbl[i];
1747 fival.width = resolution_tbl[j].width;
1748 fival.height = resolution_tbl[j].height;
1749
1750 while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
1751 if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
1752 tmpDuration =
1753 fival.discrete.numerator * 1000000000L / fival.discrete.denominator;
1754
1755 if (frameDuration > tmpDuration)
1756 frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    // The shortest supported interval is the minimum duration.
                    frameDuration =
                        fival.stepwise.min.numerator * 1000000000L / fival.stepwise.min.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.min.numerator * 1000000000L / fival.stepwise.min.denominator;
                    break;
1765 }
1766 fival.index++;
1767 }
1768 }
1769
1770 if (fival.index > 0) {
1771 break;
1772 }
1773 }
1774
1775 CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
1776 return frameDuration;
1777}
1778
1779int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
1780 int res;
1781 int i;
1782 int count = 0;
1783 struct v4l2_frmsizeenum frmsize;
1784 char property[PROPERTY_VALUE_MAX];
1785 unsigned int support_w,support_h;
1786 int preview_fmt;
1787
1788 support_w = 10000;
1789 support_h = 10000;
1790 memset(property, 0, sizeof(property));
1791 if(property_get("ro.camera.preview.MaxSize", property, NULL) > 0){
1792 CAMHAL_LOGDB("support Max Preview Size :%s",property);
1793 if(sscanf(property,"%dx%d",&support_w,&support_h)!=2){
1794 support_w = 10000;
1795 support_h = 10000;
1796 }
1797 }
1798
1799
1800 memset(&frmsize,0,sizeof(frmsize));
1801 preview_fmt = V4L2_PIX_FMT_NV21;//getOutputFormat();
1802
1803 if (preview_fmt == V4L2_PIX_FMT_MJPEG)
1804 frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
1805 else if (preview_fmt == V4L2_PIX_FMT_NV21) {
1806 if (preview == true)
1807 frmsize.pixel_format = V4L2_PIX_FMT_NV21;
1808 else
1809 frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
1810 } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
1811 if (preview == true)
1812 frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
1813 else
1814 frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
1815 } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
1816 frmsize.pixel_format = V4L2_PIX_FMT_YUYV;
1817
1818 for (i = 0; ; i++) {
1819 frmsize.index = i;
1820 res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
1821 if (res < 0){
1822 DBG_LOGB("index=%d, break\n", i);
1823 break;
1824 }
1825
1826
1827 if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
1828
1829 if (0 != (frmsize.discrete.width%16))
1830 continue;
1831
1832 if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
1833 continue;
1834
1835 if (count >= size)
1836 break;
1837
1838 picSizes[count] = frmsize.discrete.width;
1839 picSizes[count+1] = frmsize.discrete.height;
1840
1841 if (0 == i) {
1842 count += 2;
1843 continue;
1844 }
1845
1846 //TODO insert in descend order
1847 if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
1848 picSizes[count + 0] = picSizes[count - 2];
1849 picSizes[count + 1] = picSizes[count - 1];
1850
1851 picSizes[count - 2] = frmsize.discrete.width;
1852 picSizes[count - 1] = frmsize.discrete.height;
1853 }
1854
1855 count += 2;
1856 }
1857 }
1858
1859 return count;
1860
1861}
1862
1863bool Sensor::get_sensor_status() {
1864 return mSensorWorkFlag;
1865}
1866
1867void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
1868 float totalGain = gain/100.0 * kBaseGainFactor;
1869 float noiseVarGain = totalGain * totalGain;
1870 float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
1871 + kReadNoiseVarAfterGain;
1872
1873 int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
1874 mScene.setReadoutPixel(0,0);
1875 for (unsigned int y = 0; y < kResolution[1]; y++ ) {
1876 int *bayerRow = bayerSelect + (y & 0x1) * 2;
1877 uint16_t *px = (uint16_t*)img + y * stride;
1878 for (unsigned int x = 0; x < kResolution[0]; x++) {
1879 uint32_t electronCount;
1880 electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];
1881
1882 // TODO: Better pixel saturation curve?
1883 electronCount = (electronCount < kSaturationElectrons) ?
1884 electronCount : kSaturationElectrons;
1885
1886 // TODO: Better A/D saturation curve?
1887 uint16_t rawCount = electronCount * totalGain;
1888 rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;
1889
1890 // Calculate noise value
1891 // TODO: Use more-correct Gaussian instead of uniform noise
1892 float photonNoiseVar = electronCount * noiseVarGain;
1893 float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
1894 // Scaled to roughly match gaussian/uniform noise stddev
1895 float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;
1896
1897 rawCount += kBlackLevel;
1898 rawCount += noiseStddev * noiseSample;
1899
1900 *px++ = rawCount;
1901 }
1902 // TODO: Handle this better
1903 //simulatedTime += kRowReadoutTime;
1904 }
1905 ALOGVV("Raw sensor image captured");
1906}
1907
1908void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
1909 float totalGain = gain/100.0 * kBaseGainFactor;
1910 // In fixed-point math, calculate total scaling from electrons to 8bpp
1911 int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1912 uint32_t inc = kResolution[0] / stride;
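    // scale64x is a 6-bit fixed-point multiplier: each electron count is
    // scaled by 64 * totalGain * 255 / kMaxRawValue and divided back by 64
    // (clamped at 255 * 64) when the 8-bit channel is written below.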
1913
1914 for (unsigned int y = 0, outY = 0; y < kResolution[1]; y+=inc, outY++ ) {
1915 uint8_t *px = img + outY * stride * 4;
1916 mScene.setReadoutPixel(0, y);
1917 for (unsigned int x = 0; x < kResolution[0]; x+=inc) {
1918 uint32_t rCount, gCount, bCount;
1919 // TODO: Perfect demosaicing is a cheat
1920 const uint32_t *pixel = mScene.getPixelElectrons();
1921 rCount = pixel[Scene::R] * scale64x;
1922 gCount = pixel[Scene::Gr] * scale64x;
1923 bCount = pixel[Scene::B] * scale64x;
1924
1925 *px++ = rCount < 255*64 ? rCount / 64 : 255;
1926 *px++ = gCount < 255*64 ? gCount / 64 : 255;
1927 *px++ = bCount < 255*64 ? bCount / 64 : 255;
1928 *px++ = 255;
1929 for (unsigned int j = 1; j < inc; j++)
1930 mScene.getPixelElectrons();
1931 }
1932 // TODO: Handle this better
1933 //simulatedTime += kRowReadoutTime;
1934 }
1935 ALOGVV("RGBA sensor image captured");
1936}
1937
1938void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
1939#if 0
1940 float totalGain = gain/100.0 * kBaseGainFactor;
1941 // In fixed-point math, calculate total scaling from electrons to 8bpp
1942 int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1943 uint32_t inc = kResolution[0] / stride;
1944
1945 for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
1946 mScene.setReadoutPixel(0, y);
1947 uint8_t *px = img + outY * stride * 3;
1948 for (unsigned int x = 0; x < kResolution[0]; x += inc) {
1949 uint32_t rCount, gCount, bCount;
1950 // TODO: Perfect demosaicing is a cheat
1951 const uint32_t *pixel = mScene.getPixelElectrons();
1952 rCount = pixel[Scene::R] * scale64x;
1953 gCount = pixel[Scene::Gr] * scale64x;
1954 bCount = pixel[Scene::B] * scale64x;
1955
1956 *px++ = rCount < 255*64 ? rCount / 64 : 255;
1957 *px++ = gCount < 255*64 ? gCount / 64 : 255;
1958 *px++ = bCount < 255*64 ? bCount / 64 : 255;
1959 for (unsigned int j = 1; j < inc; j++)
1960 mScene.getPixelElectrons();
1961 }
1962 // TODO: Handle this better
1963 //simulatedTime += kRowReadoutTime;
1964 }
1965#else
1966 uint8_t *src = NULL;
1967 int ret = 0, rotate = 0;
1968 uint32_t width = 0, height = 0;
1969 int dqTryNum = 3;
1970
1971 rotate = getPictureRotate();
1972 width = vinfo->picture.format.fmt.pix.width;
1973 height = vinfo->picture.format.fmt.pix.height;
1974
1975 if (mSensorType == SENSOR_USB) {
1976 releasebuf_and_stop_capturing(vinfo);
1977 } else {
1978 stop_capturing(vinfo);
1979 }
1980
1981 ret = start_picture(vinfo,rotate);
1982 if (ret < 0)
1983 {
1984 ALOGD("start picture failed!");
1985 }
1986 while(1)
1987 {
1988 src = (uint8_t *)get_picture(vinfo);
1989 if (NULL == src) {
1990 usleep(10000);
1991 continue;
1992 }
1993 if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { // src is known non-NULL here
1994 while (dqTryNum > 0) {
1995 if (NULL != src) {
1996 putback_picture_frame(vinfo);
1997 }
1998 usleep(10000);
1999 dqTryNum --;
2000 src = (uint8_t *)get_picture(vinfo);
2001 }
2002 }
2003
2004 if (NULL != src) {
2005 mSensorWorkFlag = true;
2006 if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2007 uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
2008 if ( tmp_buffer == NULL) {
2009 ALOGE("new buffer failed!\n");
2010 return;
2011 }
2012#if ANDROID_PLATFORM_SDK_VERSION > 23
2013 uint8_t *vBuffer = new uint8_t[width * height / 4];
2014 if (vBuffer == NULL)
2015 ALOGE("alloc temperary v buffer failed\n");
2016 uint8_t *uBuffer = new uint8_t[width * height / 4];
2017 if (uBuffer == NULL)
2018 ALOGE("alloc temperary u buffer failed\n");
2019
2020 if (ConvertToI420(src, vinfo->picture.buf.bytesused, tmp_buffer, width, uBuffer, (width + 1) / 2,
2021 vBuffer, (width + 1) / 2, 0, 0, width, height,
2022 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2023 DBG_LOGA("Decode MJPEG frame failed\n");
2024 putback_picture_frame(vinfo);
2025 usleep(5000);
2026 delete [] vBuffer; delete [] uBuffer; // new[] requires delete []
2027 delete [] tmp_buffer; // free the NV21 buffer too, or it leaks on every retry
2028 } else {
2029
2030 uint8_t *pUVBuffer = tmp_buffer + width * height;
2031 for (uint32_t i = 0; i < width * height / 4; i++) {
2032 *pUVBuffer++ = *(vBuffer + i);
2033 *pUVBuffer++ = *(uBuffer + i);
2034 }
2035
2036 delete [] vBuffer; // new[] requires delete []
2037 delete [] uBuffer;
2038 nv21_to_rgb24(tmp_buffer,img,width,height);
2039 if (tmp_buffer != NULL)
2040 delete [] tmp_buffer;
2041 break;
2042 }
2043#else
2044 if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
2045 width, tmp_buffer + width * height, (width + 1) / 2, width,
2046 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2047 DBG_LOGA("Decode MJPEG frame failed\n");
2048 putback_picture_frame(vinfo);
2049 usleep(5000); delete [] tmp_buffer; // free before retrying, or it leaks
2050 } else {
2051 nv21_to_rgb24(tmp_buffer,img,width,height);
2052 if (tmp_buffer != NULL)
2053 delete [] tmp_buffer;
2054 break;
2055 }
2056#endif
2057 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2058 if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
2059 yuyv422_to_rgb24(src,img,width,height);
2060 break;
2061 } else {
2062 putback_picture_frame(vinfo);
2063 usleep(5000);
2064 }
2065 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
2066 if (vinfo->picture.buf.length == width * height * 3) {
2067 memcpy(img, src, vinfo->picture.buf.length);
2068 } else {
2069 rgb24_memcpy(img, src, width, height);
2070 }
2071 break;
2072 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2073 memcpy(img, src, vinfo->picture.buf.length);
2074 break;
2075 }
2076 }
2077 }
2078 ALOGD("get picture success !");
2079
2080 if (mSensorType == SENSOR_USB) {
2081 releasebuf_and_stop_picture(vinfo);
2082 } else {
2083 stop_picture(vinfo);
2084 }
2085
2086#endif
2087}
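// The SDK > 23 branch above (and the matching preview path in captureNV21
// below) decodes MJPEG to planar I420 first, then interleaves the chroma into
// NV21's VU order. A hedged sketch of that common step (mjpegToNV21 is a
// hypothetical helper; the ConvertToI420 call mirrors the usage above rather
// than documenting libyuv's full API):
#if 0
static int mjpegToNV21(uint8_t *src, unsigned int srcLen,
                       uint8_t *nv21, uint32_t width, uint32_t height) {
    uint8_t *uBuf = new uint8_t[width * height / 4];
    uint8_t *vBuf = new uint8_t[width * height / 4];
    int ret = ConvertToI420(src, srcLen,
            nv21, width,                   // Y plane decodes in place
            uBuf, (width + 1) / 2,
            vBuf, (width + 1) / 2,
            0, 0, width, height, width, height,
            libyuv::kRotate0, libyuv::FOURCC_MJPG);
    if (ret == 0) {
        uint8_t *vu = nv21 + width * height;
        for (uint32_t i = 0; i < width * height / 4; i++) {
            *vu++ = vBuf[i];               // NV21 interleaves V first...
            *vu++ = uBuf[i];               // ...then U for each 2x2 block
        }
    }
    delete [] uBuf;
    delete [] vBuf;
    return ret;
}
#endif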
2088
2089void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
2090{
2091 for (int i = 0; i < width * height * 2; i += 2) {
2092 *dst++ = *(src + i);
2093 }
2094
2095 for (int y = 0; y < height - 1; y +=2) {
2096 for (int j = 0; j < width * 2; j += 4) {
2097 *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
2098 *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
2099 }
2100 src += width * 2 * 2;
2101 }
2102
2103 if (height & 1)
2104 for (int j = 0; j < width * 2; j += 4) {
2105 *dst++ = *(src + 3 + j); //v
2106 *dst++ = *(src + 1 + j); //u
2107 }
2108}
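// Layouts assumed by YUYVToNV21() above: YUYV packs two pixels into four
// bytes as [Y0 U Y1 V], so src[1] and src[3] hold the U and V shared by a
// horizontal pair; NV21 is a full-resolution Y plane followed by a
// half-resolution interleaved VU plane. Each output chroma byte averages two
// vertically adjacent source rows, with +1 for round-to-nearest; an odd final
// row is copied through unaveraged.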
2109
2110void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
2111{
2112 // width should be an even number;
2113 // the chroma stride is aligned to 16 bytes, matching ALIGN(width/2, 16) below.
2114 int i,j,stride,c_stride,c_size,y_size,cb_offset,cr_offset;
2115 unsigned char *dst_copy,*src_copy;
2116
2117 dst_copy = dst;
2118 src_copy = src;
2119
2120 y_size = width*height;
2121 c_stride = ALIGN(width/2, 16);
2122 c_size = c_stride * height/2;
2123 cr_offset = y_size;
2124 cb_offset = y_size+c_size;
2125
2126 for(i=0;i< y_size;i++){
2127 *dst++ = *src;
2128 src += 2;
2129 }
2130
2131 dst = dst_copy;
2132 src = src_copy;
2133
2134 for(i=0;i<height;i+=2){
2135 for(j=1;j<width*2;j+=4){//one line has 2*width bytes for yuyv.
2136 // average with rounding: (u1 + u2 + 1) / 2
2137 *(dst+cr_offset+j/4)= (*(src+j+2) + *(src+j+2+width*2) + 1)/2;
2138 *(dst+cb_offset+j/4)= (*(src+j) + *(src+j+width*2) + 1)/2;
2139 }
2140 dst += c_stride;
2141 src += width*4;
2142 }
2143}
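// YV12 layout produced above: a Y plane of width * height bytes, then the V
// plane, then the U plane, each chroma row padded to a 16-byte-aligned stride
// (Android's YV12 requirement). A small sketch of the offset math, assuming
// ALIGN() rounds up to the given boundary as in the code above:
#if 0
static void yv12Offsets(int width, int height) {
    int cStride = ALIGN(width / 2, 16);     // chroma row stride, 16-aligned
    int cSize = cStride * height / 2;       // bytes per chroma plane
    int crOffset = width * height;          // V plane starts right after Y
    int cbOffset = crOffset + cSize;        // U plane starts after V
    int total = cbOffset + cSize;
    // For 640x480: cStride = 320, cSize = 76800, total = 460800 (1.5 B/px)
    ALOGV("YV12 offsets: cr=%d cb=%d total=%d", crOffset, cbOffset, total);
}
#endif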
2144
2145status_t Sensor::force_reset_sensor() {
2146 DBG_LOGA("force_reset_sensor");
2147 status_t ret;
2148 mTimeOutCount = 0;
2149 ret = streamOff();
2150 ret = setBuffersFormat(vinfo);
2151 ret = streamOn();
2152 DBG_LOGB("%s , ret = %d", __FUNCTION__, ret);
2153 return ret;
2154}
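// force_reset_sensor() is the recovery path used by the capture loops below:
// after 300 consecutive empty dequeues at 5 ms apart (roughly 1.5 s without a
// frame) the stream is stopped, its buffer format renegotiated, and streaming
// restarted. Note that only the status of the final streamOn() is returned.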
2155
2156void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
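    // The "#if 0" scene-simulation path below appears to be disabled reference
    // code kept from the emulated-sensor template; it predates this function's
    // (StreamBuffer, gain) signature, which is why it refers to img/stride
    // variables that no longer exist here. The same disabled block precedes
    // the real implementations in captureYV12() and captureYUYV() below.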
2157#if 0
2158 float totalGain = gain/100.0 * kBaseGainFactor;
2159 // Using fixed-point math with 6 bits of fractional precision.
2160 // In fixed-point math, calculate total scaling from electrons to 8bpp
2161 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2162 // In fixed-point math, saturation point of sensor after gain
2163 const int saturationPoint = 64 * 255;
2164 // Fixed-point coefficients for RGB-YUV transform
2165 // Based on JFIF RGB->YUV transform.
2166 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2167 const int rgbToY[] = {19, 37, 7};
2168 const int rgbToCb[] = {-10,-21, 32, 524288};
2169 const int rgbToCr[] = {32,-26, -5, 524288};
2170 // Scale back to 8bpp non-fixed-point
2171 const int scaleOut = 64;
2172 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2173
2174 uint32_t inc = kResolution[0] / stride;
2175 uint32_t outH = kResolution[1] / inc;
2176 for (unsigned int y = 0, outY = 0;
2177 y < kResolution[1]; y+=inc, outY++) {
2178 uint8_t *pxY = img + outY * stride;
2179 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2180 mScene.setReadoutPixel(0,y);
2181 for (unsigned int outX = 0; outX < stride; outX++) {
2182 int32_t rCount, gCount, bCount;
2183 // TODO: Perfect demosaicing is a cheat
2184 const uint32_t *pixel = mScene.getPixelElectrons();
2185 rCount = pixel[Scene::R] * scale64x;
2186 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2187 gCount = pixel[Scene::Gr] * scale64x;
2188 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2189 bCount = pixel[Scene::B] * scale64x;
2190 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2191
2192 *pxY++ = (rgbToY[0] * rCount +
2193 rgbToY[1] * gCount +
2194 rgbToY[2] * bCount) / scaleOutSq;
2195 if (outY % 2 == 0 && outX % 2 == 0) {
2196 *pxVU++ = (rgbToCr[0] * rCount +
2197 rgbToCr[1] * gCount +
2198 rgbToCr[2] * bCount +
2199 rgbToCr[3]) / scaleOutSq;
2200 *pxVU++ = (rgbToCb[0] * rCount +
2201 rgbToCb[1] * gCount +
2202 rgbToCb[2] * bCount +
2203 rgbToCb[3]) / scaleOutSq;
2204 }
2205 for (unsigned int j = 1; j < inc; j++)
2206 mScene.getPixelElectrons();
2207 }
2208 }
2209#else
2210 uint8_t *src;
2211
2212 if (mKernelBuffer) {
2213 src = mKernelBuffer;
2214 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2215 uint32_t width = vinfo->preview.format.fmt.pix.width;
2216 uint32_t height = vinfo->preview.format.fmt.pix.height;
2217 if ((width == b.width) && (height == b.height)) {
2218 memcpy(b.img, src, b.width * b.height * 3/2);
2219 } else {
2220 ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2221 }
2222 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2223 uint32_t width = vinfo->preview.format.fmt.pix.width;
2224 uint32_t height = vinfo->preview.format.fmt.pix.height;
2225
2226 if ((width == b.width) && (height == b.height)) {
2227 memcpy(b.img, src, b.width * b.height * 3/2);
2228 } else {
2229 ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2230 }
2231 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2232 uint32_t width = vinfo->preview.format.fmt.pix.width;
2233 uint32_t height = vinfo->preview.format.fmt.pix.height;
2234
2235 if ((width == b.width) && (height == b.height)) {
2236 memcpy(b.img, src, b.width * b.height * 3/2);
2237 } else {
2238 ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2239 }
2240 } else {
2241 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2242 }
2243 return ;
2244 }
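        // No cached frame, so dequeue one from the driver. mKernelBuffer acts
        // as a cache of the most recently decoded preview frame: all three
        // source formats (NV21, YUYV, MJPEG) are stored as NV21, which is why
        // the three branches above share the same memcpy-or-ReSizeNV21 logic.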
2245 while(1){
2246 if (mFlushFlag) {
2247 break;
2248 }
2249
2250 if (mExitSensorThread) {
2251 break;
2252 }
2253
2254 src = (uint8_t *)get_frame(vinfo);
2255 if (NULL == src) {
2256 if (get_device_status(vinfo)) {
2257 break;
2258 }
2259 ALOGVV("get frame NULL, sleep 5ms");
2260 usleep(5000);
2261 mTimeOutCount++;
2262 if (mTimeOutCount > 300) {
2263 DBG_LOGA("force sensor reset.\n");
2264 force_reset_sensor();
2265 }
2266 continue;
2267 }
2268 mTimeOutCount = 0;
2269 if (mSensorType == SENSOR_USB) {
2270 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2271 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2272 DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2273 putback_frame(vinfo);
2274 continue;
2275 }
2276 }
2277 }
2278 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2279 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2280 memcpy(b.img, src, vinfo->preview.buf.length);
2281 } else {
2282 nv21_memcpy_align32 (b.img, src, b.width, b.height);
2283 }
2284 mKernelBuffer = b.img;
2285 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2286 uint32_t width = vinfo->preview.format.fmt.pix.width;
2287 uint32_t height = vinfo->preview.format.fmt.pix.height;
2288 memset(mTemp_buffer, 0, width * height * 3/2);
2289 YUYVToNV21(src, mTemp_buffer, width, height);
2290 if ((width == b.width) && (height == b.height)) {
2291 memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
2292 mKernelBuffer = b.img;
2293 } else {
2294 if ((b.height % 2) != 0) {
2295 DBG_LOGB("%d , b.height = %d", __LINE__, b.height);
2296 b.height = b.height - 1;
2297 }
2298 ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
2299 mKernelBuffer = mTemp_buffer;
2300 }
2301 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2302 uint32_t width = vinfo->preview.format.fmt.pix.width;
2303 uint32_t height = vinfo->preview.format.fmt.pix.height;
2304 memset(mTemp_buffer, 0, width * height * 3/2);
2305#if ANDROID_PLATFORM_SDK_VERSION > 23
2306 uint8_t *vBuffer = new uint8_t[width * height / 4];
2307 if (vBuffer == NULL)
2308 ALOGE("alloc temperary v buffer failed\n");
2309 uint8_t *uBuffer = new uint8_t[width * height / 4];
2310 if (uBuffer == NULL)
2311 ALOGE("alloc temperary u buffer failed\n");
2312
2313 if (ConvertToI420(src, vinfo->preview.buf.bytesused, mTemp_buffer, width, uBuffer, (width + 1) / 2,
2314 vBuffer, (width + 1) / 2, 0, 0, width, height,
2315 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2316 DBG_LOGA("Decode MJPEG frame failed\n");
2317 putback_frame(vinfo);
2318 ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__ , __LINE__);
2319 delete [] vBuffer; delete [] uBuffer; continue; // free temp planes before retrying
2320 }
2321 uint8_t *pUVBuffer = mTemp_buffer + width * height;
2322 for (uint32_t i = 0; i < width * height / 4; i++) {
2323 *pUVBuffer++ = *(vBuffer + i);
2324 *pUVBuffer++ = *(uBuffer + i);
2325 }
2326 delete [] vBuffer; // new[] requires delete []
2327 delete [] uBuffer;
2328#else
2329 if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, mTemp_buffer,
2330 width, mTemp_buffer + width * height, (width + 1) / 2, width,
2331 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2332 putback_frame(vinfo);
2333 ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__ , __LINE__);
2334 continue;
2335 }
2336#endif
2337 if ((width == b.width) && (height == b.height)) {
2338 memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
2339 mKernelBuffer = b.img;
2340 } else {
2341 if ((b.height % 2) != 0) {
2342 DBG_LOGB("%d, b.height = %d", __LINE__, b.height);
2343 b.height = b.height - 1;
2344 }
2345 ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
2346 mKernelBuffer = mTemp_buffer;
2347 }
2348 }
2349 mSensorWorkFlag = true;
2350 break;
2351 }
2352#endif
2353
2354 ALOGVV("NV21 sensor image captured");
2355}
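// The dequeue loop above follows the same skeleton in each capture routine:
// bail out on flush or thread exit, sleep 5 ms on an empty dequeue, and reset
// the stream after 300 misses (about 1.5 s). Condensed sketch (illustrative
// only; decode/copy details elided):
#if 0
while (!mFlushFlag && !mExitSensorThread) {
    uint8_t *frame = (uint8_t *)get_frame(vinfo);
    if (frame != NULL) {
        mTimeOutCount = 0;
        // ... convert into b.img, set mKernelBuffer, then stop ...
        break;
    }
    usleep(5000);                  // 5 ms between retries
    if (++mTimeOutCount > 300)     // ~1.5 s without frames
        force_reset_sensor();      // streamOff + setBuffersFormat + streamOn
}
#endif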
2356
2357void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
2358#if 0
2359 float totalGain = gain/100.0 * kBaseGainFactor;
2360 // Using fixed-point math with 6 bits of fractional precision.
2361 // In fixed-point math, calculate total scaling from electrons to 8bpp
2362 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2363 // In fixed-point math, saturation point of sensor after gain
2364 const int saturationPoint = 64 * 255;
2365 // Fixed-point coefficients for RGB-YUV transform
2366 // Based on JFIF RGB->YUV transform.
2367 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2368 const int rgbToY[] = {19, 37, 7};
2369 const int rgbToCb[] = {-10,-21, 32, 524288};
2370 const int rgbToCr[] = {32,-26, -5, 524288};
2371 // Scale back to 8bpp non-fixed-point
2372 const int scaleOut = 64;
2373 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2374
2375 uint32_t inc = kResolution[0] / stride;
2376 uint32_t outH = kResolution[1] / inc;
2377 for (unsigned int y = 0, outY = 0;
2378 y < kResolution[1]; y+=inc, outY++) {
2379 uint8_t *pxY = img + outY * stride;
2380 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2381 mScene.setReadoutPixel(0,y);
2382 for (unsigned int outX = 0; outX < stride; outX++) {
2383 int32_t rCount, gCount, bCount;
2384 // TODO: Perfect demosaicing is a cheat
2385 const uint32_t *pixel = mScene.getPixelElectrons();
2386 rCount = pixel[Scene::R] * scale64x;
2387 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2388 gCount = pixel[Scene::Gr] * scale64x;
2389 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2390 bCount = pixel[Scene::B] * scale64x;
2391 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2392
2393 *pxY++ = (rgbToY[0] * rCount +
2394 rgbToY[1] * gCount +
2395 rgbToY[2] * bCount) / scaleOutSq;
2396 if (outY % 2 == 0 && outX % 2 == 0) {
2397 *pxVU++ = (rgbToCr[0] * rCount +
2398 rgbToCr[1] * gCount +
2399 rgbToCr[2] * bCount +
2400 rgbToCr[3]) / scaleOutSq;
2401 *pxVU++ = (rgbToCb[0] * rCount +
2402 rgbToCb[1] * gCount +
2403 rgbToCb[2] * bCount +
2404 rgbToCb[3]) / scaleOutSq;
2405 }
2406 for (unsigned int j = 1; j < inc; j++)
2407 mScene.getPixelElectrons();
2408 }
2409 }
2410#else
2411 uint8_t *src;
2412 if (mKernelBuffer) {
2413 src = mKernelBuffer;
2414 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2415 //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
2416 ALOGI("Sclale YV12 frame down \n");
2417
2418 int width = vinfo->preview.format.fmt.pix.width;
2419 int height = vinfo->preview.format.fmt.pix.height;
2420 int ret = libyuv::I420Scale(src, width,
2421 src + width * height, width / 2,
2422 src + width * height + width * height / 4, width / 2,
2423 width, height,
2424 b.img, b.width,
2425 b.img + b.width * b.height, b.width / 2,
2426 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2427 b.width, b.height,
2428 libyuv::kFilterNone);
2429 if (ret < 0)
2430 ALOGE("Sclale YV12 frame down failed!\n");
2431 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2432 int width = vinfo->preview.format.fmt.pix.width;
2433 int height = vinfo->preview.format.fmt.pix.height;
2434 uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
2435
2436 if ( tmp_buffer == NULL) {
2437 ALOGE("new buffer failed!\n");
2438 return;
2439 }
2440
2441 YUYVToYV12(src, tmp_buffer, width, height);
2442
2443 int ret = libyuv::I420Scale(tmp_buffer, width,
2444 tmp_buffer + width * height, width / 2,
2445 tmp_buffer + width * height + width * height / 4, width / 2,
2446 width, height,
2447 b.img, b.width,
2448 b.img + b.width * b.height, b.width / 2,
2449 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2450 b.width, b.height,
2451 libyuv::kFilterNone);
2452 if (ret < 0)
2453 ALOGE("Sclale YV12 frame down failed!\n");
2454 delete [] tmp_buffer;
2455 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2456 int width = vinfo->preview.format.fmt.pix.width;
2457 int height = vinfo->preview.format.fmt.pix.height;
2458 uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
2459
2460 if ( tmp_buffer == NULL) {
2461 ALOGE("new buffer failed!\n");
2462 return;
2463 }
2464
2465 if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
2466 tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
2467 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2468 DBG_LOGA("Decode MJPEG frame failed\n");
2469 }
2470
2471 int ret = libyuv::I420Scale(tmp_buffer, width,
2472 tmp_buffer + width * height, width / 2,
2473 tmp_buffer + width * height + width * height / 4, width / 2,
2474 width, height,
2475 b.img, b.width,
2476 b.img + b.width * b.height, b.width / 2,
2477 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2478 b.width, b.height,
2479 libyuv::kFilterNone);
2480 if (ret < 0)
2481 ALOGE("Sclale YV12 frame down failed!\n");
2482
2483 delete [] tmp_buffer;
2484 } else {
2485 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2486 }
2487 return ;
2488 }
2489 while(1){
2490 if (mFlushFlag) {
2491 break;
2492 }
2493 if (mExitSensorThread) {
2494 break;
2495 }
2496 src = (uint8_t *)get_frame(vinfo);
2497
2498 if (NULL == src) {
2499 if (get_device_status(vinfo)) {
2500 break;
2501 }
2502 ALOGVV("get frame NULL, sleep 5ms");
2503 usleep(5000);
2504 mTimeOutCount++;
2505 if (mTimeOutCount > 300) {
2506 force_reset_sensor();
2507 }
2508 continue;
2509 }
2510 mTimeOutCount = 0;
2511 if (mSensorType == SENSOR_USB) {
2512 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2513 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2514 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2515 putback_frame(vinfo);
2516 continue;
2517 }
2518 }
2519 }
2520 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2521 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2522 memcpy(b.img, src, vinfo->preview.buf.length);
2523 } else {
2524 yv12_memcpy_align32 (b.img, src, b.width, b.height);
2525 }
2526 mKernelBuffer = b.img;
2527 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2528 int width = vinfo->preview.format.fmt.pix.width;
2529 int height = vinfo->preview.format.fmt.pix.height;
2530 YUYVToYV12(src, b.img, width, height);
2531 mKernelBuffer = b.img;
2532 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2533 int width = vinfo->preview.format.fmt.pix.width;
2534 int height = vinfo->preview.format.fmt.pix.height;
2535 if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
2536 b.img + width * height, (width + 1) / 2, 0, 0, width, height,
2537 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2538 putback_frame(vinfo);
2539 DBG_LOGA("Decode MJPEG frame failed\n");
2540 continue;
2541 }
2542 mKernelBuffer = b.img;
2543 } else {
2544 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2545 }
2546 mSensorWorkFlag = true;
2547 break;
2548 }
2549#endif
2550 //mKernelBuffer = src;
2551 ALOGVV("YV12 sensor image captured");
2552}
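// Note on the I420Scale calls above: libyuv's parameters are ordered Y, U, V,
// but the code passes the chroma planes in YV12 order (V before U) for both
// the source and the destination. Because the swap is applied consistently on
// both sides, each plane is scaled into its own slot and the output remains
// valid YV12.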
2553
2554void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
2555#if 0
2556 float totalGain = gain/100.0 * kBaseGainFactor;
2557 // Using fixed-point math with 6 bits of fractional precision.
2558 // In fixed-point math, calculate total scaling from electrons to 8bpp
2559 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2560 // In fixed-point math, saturation point of sensor after gain
2561 const int saturationPoint = 64 * 255;
2562 // Fixed-point coefficients for RGB-YUV transform
2563 // Based on JFIF RGB->YUV transform.
2564 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2565 const int rgbToY[] = {19, 37, 7};
2566 const int rgbToCb[] = {-10,-21, 32, 524288};
2567 const int rgbToCr[] = {32,-26, -5, 524288};
2568 // Scale back to 8bpp non-fixed-point
2569 const int scaleOut = 64;
2570 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2571
2572 uint32_t inc = kResolution[0] / stride;
2573 uint32_t outH = kResolution[1] / inc;
2574 for (unsigned int y = 0, outY = 0;
2575 y < kResolution[1]; y+=inc, outY++) {
2576 uint8_t *pxY = img + outY * stride;
2577 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2578 mScene.setReadoutPixel(0,y);
2579 for (unsigned int outX = 0; outX < stride; outX++) {
2580 int32_t rCount, gCount, bCount;
2581 // TODO: Perfect demosaicing is a cheat
2582 const uint32_t *pixel = mScene.getPixelElectrons();
2583 rCount = pixel[Scene::R] * scale64x;
2584 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2585 gCount = pixel[Scene::Gr] * scale64x;
2586 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2587 bCount = pixel[Scene::B] * scale64x;
2588 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2589
2590 *pxY++ = (rgbToY[0] * rCount +
2591 rgbToY[1] * gCount +
2592 rgbToY[2] * bCount) / scaleOutSq;
2593 if (outY % 2 == 0 && outX % 2 == 0) {
2594 *pxVU++ = (rgbToCr[0] * rCount +
2595 rgbToCr[1] * gCount +
2596 rgbToCr[2] * bCount +
2597 rgbToCr[3]) / scaleOutSq;
2598 *pxVU++ = (rgbToCb[0] * rCount +
2599 rgbToCb[1] * gCount +
2600 rgbToCb[2] * bCount +
2601 rgbToCb[3]) / scaleOutSq;
2602 }
2603 for (unsigned int j = 1; j < inc; j++)
2604 mScene.getPixelElectrons();
2605 }
2606 }
2607#else
2608 uint8_t *src;
2609 if (mKernelBuffer) {
2610 src = mKernelBuffer;
2611 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2612 //TODO YUYV scale
2613 //memcpy(img, src, vinfo->preview.buf.length);
2614
2615 } else
2616 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2617
2618 return ;
2619 }
2620
2621 while(1) {
2622 if (mFlushFlag) {
2623 break;
2624 }
2625 if (mExitSensorThread) {
2626 break;
2627 }
2628 src = (uint8_t *)get_frame(vinfo);
2629 if (NULL == src) {
2630 if (get_device_status(vinfo)) {
2631 break;
2632 }
2633 ALOGVV("get frame NULL, sleep 5ms");
2634 usleep(5000);
2635 mTimeOutCount++;
2636 if (mTimeOutCount > 300) {
2637 force_reset_sensor();
2638 }
2639 continue;
2640 }
2641 mTimeOutCount = 0;
2642 if (mSensorType == SENSOR_USB) {
2643 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2644 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2645 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2646 putback_frame(vinfo);
2647 continue;
2648 }
2649 }
2650 }
2651 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2652 memcpy(img, src, vinfo->preview.buf.length);
2653 mKernelBuffer = src;
2654 } else {
2655 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2656 }
2657 mSensorWorkFlag = true;
2658 break;
2659 }
2660#endif
2661 //mKernelBuffer = src;
2662 ALOGVV("YUYV sensor image captured");
2663}
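// The YUYV scaling left as a TODO above could follow the same pattern as the
// other formats: unpack to planar I420, scale, and repack. A hedged sketch
// using libyuv (assuming YUY2ToI420, I420Scale, and I420ToYUY2 with their
// usual signatures; buffer management and error checks elided):
#if 0
static void scaleYUYV(const uint8_t *src, int srcW, int srcH,
                      uint8_t *dst, int dstW, int dstH, uint8_t *scratch) {
    // scratch must hold (srcW*srcH + dstW*dstH) * 3 / 2 bytes of I420 data
    uint8_t *i420Src = scratch;
    uint8_t *i420Dst = scratch + srcW * srcH * 3 / 2;
    libyuv::YUY2ToI420(src, srcW * 2,
            i420Src, srcW,
            i420Src + srcW * srcH, srcW / 2,
            i420Src + srcW * srcH * 5 / 4, srcW / 2,
            srcW, srcH);
    libyuv::I420Scale(i420Src, srcW,
            i420Src + srcW * srcH, srcW / 2,
            i420Src + srcW * srcH * 5 / 4, srcW / 2,
            srcW, srcH,
            i420Dst, dstW,
            i420Dst + dstW * dstH, dstW / 2,
            i420Dst + dstW * dstH * 5 / 4, dstW / 2,
            dstW, dstH, libyuv::kFilterNone);
    libyuv::I420ToYUY2(i420Dst, dstW,
            i420Dst + dstW * dstH, dstW / 2,
            i420Dst + dstW * dstH * 5 / 4, dstW / 2,
            dst, dstW * 2, dstW, dstH);
}
#endif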
2664
2665void Sensor::dump(int fd) {
2666 String8 result;
2667 result = String8::format("%s, sensor preview information:\n", __FILE__);
2668 result.appendFormat("camera preview fps: %.2f\n", mCurFps);
2669 result.appendFormat("camera preview width: %d, height: %d\n",
2670 vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);
2671
2672 result.appendFormat("camera preview format: %.4s\n\n",
2673 (char *) &vinfo->preview.format.fmt.pix.pixelformat);
2674
2675 write(fd, result.string(), result.size());
2676}
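// The %.4s above prints the V4L2 fourcc by reading the 32-bit pixelformat as
// four ASCII bytes; on a little-endian platform V4L2_PIX_FMT_NV21 prints as
// "NV21" and V4L2_PIX_FMT_YUYV as "YUYV".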
2677
2678} // namespace android
2679
2680