/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>


extern "C" {
    #include "jutils.h"
}


#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
        Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
        * Sensor::kVoltsPerLuxSecond;
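// Numerically: 2000 e- / 0.520 V * 0.100 V/(lux*s) ~= 384.6 electrons per
// lux-second; this is the sensitivity handed to the Scene model in the
// constructor below.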

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
        Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
        Sensor::kReadNoiseStddevBeforeGain *
        Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
        Sensor::kReadNoiseStddevAfterGain *
        Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
        Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

const usb_frmsize_discrete_t kUsbAvailablePictureSize[] = {
        {4128, 3096},
        {3264, 2448},
        {2592, 1944},
        {2592, 1936},
        {2560, 1920},
        {2688, 1520},
        {2048, 1536},
        {1600, 1200},
        {1920, 1088},
        {1920, 1080},
        {1440, 1080},
        {1280, 960},
        {1280, 720},
        {1024, 768},
        {960, 720},
        {720, 480},
        {640, 480},
        {320, 240},
};
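// Whitelist of capture resolutions advertised for USB (UVC) sensors; MJPEG and
// YUYV frame sizes reported by the device are filtered against this table in
// getStreamConfigurations() via IsUsbAvailablePictureSize().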

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
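// Sanity check (per the stated +-3.6% bound): sqrtf_approx(4.0f) falls within
// [1.93, 2.07], and sqrtf_approx(100.0f) within [9.64, 10.36].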

// Copy a 24-bit RGB image whose source rows are padded to a 32-pixel-aligned
// stride into a tightly packed destination.
void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & (~31);
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
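// e.g. ALIGN(30, 16) == 32, ALIGN(32, 16) == 32.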

bool IsUsbAvailablePictureSize(const usb_frmsize_discrete_t AvailablePictureSize[], uint32_t width, uint32_t height)
{
    int i;
    bool ret = false;
    int count = sizeof(kUsbAvailablePictureSize)/sizeof(kUsbAvailablePictureSize[0]);
    for (i = 0; i < count; i++) {
        if ((width == AvailablePictureSize[i].width) && (height == AvailablePictureSize[i].height)) {
            ret = true;
            break; // no need to scan the rest of the table once a match is found
        }
    }
    return ret;
}

void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}

Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mTemp_buffer(NULL),
        mExitSensorThread(false),
        mIoctlSupport(0),
        msupportrotate(0),
        mTimeOutCount(0),
        mWait(false),
        mPre_width(0),
        mPre_height(0),
        mFlushFlag(false),
        mSensorWorkFlag(false),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera3::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    if (vinfo == NULL) {
        // Guard against allocation failure before dereferencing vinfo.
        ALOGE("Unable to allocate VideoInfo");
        return NO_MEMORY;
    }
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    if (NULL == mTemp_buffer) {
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        DBG_LOGB("setOutputFormat :: pre_width = %d, pre_height = %d \n", mPre_width, mPre_height);
        mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("first allocation of mTemp_buffer failed!");
            return -1;
        }
    }

    // Reallocate the scratch buffer if either preview dimension changed.
    if ((mPre_width != vinfo->preview.format.fmt.pix.width) || (mPre_height != vinfo->preview.format.fmt.pix.height)) {
        if (mTemp_buffer) {
            delete [] mTemp_buffer;
            mTemp_buffer = NULL;
        }
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("allocation of mTemp_buffer failed!");
            return -1;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
            || (vinfo->preview.format.fmt.pix.height != height)
            //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
            ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format to a v4l2 format and check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }
    ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
    return BAD_VALUE;
}

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;

    mTimeOutCount = 0;

    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
        camera_close(vinfo);
        free(vinfo);
        vinfo = NULL;
    }

    if (mTemp_buffer) {
        delete [] mTemp_buffer;
        mTemp_buffer = NULL;
    }

    mSensorWorkFlag = false;

    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

void Sensor::sendExitSingalToSensor() {
    {
        Mutex::Autolock lock(mReadoutMutex);
        mExitSensorThread = true;
        mReadoutComplete.signal();
    }

    {
        Mutex::Autolock lock(mControlMutex);
        mVSync.signal();
    }

    {
        Mutex::Autolock lock(mReadoutMutex);
        mReadoutAvailable.signal();
    }
}

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d, zoomMax:%d, zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("level not in [min, max]; falling back to min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d, min=%d, max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        // Power-line frequency must be a menu control for the
        // VIDIOC_QUERYMENU enumeration below to be valid.
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    // Pack the focus-area center into one control value: x-center + 1000 in
    // the high 16 bits, y-center + 1000 in the low 16 bits.
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

void Sensor::setFlushFlag(bool flushFlag) {
    mFlushFlag = flushFlag;
}

status_t Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);
    CAMHAL_LOGVB("%s , E mControlMutex", __FUNCTION__);
    if (mExitSensorThread) {
        return -1;
    }

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    CAMHAL_LOGVB("%s , X mControlMutex , mGotVSync = %d", __FUNCTION__, mGotVSync);
    return mGotVSync;
}

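// Hand-off protocol with threadLoop(): the capture thread publishes a finished
// frame in mCapturedBuffers and signals mReadoutAvailable; this waiter consumes
// it and, when the capture thread is blocked on us (mWait), signals
// mReadoutComplete so it can continue.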
status_t Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mExitSensorThread) {
        return -1;
    }

    if (mCapturedBuffers == NULL) {
        int res;
        CAMHAL_LOGVB("%s , E mReadoutMutex , reltime = %d", __FUNCTION__, reltime);
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            if (mFlushFlag) {
                ALOGE("%s , return immediately , mWait = %d", __FUNCTION__, mWait);
                if (mWait) {
                    mWait = false;
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                    mReadoutComplete.signal();
                } else {
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                }
                return -2;
            } else {
                ALOGE("Error waiting for sensor readout signal: %d", res);
                return false;
            }
        }
    }
    if (mWait) {
        mWait = false;
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
        mReadoutComplete.signal();
    } else {
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X", __FUNCTION__);
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    int res;
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        CAMHAL_LOGVB("%s , E mControlMutex", __FUNCTION__);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        CAMHAL_LOGVB("%s , E mReadoutMutex", __FUNCTION__);
        if (mCapturedBuffers != NULL) {
            ALOGE("Waiting for readout thread to catch up!");
            mWait = true;
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X mReadoutMutex", __FUNCTION__);

    if (mExitSensorThread) {
        return false;
    }
    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
#if 0
            if (get_device_status(vinfo)) {
                listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
            }
#endif
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }

    if (mExitSensorThread) {
        return false;
    }

    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    CAMHAL_LOGVB("%s , X", __FUNCTION__);
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    ALOGVV("size=%zu\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %zu: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
#if PLATFORM_SDK_VERSION <= 22
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
#endif
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add auxiliary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                uint32_t pixelfmt;
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                        b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    // A JPEG (RGB888) capture is stored in its own buffer and its V4L2 frame is
    // put back separately, so only put the frame back here for non-JPEG paths.
    if ((!isjpeg) && (mKernelBuffer)) {
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

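// Layout note: picSizes[] is filled as flat 4-int32 records of
// { HAL pixel format, width, height, ANDROID_SCALER_..._OUTPUT }, kept sorted
// by descending area within each format block via the insertion loops below.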
int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    int pixelfmt;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format =\
                HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format =\
                    HAL_PIXEL_FORMAT_YCbCr_422_I\n", frmsize.discrete.width,
                    frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                    if (!IsUsbAvailablePictureSize(kUsbAvailablePictureSize, frmsize.discrete.width, frmsize.discrete.height))
                        continue;
                }

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                // TODO: insert in descending order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel format for jpeg");

    return count;
}

int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0; int framerate = 0; int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // 'duration' is a pointer parameter here, so ARRAY_SIZE() on it would
    // measure the pointer; clear the caller-provided number of entries instead.
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        //V4L2_PIX_FMT_YVU420
    };

    for (i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        /* we got all durations of each resolution for the previous format */
        if (count >= tmp_size)
            break;

        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size-3];
                fival.height = picSizes[size-2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count >= tmp_size)
                            break;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        if (framerate == 5) {
                            duration[count+3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count+3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count+3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count+3] = (int64_t)33333333L;
                        } else {
                            duration[count+3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L; // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            // TODO: insert in descending order
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}

bool Sensor::get_sensor_status() {
    return mSensorWorkFlag;
}

void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}
1917
1918void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
1919#if 0
1920 float totalGain = gain/100.0 * kBaseGainFactor;
1921 // In fixed-point math, calculate total scaling from electrons to 8bpp
1922 int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1923 uint32_t inc = kResolution[0] / stride;
1924
1925 for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
1926 mScene.setReadoutPixel(0, y);
1927 uint8_t *px = img + outY * stride * 3;
1928 for (unsigned int x = 0; x < kResolution[0]; x += inc) {
1929 uint32_t rCount, gCount, bCount;
1930 // TODO: Perfect demosaicing is a cheat
1931 const uint32_t *pixel = mScene.getPixelElectrons();
1932 rCount = pixel[Scene::R] * scale64x;
1933 gCount = pixel[Scene::Gr] * scale64x;
1934 bCount = pixel[Scene::B] * scale64x;
1935
1936 *px++ = rCount < 255*64 ? rCount / 64 : 255;
1937 *px++ = gCount < 255*64 ? gCount / 64 : 255;
1938 *px++ = bCount < 255*64 ? bCount / 64 : 255;
1939 for (unsigned int j = 1; j < inc; j++)
1940 mScene.getPixelElectrons();
1941 }
1942 // TODO: Handle this better
1943 //simulatedTime += kRowReadoutTime;
1944 }
1945#else
1946 uint8_t *src = NULL;
1947 int ret = 0, rotate = 0;
1948 uint32_t width = 0, height = 0;
1949 int dqTryNum = 3;
1950
1951 rotate = getPictureRotate();
1952 width = vinfo->picture.format.fmt.pix.width;
1953 height = vinfo->picture.format.fmt.pix.height;
1954
1955 if (mSensorType == SENSOR_USB) {
1956 releasebuf_and_stop_capturing(vinfo);
1957 } else {
1958 stop_capturing(vinfo);
1959 }
1960
1961 ret = start_picture(vinfo,rotate);
1962 if (ret < 0)
1963 {
1964 ALOGD("start picture failed!");
1965 }
1966 while(1)
1967 {
1968 src = (uint8_t *)get_picture(vinfo);
1969 if ((NULL != src) && (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)) {
            while (dqTryNum > 0) {
                if (NULL != src) {
                    putback_picture_frame(vinfo);
                }
                usleep(10000);
                dqTryNum--;
                src = (uint8_t *)get_picture(vinfo);
            }
        }

        if (NULL != src) {
            if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                // Plain new would throw instead of returning NULL, so use the
                // nothrow form to keep the failure check below meaningful.
                uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];
                if (tmp_buffer == NULL) {
                    ALOGE("new buffer failed!\n");
                    return;
                }
#if ANDROID_PLATFORM_SDK_VERSION > 23
                if (jpeg_decode(&tmp_buffer, src, width, height, V4L2_PIX_FMT_NV21) != 0) {
#else
                if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                        width, tmp_buffer + width * height, (width + 1) / 2, width,
                        height, width, height, libyuv::FOURCC_MJPG) != 0) {
#endif
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    // Free before retrying; each loop iteration allocates a
                    // fresh temp buffer.
                    delete [] tmp_buffer;
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer, img, width, height);
                    delete [] tmp_buffer;
                    break;
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
                    yuyv422_to_rgb24(src, img, width, height);
                    break;
                } else {
                    putback_picture_frame(vinfo);
                    usleep(5000);
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
                if (vinfo->picture.buf.length == width * height * 3) {
                    memcpy(img, src, vinfo->picture.buf.length);
                } else {
                    rgb24_memcpy(img, src, width, height);
                }
                break;
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
                memcpy(img, src, vinfo->picture.buf.length);
                break;
            }
        }
    }
    ALOGD("get picture success!");

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}

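// Converts packed YUYV (YUY2) into NV21: a full-resolution Y plane followed by
// interleaved VU samples at half vertical resolution. Chroma for each output
// row pair is the rounding average of the two source rows; an odd trailing row
// takes its chroma unaveraged.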
void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
    // Pass 1: copy every luma byte (the even offsets in YUYV).
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    // Pass 2: average chroma over each pair of rows, emitting V then U.
    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
        }
        src += width * 2 * 2;
    }

    // Odd height: the last row has no partner row to average with.
    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); //v
            *dst++ = *(src + 1 + j); //u
        }
}

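// Converts packed YUYV into YV12: a full-resolution Y plane, then the V plane,
// then the U plane, each chroma plane at quarter resolution with its row
// stride rounded up via ALIGN(width / 2, 16).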
void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    //width should be an even number.
    //chroma rows are aligned via ALIGN(width / 2, 16).
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width / 2, 16);
    c_size = c_stride * height / 2;
    cr_offset = y_size;
    cb_offset = y_size + c_size;

    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) { //one line has 2*width bytes for yuyv.
            //rounding average of the two rows' chroma: (u1 + u2 + 1) / 2
            *(dst + cr_offset + j / 4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j / 4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
}

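// Bounces the preview stream (stream off, re-set the buffer format, stream on)
// after repeated frame timeouts. Note that ret is overwritten at each step, so
// only the status of the final streamOn() call is returned.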
status_t Sensor::force_reset_sensor() {
    DBG_LOGA("force_reset_sensor");
    status_t ret;
    mTimeOutCount = 0;
    ret = streamOff();
    ret = setBuffersFormat(vinfo);
    ret = streamOn();
    DBG_LOGB("%s , ret = %d", __FUNCTION__, ret);
    return ret;
}

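// Fills StreamBuffer b with an NV21 frame. mKernelBuffer caches the most
// recent frame already converted to NV21 (either the client buffer itself or
// mTemp_buffer), so repeat calls can be served from the cache without
// dequeuing another V4L2 frame. The branches below key on the V4L2 preview
// format, but the cached data is always NV21 by the time it is stored.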
void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
#if 0
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;

    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3 / 2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;

            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3 / 2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;

            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3 / 2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
    while (1) {
        if (mFlushFlag) {
            break;
        }

        if (mExitSensorThread) {
            break;
        }

        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
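            // After more than 300 consecutive empty reads (roughly 1.5 s at
            // 5 ms per retry), assume the stream is wedged and force a reset.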
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                DBG_LOGA("force sensor reset.\n");
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
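        // USB sensors can deliver short (partial) buffers for uncompressed
        // formats; drop any frame whose bytesused doesn't match the expected
        // length. MJPEG frames are variable-sized, so they are exempt.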
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
            if (vinfo->preview.buf.length == b.width * b.height * 3 / 2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                nv21_memcpy_align32(b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            memset(mTemp_buffer, 0, width * height * 3 / 2);
            YUYVToNV21(src, mTemp_buffer, width, height);
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, mTemp_buffer, b.width * b.height * 3 / 2);
                mKernelBuffer = b.img;
            } else {
                if ((b.height % 2) != 0) {
                    DBG_LOGB("%d, b.height = %d", __LINE__, b.height);
                    b.height = b.height - 1;
                }
                ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
                mKernelBuffer = mTemp_buffer;
            }
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            memset(mTemp_buffer, 0, width * height * 3 / 2);
#if ANDROID_PLATFORM_SDK_VERSION > 23
            if (jpeg_decode(&mTemp_buffer, src, width, height, V4L2_PIX_FMT_NV21) != 0) {
#else
            if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, mTemp_buffer,
                    width, mTemp_buffer + width * height, (width + 1) / 2, width,
                    height, width, height, libyuv::FOURCC_MJPG) != 0) {
#endif
                putback_frame(vinfo);
                ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__, __LINE__);
                continue;
            }
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, mTemp_buffer, b.width * b.height * 3 / 2);
                mKernelBuffer = b.img;
            } else {
                if ((b.height % 2) != 0) {
                    DBG_LOGB("%d, b.height = %d", __LINE__, b.height);
                    b.height = b.height - 1;
                }
                ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
                mKernelBuffer = mTemp_buffer;
            }
        }
        mSensorWorkFlag = true;
        break;
    }
#endif

    ALOGVV("NV21 sensor image captured");
}

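// Fills StreamBuffer b with a YV12 frame; structured like captureNV21, with
// mKernelBuffer caching the most recent YV12 frame and YUYV/MJPEG previews
// converted before delivery.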
void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
#if 0
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
            ALOGI("Scale YV12 frame down \n");

            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
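            // I420Scale scales the three planes independently. Both source
            // and destination are laid out as YV12 (V plane before U), so
            // feeding the V plane through the "U" arguments and vice versa is
            // harmless as long as it is done the same way on both sides, as
            // it is here.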
            int ret = libyuv::I420Scale(src, width,
                    src + width * height, width / 2,
                    src + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            // Use nothrow so the NULL check below is not dead code.
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            YUYVToYV12(src, tmp_buffer, width, height);
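            // Note: YUYVToYV12 writes its chroma rows with a 16-aligned
            // stride (ALIGN(width / 2, 16)), while the I420Scale call below
            // assumes a packed width / 2 stride; the two layouts only agree
            // when width / 2 is already a multiple of 16.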

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
            delete [] tmp_buffer;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

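            // ConvertToI420 is handed the V plane at width * height and the U
            // plane after it, so the decoded result lands in YV12
            // (V-before-U) plane order.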
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
            }

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");

            delete [] tmp_buffer;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
    while (1) {
        if (mFlushFlag) {
            break;
        }
        if (mExitSensorThread) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);

        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            if (vinfo->preview.buf.length == b.width * b.height * 3 / 2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                yv12_memcpy_align32(b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            YUYVToYV12(src, b.img, width, height);
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
                    b.img + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                DBG_LOGA("Decode MJPEG frame failed\n");
                continue;
            }
            mKernelBuffer = b.img;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        mSensorWorkFlag = true;
        break;
    }
#endif
    //mKernelBuffer = src;
    ALOGVV("YV12 sensor image captured");
}

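// YUYV passthrough: the dequeued V4L2 frame is copied to the client buffer
// as-is, so the preview stream must already be configured at the requested
// output size. Scaling a cached YUYV frame is still a TODO below.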
void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            // TODO: implement YUYV scaling; the cached-frame path currently
            // returns without writing the output buffer.
            //memcpy(img, src, vinfo->preview.buf.length);
        } else
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);

        return;
    }

    while (1) {
        if (mFlushFlag) {
            break;
        }
        if (mExitSensorThread) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            memcpy(img, src, vinfo->preview.buf.length);
            mKernelBuffer = src;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        mSensorWorkFlag = true;
        break;
    }
#endif
    //mKernelBuffer = src;
    ALOGVV("YUYV sensor image captured");
}

void Sensor::dump(int fd) {
    String8 result;
    result = String8::format("%s, sensor preview information: \n", __FILE__);
    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height: %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);

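    // The V4L2 pixel format is a FourCC code; printing its four bytes with
    // %.4s renders it as readable text (e.g. "YUYV" or "NV21").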
    result.appendFormat("camera preview format: %.4s\n\n",
            (char *) &vinfo->preview.format.fmt.pix.pixelformat);

    write(fd, result.string(), result.size());
}

} // namespace android