/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <cstring>
#include <cerrno>
#include <new>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>

#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))
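
// Note: ARRAY_SIZE only yields the element count for true arrays. Applied to a
// pointer (including an array-typed function parameter, which decays to a
// pointer), sizeof measures the pointer itself and the result is meaningless.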

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
    Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
    * Sensor::kVoltsPerLuxSecond;
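
// For reference, the value works out to 2000 / 0.520 * 0.100 ~= 384.6
// electrons per lux-second.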

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
    Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
    Sensor::kReadNoiseStddevBeforeGain *
    Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
    Sensor::kReadNoiseStddevAfterGain *
    Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
    Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
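
// For reference: 33331760 ns / 1200 rows ~= 27776 ns per row, so reading out
// all 1200 rows takes ~33.3 ms, which is exactly the minimum frame duration.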

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

const usb_frmsize_discrete_t kUsbAvailablePictureSize[] = {
    {4128, 3096},
    {3264, 2448},
    {2592, 1944},
    {2592, 1936},
    {2560, 1920},
    {2688, 1520},
    {2048, 1536},
    {1600, 1200},
    {1920, 1088},
    {1920, 1080},
    {1440, 1080},
    {1280, 960},
    {1280, 720},
    {1024, 768},
    {960, 720},
    {720, 480},
    {640, 480},
    {320, 240},
};

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
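
// A strict-aliasing-safe sketch of the same bit trick, using memcpy instead of
// pointer type-punning; kept disabled since the version above matches upstream.
#if 0
static float sqrtf_approx_safe(float r) {
    const int32_t modifier = 0x1FBB4000;
    int32_t r_i;
    memcpy(&r_i, &r, sizeof(r_i)); // reinterpret the float's bits as an int
    r_i = (r_i >> 1) + modifier;   // halve the exponent and add the bias
    memcpy(&r, &r_i, sizeof(r));   // reinterpret the adjusted bits as a float
    return r;
}
#endif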

void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & ~31;
    int h;
    for (h = 0; h < height; h++) {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
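
// e.g. ALIGN(25, 16) == 32 and ALIGN(32, 16) == 32; used below to pad the
// YV12 chroma stride to a multiple of 16.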

bool IsUsbAvailablePictureSize(const usb_frmsize_discrete_t AvailablePictureSize[], uint32_t width, uint32_t height)
{
    int count = sizeof(kUsbAvailablePictureSize) / sizeof(kUsbAvailablePictureSize[0]);
    for (int i = 0; i < count; i++) {
        if ((width == AvailablePictureSize[i].width) &&
            (height == AvailablePictureSize[i].height))
            return true;
    }
    return false;
}

void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}
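
// The NV21 layout assumed above is a full-resolution Y plane followed by one
// interleaved half-resolution VU plane, so each chroma plane pointer is
// base + width * height and a whole frame is width * height * 3 / 2 bytes.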

Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0] - kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mTemp_buffer(NULL),
        mExitSensorThread(false),
        mIoctlSupport(0),
        msupportrotate(0),
        mTimeOutCount(0),
        mWait(false),
        mPre_width(0),
        mPre_height(0),
        mFlushFlag(false),
        mSensorWorkFlag(false),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera3::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    if (NULL == mTemp_buffer) {
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        DBG_LOGB("setOutputFormat :: pre_width = %d, pre_height = %d\n", mPre_width, mPre_height);
        mTemp_buffer = new (std::nothrow) uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("first-time allocation of mTemp_buffer failed!");
            return -1;
        }
    }

    // Reallocate the scratch buffer whenever either preview dimension changes.
    if ((mPre_width != vinfo->preview.format.fmt.pix.width) || (mPre_height != vinfo->preview.format.fmt.pix.height)) {
        if (mTemp_buffer) {
            delete [] mTemp_buffer;
            mTemp_buffer = NULL;
        }
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        mTemp_buffer = new (std::nothrow) uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("allocate mTemp_buffer failed!");
            return -1;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
            || (vinfo->preview.format.fmt.pix.height != height)
            //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
            ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * translate the HAL format to a v4l2 format and check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }
    ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
    return BAD_VALUE;
}

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;

    mTimeOutCount = 0;

    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }

    if (mTemp_buffer) {
        delete [] mTemp_buffer;
        mTemp_buffer = NULL;
    }

    mSensorWorkFlag = false;

    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

void Sensor::sendExitSingalToSensor() {
    {
        Mutex::Autolock lock(mReadoutMutex);
        mExitSensorThread = true;
        mReadoutComplete.signal();
    }

    {
        Mutex::Autolock lock(mControlMutex);
        mVSync.signal();
    }

    {
        Mutex::Autolock lock(mReadoutMutex);
        mReadoutAvailable.signal();
    }
}
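
// The three signals above wake, in order: the capture thread if it is blocked
// in mReadoutComplete.wait() inside threadLoop(), a caller blocked in
// waitForVSync(), and a caller blocked in waitForNewFrame(). Each waiter
// rechecks mExitSensorThread after waking, and taking the matching mutex
// before signaling avoids a lost wakeup.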

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("not in [min,max], min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d, min=%d, max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns / 1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns / 1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

void Sensor::setFlushFlag(bool flushFlag) {
    mFlushFlag = flushFlag;
}

status_t Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);
    CAMHAL_LOGVB("%s , E mControlMutex" , __FUNCTION__);
    if (mExitSensorThread) {
        return -1;
    }

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    CAMHAL_LOGVB("%s , X mControlMutex , mGotVSync = %d " , __FUNCTION__ , mGotVSync);
    return mGotVSync;
}

status_t Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mExitSensorThread) {
        return -1;
    }

    if (mCapturedBuffers == NULL) {
        int res;
        CAMHAL_LOGVB("%s , E mReadoutMutex , reltime = %d" , __FUNCTION__, reltime);
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            if (mFlushFlag) {
                ALOGE("%s , return immediately , mWait = %d", __FUNCTION__, mWait);
                if (mWait) {
                    mWait = false;
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                    mReadoutComplete.signal();
                } else {
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                }
                return -2;
            } else {
                ALOGE("Error waiting for sensor readout signal: %d", res);
                return false;
            }
        }
    }
    if (mWait) {
        mWait = false;
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
        mReadoutComplete.signal();
    } else {
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X" , __FUNCTION__);
    return true;
}
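
// Return convention for waitForNewFrame(): true when a frame was handed over,
// false on timeout or a wait error, -1 when the sensor thread is exiting, and
// -2 when a flush is in progress so the caller should drop the request.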

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    int res;
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */
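    /*
     * Rough per-iteration timeline, with the stage numbers used below:
     *   Stage 1: latch exposure/frame duration/gain/buffers, signal VSync;
     *   Stage 3: hand the previously captured frame to the readout waiter;
     *   Stage 2: capture the next frame into the just-latched buffers;
     * then sleep out the remainder of frameDuration, so frame N's readout
     * overlaps frame N+1's exposure, as on a real rolling-shutter sensor.
     */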

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        CAMHAL_LOGVB("%s , E mControlMutex" , __FUNCTION__);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        CAMHAL_LOGVB("%s , E mReadoutMutex" , __FUNCTION__);
        if (mCapturedBuffers != NULL) {
            ALOGE("Waiting for readout thread to catch up!");
            mWait = true;
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X mReadoutMutex" , __FUNCTION__);

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
#if 0
            if (get_device_status(vinfo)) {
                listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
            }
#endif
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration / 1e6, gain);
        mScene.setExposureDuration((float)exposureDuration / 1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }

    if (mExitSensorThread) {
        return false;
    }

    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount / (interval / 1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval, interval / 1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime) / 1000000),
            (int)(frameDuration / 1000000));
    CAMHAL_LOGVB("%s , X" , __FUNCTION__);
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    ALOGVV("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
#if PLATFORM_SDK_VERSION <= 22
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
#endif
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add auxiliary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                uint32_t pixelfmt;
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                        b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    // A JPEG source buffer (RGB888) lives in its own aux buffer struct and is
    // put back separately, so only return the kernel frame on non-JPEG paths.
    if ((!isjpeg) && (mKernelBuffer)) {
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete sizes are supported

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count + 0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count + 1] = frmsize.discrete.width;
            picSizes[count + 2] = frmsize.discrete.height;
            picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                    frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            // insert in descending order of area
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete sizes are supported

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count + 0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count + 1] = frmsize.discrete.width;
            picSizes[count + 2] = frmsize.discrete.height;
            picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=HAL_PIXEL_FORMAT_YCbCr_420_888\n",
                    frmsize.discrete.width, frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            // insert in descending order of area
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete sizes are supported

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count + 0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count + 1] = frmsize.discrete.width;
                picSizes[count + 2] = frmsize.discrete.height;
                picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format=HAL_PIXEL_FORMAT_YCbCr_422_I\n",
                        frmsize.discrete.width, frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt) / sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete sizes are supported

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                    if (!IsUsbAvailablePictureSize(kUsbAvailablePictureSize, frmsize.discrete.width, frmsize.discrete.height))
                        continue;
                }

                picSizes[count + 0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count + 1] = frmsize.discrete.width;
                picSizes[count + 2] = frmsize.discrete.height;
                picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                // insert in descending order of area
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel format for jpeg");

    return count;
}

int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0; int framerate = 0; int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // duration[] decays to a pointer here, so ARRAY_SIZE(duration) would only
    // measure the pointer; zero the caller-provided length instead.
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        //V4L2_PIX_FMT_YVU420
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        /* we already got a duration for each resolution with a previous format */
        if (count >= tmp_size)
            break;

        for (; size > 0; size -= 4) {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++) {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size - 3];
                fival.height = picSizes[size - 2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator / fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count + 0] = (int64_t)(picSizes[size - 4]);
                        duration[count + 1] = (int64_t)(picSizes[size - 3]);
                        duration[count + 2] = (int64_t)(picSizes[size - 2]);
                        duration[count + 3] = (int64_t)((1.0 / framerate) * 1000000000);
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                        temp_rate = fival.discrete.denominator / fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count + 0] = (int64_t)picSizes[size - 4];
                        duration[count + 1] = (int64_t)picSizes[size - 3];
                        duration[count + 2] = (int64_t)picSizes[size - 2];
                        duration[count + 3] = (int64_t)((1.0 / framerate) * 1000000000);
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        temp_rate = fival.discrete.denominator / fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count + 0] = (int64_t)picSizes[size - 4];
                        duration[count + 1] = (int64_t)picSizes[size - 3];
                        duration[count + 2] = (int64_t)picSizes[size - 2];
                        duration[count + 3] = (int64_t)((1.0 / framerate) * 1000000000);
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count >= tmp_size)
                            break;
                        duration[count + 0] = (int64_t)(picSizes[size - 4]);
                        duration[count + 1] = (int64_t)(picSizes[size - 3]);
                        duration[count + 2] = (int64_t)(picSizes[size - 2]);
                        if (framerate == 5) {
                            duration[count + 3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count + 3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count + 3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count + 3] = (int64_t)33333333L;
                        } else {
                            duration[count + 3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L; // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int)ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}
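
// For reference, a discrete 1/30 s interval enumerates as numerator 1 and
// denominator 30, giving 1 * 1000000000 / 30 = 33333333 ns; the 66666666 ns
// default above corresponds to 15 fps.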

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete sizes are supported

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count + 1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            // keep the list sorted by area, largest first
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}

bool Sensor::get_sensor_status() {
    return mSensorWorkFlag;
}

void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain / 100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}
1913
1914void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
1915#if 0
1916 float totalGain = gain/100.0 * kBaseGainFactor;
1917 // In fixed-point math, calculate total scaling from electrons to 8bpp
1918 int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1919 uint32_t inc = kResolution[0] / stride;
1920
1921 for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
1922 mScene.setReadoutPixel(0, y);
1923 uint8_t *px = img + outY * stride * 3;
1924 for (unsigned int x = 0; x < kResolution[0]; x += inc) {
1925 uint32_t rCount, gCount, bCount;
1926 // TODO: Perfect demosaicing is a cheat
1927 const uint32_t *pixel = mScene.getPixelElectrons();
1928 rCount = pixel[Scene::R] * scale64x;
1929 gCount = pixel[Scene::Gr] * scale64x;
1930 bCount = pixel[Scene::B] * scale64x;
1931
1932 *px++ = rCount < 255*64 ? rCount / 64 : 255;
1933 *px++ = gCount < 255*64 ? gCount / 64 : 255;
1934 *px++ = bCount < 255*64 ? bCount / 64 : 255;
1935 for (unsigned int j = 1; j < inc; j++)
1936 mScene.getPixelElectrons();
1937 }
1938 // TODO: Handle this better
1939 //simulatedTime += kRowReadoutTime;
1940 }
1941#else
1942 uint8_t *src = NULL;
1943 int ret = 0, rotate = 0;
1944 uint32_t width = 0, height = 0;
1945 int dqTryNum = 3;
1946
1947 rotate = getPictureRotate();
1948 width = vinfo->picture.format.fmt.pix.width;
1949 height = vinfo->picture.format.fmt.pix.height;
1950
1951 if (mSensorType == SENSOR_USB) {
1952 releasebuf_and_stop_capturing(vinfo);
1953 } else {
1954 stop_capturing(vinfo);
1955 }
1956
1957 ret = start_picture(vinfo,rotate);
1958 if (ret < 0)
1959 {
1960 ALOGD("start picture failed!");
1961 }
1962 while(1)
1963 {
1964 src = (uint8_t *)get_picture(vinfo);
1965 if ((NULL != src) && (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)) {
1966 while (dqTryNum > 0) {
1967 if (NULL != src) {
1968 putback_picture_frame(vinfo);
1969 }
1970 usleep(10000);
1971 dqTryNum--;
1972 src = (uint8_t *)get_picture(vinfo);
1973 }
1974 }
1975
1976 if (NULL != src) {
1977 if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
1978 uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];
1979 if (tmp_buffer == NULL) { // plain new would throw rather than return NULL
1980 ALOGE("new buffer failed!\n");
1981 return;
1982 }
1983 if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
1984 width, tmp_buffer + width * height, (width + 1) / 2, width,
1985 height, width, height, libyuv::FOURCC_MJPG) != 0) {
1986 DBG_LOGA("Decode MJPEG frame failed\n");
1987 delete [] tmp_buffer; // free before retrying, or each retry leaks a frame
1988 putback_picture_frame(vinfo);
1989 usleep(5000);
1990 } else {
1991 nv21_to_rgb24(tmp_buffer, img, width, height);
1992 delete [] tmp_buffer;
1993 break;
1994 }
1995 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
1996 if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
1997 yuyv422_to_rgb24(src,img,width,height);
1998 break;
1999 } else {
2000 putback_picture_frame(vinfo);
2001 usleep(5000);
2002 }
2003 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
2004 if (vinfo->picture.buf.length == width * height * 3) {
2005 memcpy(img, src, vinfo->picture.buf.length);
2006 } else {
2007 rgb24_memcpy(img, src, width, height);
2008 }
2009 break;
2010 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2011 memcpy(img, src, vinfo->picture.buf.length);
2012 break;
2013 }
2014 }
2015 }
2016 ALOGD("get picture success !");
2017
2018 if (mSensorType == SENSOR_USB) {
2019 releasebuf_and_stop_picture(vinfo);
2020 } else {
2021 stop_picture(vinfo);
2022 }
2023
2024#endif
2025}
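
// The YUYV still-capture path above requeues and re-dequeues a few frames
// before keeping one. A condensed sketch of that pattern; the
// `struct VideoInfo *` type name is an assumption matching what this file's
// helpers take, and the helper itself is not part of the original code.
static uint8_t *drainStaleFrames(struct VideoInfo *vi, int tries) {
    uint8_t *frame = (uint8_t *)get_picture(vi);
    while (tries-- > 0) {
        if (frame != NULL)
            putback_picture_frame(vi); // requeue the stale buffer
        usleep(10000);                 // give the next frame time to arrive
        frame = (uint8_t *)get_picture(vi);
    }
    return frame; // may still be NULL; the caller keeps retrying as above
}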
2026
2027void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
2028{
2029 for (int i = 0; i < width * height * 2; i += 2) { // Y plane: every even byte of the packed YUYV stream
2030 *dst++ = *(src + i);
2031 }
2032
2033 for (int y = 0; y < height - 1; y += 2) { // average V and U over each pair of rows
2034 for (int j = 0; j < width * 2; j += 4) {
2035 *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
2036 *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
2037 }
2038 src += width * 2 * 2;
2039 }
2040
2041 if (height & 1) // odd height: the last row has no partner, copy its chroma directly
2042 for (int j = 0; j < width * 2; j += 4) {
2043 *dst++ = *(src + 3 + j); //v
2044 *dst++ = *(src + 1 + j); //u
2045 }
2046}
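
// Usage sketch for the converter above (illustrative; assumes the member is
// reachable from the call site): a packed YUYV frame occupies
// width * height * 2 bytes, while the NV21 output needs width * height * 3/2,
// i.e. a full Y plane plus one half-height interleaved VU plane.
static void exampleYUYVToNV21(Sensor *sensor, uint8_t *yuyv, int width, int height) {
    uint8_t *nv21 = new (std::nothrow) uint8_t[width * height * 3 / 2];
    if (nv21 == NULL)
        return;
    sensor->YUYVToNV21(yuyv, nv21, width, height);
    // ... consume the NV21 frame ...
    delete [] nv21;
}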
2047
2048void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
2049{
2050 // width must be an even number;
2051 // the chroma stride is ALIGNed (width/2 rounded up to a multiple of 16).
2052 int i,j,stride,c_stride,c_size,y_size,cb_offset,cr_offset;
2053 unsigned char *dst_copy,*src_copy;
2054
2055 dst_copy = dst;
2056 src_copy = src;
2057
2058 y_size = width*height;
2059 c_stride = ALIGN(width/2, 16);
2060 c_size = c_stride * height/2;
2061 cr_offset = y_size;
2062 cb_offset = y_size+c_size;
2063
2064 for (i = 0; i < y_size; i++) { // Y plane: every other byte of the YUYV stream
2065 *dst++ = *src;
2066 src += 2;
2067 }
2068
2069 dst = dst_copy;
2070 src = src_copy;
2071
2072 for (i = 0; i < height; i += 2) {
2073 for (j = 1; j < width * 2; j += 4) { // one YUYV line holds 2*width bytes
2074 // average each chroma sample over two rows, rounding up: (c1 + c2 + 1) / 2
2075 *(dst + cr_offset + j/4) = (*(src + j + 2) + *(src + j + 2 + width*2) + 1) / 2;
2076 *(dst + cb_offset + j/4) = (*(src + j) + *(src + j + width*2) + 1) / 2;
2077 }
2078 dst += c_stride;
2079 src += width * 4;
2080 }
2081}
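
// Plane-layout sketch matching the writer above (not part of the original
// file): Y is tightly packed, Cr precedes Cb, and both chroma planes share a
// 16-aligned half-width stride.
static void yv12PlaneOffsets(int width, int height,
                             int *crOffset, int *cbOffset, int *totalSize) {
    int cStride = ALIGN(width / 2, 16); // same alignment YUYVToYV12 uses
    int cSize = cStride * height / 2;
    *crOffset = width * height;            // V plane directly after Y
    *cbOffset = width * height + cSize;    // U plane after V
    *totalSize = width * height + 2 * cSize;
}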
2082
2083status_t Sensor::force_reset_sensor() {
2084 DBG_LOGA("force_reset_sensor");
2085 status_t ret;
2086 mTimeOutCount = 0;
2087 ret = streamOff(); // note: each status overwrites the last,
2088 ret = setBuffersFormat(vinfo); // so only streamOn()'s result
2089 ret = streamOn(); // reaches the caller
2090 DBG_LOGB("%s , ret = %d", __FUNCTION__, ret);
2091 return ret;
2092}
2093
2094void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
2095#if 0 // disabled scene-simulation path; img/stride here predate this function's StreamBuffer signature
2096 float totalGain = gain/100.0 * kBaseGainFactor;
2097 // Using fixed-point math with 6 bits of fractional precision.
2098 // In fixed-point math, calculate total scaling from electrons to 8bpp
2099 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2100 // In fixed-point math, saturation point of sensor after gain
2101 const int saturationPoint = 64 * 255;
2102 // Fixed-point coefficients for RGB-YUV transform
2103 // Based on JFIF RGB->YUV transform.
2104 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2105 const int rgbToY[] = {19, 37, 7};
2106 const int rgbToCb[] = {-10,-21, 32, 524288};
2107 const int rgbToCr[] = {32,-26, -5, 524288};
2108 // Scale back to 8bpp non-fixed-point
2109 const int scaleOut = 64;
2110 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2111
2112 uint32_t inc = kResolution[0] / stride;
2113 uint32_t outH = kResolution[1] / inc;
2114 for (unsigned int y = 0, outY = 0;
2115 y < kResolution[1]; y+=inc, outY++) {
2116 uint8_t *pxY = img + outY * stride;
2117 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2118 mScene.setReadoutPixel(0,y);
2119 for (unsigned int outX = 0; outX < stride; outX++) {
2120 int32_t rCount, gCount, bCount;
2121 // TODO: Perfect demosaicing is a cheat
2122 const uint32_t *pixel = mScene.getPixelElectrons();
2123 rCount = pixel[Scene::R] * scale64x;
2124 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2125 gCount = pixel[Scene::Gr] * scale64x;
2126 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2127 bCount = pixel[Scene::B] * scale64x;
2128 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2129
2130 *pxY++ = (rgbToY[0] * rCount +
2131 rgbToY[1] * gCount +
2132 rgbToY[2] * bCount) / scaleOutSq;
2133 if (outY % 2 == 0 && outX % 2 == 0) {
2134 *pxVU++ = (rgbToCr[0] * rCount +
2135 rgbToCr[1] * gCount +
2136 rgbToCr[2] * bCount +
2137 rgbToCr[3]) / scaleOutSq;
2138 *pxVU++ = (rgbToCb[0] * rCount +
2139 rgbToCb[1] * gCount +
2140 rgbToCb[2] * bCount +
2141 rgbToCb[3]) / scaleOutSq;
2142 }
2143 for (unsigned int j = 1; j < inc; j++)
2144 mScene.getPixelElectrons();
2145 }
2146 }
2147#else
2148 uint8_t *src;
2149
2150 if (mKernelBuffer) {
2151 src = mKernelBuffer;
2152 // mKernelBuffer is always cached as NV21 (YUYV and MJPEG frames are
2153 // converted before caching), so all three source formats share one path.
2154 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21 ||
2155 vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV ||
2156 vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2157 uint32_t width = vinfo->preview.format.fmt.pix.width;
2158 uint32_t height = vinfo->preview.format.fmt.pix.height;
2159 if ((width == b.width) && (height == b.height)) {
2160 memcpy(b.img, src, b.width * b.height * 3/2);
2161 } else {
2162 ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2163 }
2164 } else {
2165 ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2166 }
2167 return;
2182 }
2183 while (1) {
2184 if (mFlushFlag) {
2185 break;
2186 }
2187
2188 if (mExitSensorThread) {
2189 break;
2190 }
2191
2192 src = (uint8_t *)get_frame(vinfo);
2193 if (NULL == src) {
2194 if (get_device_status(vinfo)) {
2195 break;
2196 }
2197 ALOGVV("get frame NULL, sleep 5ms");
2198 usleep(5000);
2199 mTimeOutCount++;
2200 if (mTimeOutCount > 300) {
2201 DBG_LOGA("force sensor reset.\n");
2202 force_reset_sensor();
2203 }
2204 continue;
2205 }
2206 mTimeOutCount = 0;
2207 if (mSensorType == SENSOR_USB) {
2208 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2209 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2210 DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2211 putback_frame(vinfo);
2212 continue;
2213 }
2214 }
2215 }
2216 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2217 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2218 memcpy(b.img, src, vinfo->preview.buf.length);
2219 } else {
2220 nv21_memcpy_align32 (b.img, src, b.width, b.height);
2221 }
2222 mKernelBuffer = b.img;
2223 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2224 uint32_t width = vinfo->preview.format.fmt.pix.width;
2225 uint32_t height = vinfo->preview.format.fmt.pix.height;
2226 memset(mTemp_buffer, 0, width * height * 3/2);
2227 YUYVToNV21(src, mTemp_buffer, width, height);
2228 if ((width == b.width) && (height == b.height)) {
2229 memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
2230 mKernelBuffer = b.img;
2231 } else {
2232 if ((b.height % 2) != 0) { // NV21 needs an even height (2x2 chroma subsampling)
2233 DBG_LOGB("%d , b.height = %d", __LINE__, b.height);
2234 b.height = b.height - 1;
2235 }
2236 ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
2237 mKernelBuffer = mTemp_buffer;
2238 }
2239 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2240 uint32_t width = vinfo->preview.format.fmt.pix.width;
2241 uint32_t height = vinfo->preview.format.fmt.pix.height;
2242 memset(mTemp_buffer, 0, width * height * 3/2);
2243 if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, mTemp_buffer,
2244 width, mTemp_buffer + width * height, (width + 1) / 2, width,
2245 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2246 putback_frame(vinfo);
2247 ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__ , __LINE__);
2248 continue;
2249 }
2250 if ((width == b.width) && (height == b.height)) {
2251 memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
2252 mKernelBuffer = b.img;
2253 } else {
2254 if ((b.height % 2) != 0) { // same even-height requirement as the YUYV path
2255 DBG_LOGB("%d, b.height = %d", __LINE__, b.height);
2256 b.height = b.height - 1;
2257 }
2258 ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
2259 mKernelBuffer = mTemp_buffer;
2260 }
2261 }
2262 mSensorWorkFlag = true;
2263 break;
2264 }
2265#endif
2266
2267 ALOGVV("NV21 sensor image captured");
2268}
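
// Two details of the loop above, restated as a sketch (not original code):
// the odd-height fix-up exists because NV21 subsamples chroma in 2x2 blocks,
// so both dimensions must be even; and 300 failed dequeues at ~5 ms apiece
// mean the force-reset watchdog fires after roughly 1.5 seconds.
static inline uint32_t floorToEven(uint32_t v) {
    return v & ~1u; // drop the lowest bit, e.g. 1081 -> 1080
}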
2269
2270void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
2271#if 0 // disabled scene-simulation path, as in captureNV21() above
2272 float totalGain = gain/100.0 * kBaseGainFactor;
2273 // Using fixed-point math with 6 bits of fractional precision.
2274 // In fixed-point math, calculate total scaling from electrons to 8bpp
2275 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2276 // In fixed-point math, saturation point of sensor after gain
2277 const int saturationPoint = 64 * 255;
2278 // Fixed-point coefficients for RGB-YUV transform
2279 // Based on JFIF RGB->YUV transform.
2280 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2281 const int rgbToY[] = {19, 37, 7};
2282 const int rgbToCb[] = {-10,-21, 32, 524288};
2283 const int rgbToCr[] = {32,-26, -5, 524288};
2284 // Scale back to 8bpp non-fixed-point
2285 const int scaleOut = 64;
2286 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2287
2288 uint32_t inc = kResolution[0] / stride;
2289 uint32_t outH = kResolution[1] / inc;
2290 for (unsigned int y = 0, outY = 0;
2291 y < kResolution[1]; y+=inc, outY++) {
2292 uint8_t *pxY = img + outY * stride;
2293 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2294 mScene.setReadoutPixel(0,y);
2295 for (unsigned int outX = 0; outX < stride; outX++) {
2296 int32_t rCount, gCount, bCount;
2297 // TODO: Perfect demosaicing is a cheat
2298 const uint32_t *pixel = mScene.getPixelElectrons();
2299 rCount = pixel[Scene::R] * scale64x;
2300 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2301 gCount = pixel[Scene::Gr] * scale64x;
2302 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2303 bCount = pixel[Scene::B] * scale64x;
2304 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2305
2306 *pxY++ = (rgbToY[0] * rCount +
2307 rgbToY[1] * gCount +
2308 rgbToY[2] * bCount) / scaleOutSq;
2309 if (outY % 2 == 0 && outX % 2 == 0) {
2310 *pxVU++ = (rgbToCr[0] * rCount +
2311 rgbToCr[1] * gCount +
2312 rgbToCr[2] * bCount +
2313 rgbToCr[3]) / scaleOutSq;
2314 *pxVU++ = (rgbToCb[0] * rCount +
2315 rgbToCb[1] * gCount +
2316 rgbToCb[2] * bCount +
2317 rgbToCb[3]) / scaleOutSq;
2318 }
2319 for (unsigned int j = 1; j < inc; j++)
2320 mScene.getPixelElectrons();
2321 }
2322 }
2323#else
2324 uint8_t *src;
2325 if (mKernelBuffer) {
2326 src = mKernelBuffer;
2327 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2328 //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
2329 ALOGI("Sclale YV12 frame down \n");
2330
2331 int width = vinfo->preview.format.fmt.pix.width;
2332 int height = vinfo->preview.format.fmt.pix.height;
2333 int ret = libyuv::I420Scale(src, width,
2334 src + width * height, width / 2,
2335 src + width * height + width * height / 4, width / 2,
2336 width, height,
2337 b.img, b.width,
2338 b.img + b.width * b.height, b.width / 2,
2339 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2340 b.width, b.height,
2341 libyuv::kFilterNone);
2342 if (ret < 0)
2343 ALOGE("Sclale YV12 frame down failed!\n");
2344 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2345 int width = vinfo->preview.format.fmt.pix.width;
2346 int height = vinfo->preview.format.fmt.pix.height;
2347 uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];
2348
2349 if (tmp_buffer == NULL) { // plain new would throw rather than return NULL
2350 ALOGE("new buffer failed!\n");
2351 return;
2352 }
2353
2354 YUYVToYV12(src, tmp_buffer, width, height);
2355
2356 int ret = libyuv::I420Scale(tmp_buffer, width,
2357 tmp_buffer + width * height, width / 2,
2358 tmp_buffer + width * height + width * height / 4, width / 2,
2359 width, height,
2360 b.img, b.width,
2361 b.img + b.width * b.height, b.width / 2,
2362 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2363 b.width, b.height,
2364 libyuv::kFilterNone);
2365 if (ret < 0)
2366 ALOGE("Sclale YV12 frame down failed!\n");
2367 delete [] tmp_buffer;
2368 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2369 int width = vinfo->preview.format.fmt.pix.width;
2370 int height = vinfo->preview.format.fmt.pix.height;
2371 uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];
2372
2373 if (tmp_buffer == NULL) { // plain new would throw rather than return NULL
2374 ALOGE("new buffer failed!\n");
2375 return;
2376 }
2377
2378 if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
2379 tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height, // U/V destinations swapped on purpose: the output is V-first (YV12)
2380 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2381 DBG_LOGA("Decode MJPEG frame failed\n");
2382 }
2383
2384 int ret = libyuv::I420Scale(tmp_buffer, width,
2385 tmp_buffer + width * height, width / 2,
2386 tmp_buffer + width * height + width * height / 4, width / 2,
2387 width, height,
2388 b.img, b.width,
2389 b.img + b.width * b.height, b.width / 2,
2390 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2391 b.width, b.height,
2392 libyuv::kFilterNone);
2393 if (ret < 0)
2394 ALOGE("Sclale YV12 frame down failed!\n");
2395
2396 delete [] tmp_buffer;
2397 } else {
2398 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2399 }
2400 return ;
2401 }
2402 while (1) {
2403 if (mFlushFlag) {
2404 break;
2405 }
2406 if (mExitSensorThread) {
2407 break;
2408 }
2409 src = (uint8_t *)get_frame(vinfo);
2410
2411 if (NULL == src) {
2412 if (get_device_status(vinfo)) {
2413 break;
2414 }
2415 ALOGVV("get frame NULL, sleep 5ms");
2416 usleep(5000);
2417 mTimeOutCount++;
2418 if (mTimeOutCount > 300) {
2419 force_reset_sensor();
2420 }
2421 continue;
2422 }
2423 mTimeOutCount = 0;
2424 if (mSensorType == SENSOR_USB) {
2425 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2426 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2427 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2428 putback_frame(vinfo);
2429 continue;
2430 }
2431 }
2432 }
2433 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2434 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2435 memcpy(b.img, src, vinfo->preview.buf.length);
2436 } else {
2437 yv12_memcpy_align32 (b.img, src, b.width, b.height);
2438 }
2439 mKernelBuffer = b.img;
2440 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2441 int width = vinfo->preview.format.fmt.pix.width;
2442 int height = vinfo->preview.format.fmt.pix.height;
2443 YUYVToYV12(src, b.img, width, height);
2444 mKernelBuffer = b.img;
2445 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2446 int width = vinfo->preview.format.fmt.pix.width;
2447 int height = vinfo->preview.format.fmt.pix.height;
2448 if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
2449 b.img + width * height, (width + 1) / 2, 0, 0, width, height,
2450 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2451 putback_frame(vinfo);
2452 DBG_LOGA("Decode MJPEG frame failed\n");
2453 continue;
2454 }
2455 mKernelBuffer = b.img;
2456 } else {
2457 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2458 }
2459 mSensorWorkFlag = true;
2460 break;
2461 }
2462#endif
2463 //mKernelBuffer = src;
2464 ALOGVV("YV12 sensor image captured");
2465}
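
// The libyuv::I420Scale calls above repeat the same plane arithmetic three
// times. A consolidation sketch, assuming tightly packed planes with no
// extra stride alignment (the same assumption the calls above make). Because
// source and destination use the same plane order, it works equally for
// I420 and YV12 buffers.
static int scaleTightI420(uint8_t *src, int srcW, int srcH,
                          uint8_t *dst, int dstW, int dstH) {
    return libyuv::I420Scale(
        src, srcW,                                      // plane 0, full res
        src + srcW * srcH, srcW / 2,                    // plane 1, half res
        src + srcW * srcH + srcW * srcH / 4, srcW / 2,  // plane 2, half res
        srcW, srcH,
        dst, dstW,
        dst + dstW * dstH, dstW / 2,
        dst + dstW * dstH + dstW * dstH / 4, dstW / 2,
        dstW, dstH,
        libyuv::kFilterNone);
}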
2466
2467void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
2468#if 0
2469 float totalGain = gain/100.0 * kBaseGainFactor;
2470 // Using fixed-point math with 6 bits of fractional precision.
2471 // In fixed-point math, calculate total scaling from electrons to 8bpp
2472 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2473 // In fixed-point math, saturation point of sensor after gain
2474 const int saturationPoint = 64 * 255;
2475 // Fixed-point coefficients for RGB-YUV transform
2476 // Based on JFIF RGB->YUV transform.
2477 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2478 const int rgbToY[] = {19, 37, 7};
2479 const int rgbToCb[] = {-10,-21, 32, 524288};
2480 const int rgbToCr[] = {32,-26, -5, 524288};
2481 // Scale back to 8bpp non-fixed-point
2482 const int scaleOut = 64;
2483 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2484
2485 uint32_t inc = kResolution[0] / stride;
2486 uint32_t outH = kResolution[1] / inc;
2487 for (unsigned int y = 0, outY = 0;
2488 y < kResolution[1]; y+=inc, outY++) {
2489 uint8_t *pxY = img + outY * stride;
2490 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2491 mScene.setReadoutPixel(0,y);
2492 for (unsigned int outX = 0; outX < stride; outX++) {
2493 int32_t rCount, gCount, bCount;
2494 // TODO: Perfect demosaicing is a cheat
2495 const uint32_t *pixel = mScene.getPixelElectrons();
2496 rCount = pixel[Scene::R] * scale64x;
2497 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2498 gCount = pixel[Scene::Gr] * scale64x;
2499 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2500 bCount = pixel[Scene::B] * scale64x;
2501 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2502
2503 *pxY++ = (rgbToY[0] * rCount +
2504 rgbToY[1] * gCount +
2505 rgbToY[2] * bCount) / scaleOutSq;
2506 if (outY % 2 == 0 && outX % 2 == 0) {
2507 *pxVU++ = (rgbToCr[0] * rCount +
2508 rgbToCr[1] * gCount +
2509 rgbToCr[2] * bCount +
2510 rgbToCr[3]) / scaleOutSq;
2511 *pxVU++ = (rgbToCb[0] * rCount +
2512 rgbToCb[1] * gCount +
2513 rgbToCb[2] * bCount +
2514 rgbToCb[3]) / scaleOutSq;
2515 }
2516 for (unsigned int j = 1; j < inc; j++)
2517 mScene.getPixelElectrons();
2518 }
2519 }
2520#else
2521 uint8_t *src;
2522 if (mKernelBuffer) {
2523 src = mKernelBuffer;
2524 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2525 //TODO YUYV scale
2526 //memcpy(img, src, vinfo->preview.buf.length);
2527
2528 } else {
2529 ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2530 }
2531 return;
2532 }
2533
2534 while (1) {
2535 if (mFlushFlag) {
2536 break;
2537 }
2538 if (mExitSensorThread) {
2539 break;
2540 }
2541 src = (uint8_t *)get_frame(vinfo);
2542 if (NULL == src) {
2543 if (get_device_status(vinfo)) {
2544 break;
2545 }
2546 ALOGVV("get frame NULL, sleep 5ms");
2547 usleep(5000);
2548 mTimeOutCount++;
2549 if (mTimeOutCount > 300) {
2550 force_reset_sensor();
2551 }
2552 continue;
2553 }
2554 mTimeOutCount = 0;
2555 if (mSensorType == SENSOR_USB) {
2556 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2557 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2558 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2559 putback_frame(vinfo);
2560 continue;
2561 }
2562 }
2563 }
2564 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2565 memcpy(img, src, vinfo->preview.buf.length);
2566 mKernelBuffer = src;
2567 } else {
2568 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2569 }
2570 mSensorWorkFlag = true;
2571 break;
2572 }
2573#endif
2574 //mKernelBuffer = src;
2575 ALOGVV("YUYV sensor image captured");
2576}
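
// One way the "TODO YUYV scale" above could be filled in (a sketch, not the
// original implementation): libyuv has no packed-YUYV scaler, so round-trip
// through I420 with YUY2ToI420 / I420Scale / I420ToYUY2. The temporary
// buffers and tight plane packing are assumptions.
static int scaleYUYV(uint8_t *src, int srcW, int srcH,
                     uint8_t *dst, int dstW, int dstH) {
    uint8_t *tmpSrc = new (std::nothrow) uint8_t[srcW * srcH * 3 / 2];
    uint8_t *tmpDst = new (std::nothrow) uint8_t[dstW * dstH * 3 / 2];
    int ret = -1;
    if (tmpSrc != NULL && tmpDst != NULL) {
        // Unpack YUYV (2 bytes per pixel) into planar I420.
        libyuv::YUY2ToI420(src, srcW * 2,
                           tmpSrc, srcW,
                           tmpSrc + srcW * srcH, srcW / 2,
                           tmpSrc + srcW * srcH + srcW * srcH / 4, srcW / 2,
                           srcW, srcH);
        ret = libyuv::I420Scale(tmpSrc, srcW,
                                tmpSrc + srcW * srcH, srcW / 2,
                                tmpSrc + srcW * srcH + srcW * srcH / 4, srcW / 2,
                                srcW, srcH,
                                tmpDst, dstW,
                                tmpDst + dstW * dstH, dstW / 2,
                                tmpDst + dstW * dstH + dstW * dstH / 4, dstW / 2,
                                dstW, dstH,
                                libyuv::kFilterNone);
        if (ret == 0) {
            // Repack the scaled planes into YUYV.
            libyuv::I420ToYUY2(tmpDst, dstW,
                               tmpDst + dstW * dstH, dstW / 2,
                               tmpDst + dstW * dstH + dstW * dstH / 4, dstW / 2,
                               dst, dstW * 2,
                               dstW, dstH);
        }
    }
    delete [] tmpSrc;
    delete [] tmpDst;
    return ret;
}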
2577
2578void Sensor::dump(int fd) {
2579 String8 result;
2580 result = String8::format("%s, sensor preview information:\n", __FILE__);
2581 result.appendFormat("camera preview fps: %.2f\n", mCurFps);
2582 result.appendFormat("camera preview width: %d, height: %d\n",
2583 vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);
2584
2585 result.appendFormat("camera preview format: %.4s\n\n",
2586 (char *) &vinfo->preview.format.fmt.pix.pixelformat);
2587
2588 write(fd, result.string(), result.size());
2589}
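
// The "%.4s" format above exploits how V4L2 packs a FOURCC: four ASCII
// characters in one little-endian uint32_t, so printing the first four bytes
// of the field recovers the name. Illustrative helper, not in the original:
static void logFourcc(uint32_t fourcc) {
    ALOGD("pixel format: %.4s", (char *)&fourcc); // e.g. V4L2_PIX_FMT_YUYV -> "YUYV"
}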
2590
2591} // namespace android
2592
2593