path: root/v3/fake-pipeline2/Sensor.cpp (plain)
blob: d94da9501f18511470736da264f9c9d62235fd83
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18//#define LOG_NNDEBUG 0
19#define LOG_TAG "EmulatedCamera3_Sensor"
20
21#ifdef LOG_NNDEBUG
22#define ALOGVV(...) ALOGV(__VA_ARGS__)
23#else
24#define ALOGVV(...) ((void)0)
25#endif
26
27#include <utils/Log.h>
28#include <cutils/properties.h>
29
30#include "../EmulatedFakeCamera2.h"
31#include "Sensor.h"
32#include <cmath>
33#include <cstdlib>
34#include <hardware/camera3.h>
35#include "system/camera_metadata.h"
36#include "libyuv.h"
37#include "NV12_resize.h"
38#include "libyuv/scale.h"
39#include "ge2d_stream.h"
40#include "util.h"
41#include <sys/time.h>
42
43
44
45#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))
46
47namespace android {
48
49const unsigned int Sensor::kResolution[2] = {1600, 1200};
50
51const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
53const nsecs_t Sensor::kFrameDurationRange[2] =
54 {33331760L, 30000000000L}; // ~1/30 s - 30 sec
55const nsecs_t Sensor::kMinVerticalBlank = 10000L;
56
57const uint8_t Sensor::kColorFilterArrangement =
58 ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
59
60// Output image data characteristics
61const uint32_t Sensor::kMaxRawValue = 4000;
62const uint32_t Sensor::kBlackLevel = 1000;
63
64// Sensor sensitivity
65const float Sensor::kSaturationVoltage = 0.520f;
66const uint32_t Sensor::kSaturationElectrons = 2000;
67const float Sensor::kVoltsPerLuxSecond = 0.100f;
68
69const float Sensor::kElectronsPerLuxSecond =
70 Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
71 * Sensor::kVoltsPerLuxSecond;
72
73const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
74 Sensor::kSaturationElectrons;
75
76const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
77const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
78const float Sensor::kReadNoiseVarBeforeGain =
79 Sensor::kReadNoiseStddevBeforeGain *
80 Sensor::kReadNoiseStddevBeforeGain;
81const float Sensor::kReadNoiseVarAfterGain =
82 Sensor::kReadNoiseStddevAfterGain *
83 Sensor::kReadNoiseStddevAfterGain;
84
// Although each row has to be read out, reset, and then exposed, the
// (reset + expose) sequence can be overlapped with other rows' readouts, so
// the final minimum frame duration is purely a function of row readout
// time, at least when there is a reasonable number of rows.
89const nsecs_t Sensor::kRowReadoutTime =
90 Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
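// Worked example (a sketch from the constants above): 33331760 ns / 1200
// rows is ~27776 ns per row, so a full-frame readout takes ~33.3 ms,
// matching the ~1/30 s minimum frame duration.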
91
92const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
93const uint32_t Sensor::kDefaultSensitivity = 100;
94
95const usb_frmsize_discrete_t kUsbAvailablePictureSize[] = {
96 {4128, 3096},
97 {3264, 2448},
98 {2592, 1944},
99 {2592, 1936},
100 {2560, 1920},
101 {2688, 1520},
102 {2048, 1536},
103 {1600, 1200},
104 {1920, 1088},
105 {1920, 1080},
106 {1440, 1080},
107 {1280, 960},
108 {1280, 720},
109 {1024, 768},
110 {960, 720},
111 {720, 480},
112 {640, 480},
113 {320, 240},
114};
115
116/** A few utility functions for math, normal distributions */
117
118// Take advantage of IEEE floating-point format to calculate an approximate
119// square root. Accurate to within +-3.6%
120float sqrtf_approx(float r) {
121 // Modifier is based on IEEE floating-point representation; the
122 // manipulations boil down to finding approximate log2, dividing by two, and
123 // then inverting the log2. A bias is added to make the relative error
124 // symmetric about the real answer.
125 const int32_t modifier = 0x1FBB4000;
126
127 int32_t r_i = *(int32_t*)(&r);
128 r_i = (r_i >> 1) + modifier;
129
130 return *(float*)(&r_i);
131}
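// Note: the casts above rely on type punning through pointers. A
// strict-aliasing-safe sketch of the same trick (not used by this HAL)
// would round-trip the bits through memcpy:
#if 0
static float sqrtf_approx_safe(float r) {
    const int32_t modifier = 0x1FBB4000;
    int32_t r_i;
    memcpy(&r_i, &r, sizeof(r_i)); // reinterpret the float's bits as int32
    r_i = (r_i >> 1) + modifier;   // halve the exponent and re-bias
    memcpy(&r, &r_i, sizeof(r));
    return r; // e.g. sqrtf_approx_safe(4.0f) is ~2.0, within +-3.6%
}
#endif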
132
// Copy 32-pixel-aligned RGB24 rows into a tightly packed destination,
// dropping each source row's alignment padding.
void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
134{
135 int stride = (width + 31) & ( ~31);
136 int w, h;
137 for (h=0; h<height; h++)
138 {
139 memcpy( dst, src, width*3);
140 dst += width*3;
141 src += stride*3;
142 }
143}
144
145static int ALIGN(int x, int y) {
146 // y must be a power of 2.
147 return (x + y - 1) & ~(y - 1);
148}
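// Usage example: ALIGN(13, 8) == 16 and ALIGN(16, 8) == 16. YUYVToYV12()
// below uses ALIGN(width/2, 16) to pad the chroma stride.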
149
bool IsUsbAvailablePictureSize(const usb_frmsize_discrete_t AvailablePictureSize[], uint32_t width, uint32_t height)
{
    int i;
    int count = sizeof(kUsbAvailablePictureSize)/sizeof(kUsbAvailablePictureSize[0]);
    for (i = 0; i < count; i++) {
        if ((width == AvailablePictureSize[i].width) && (height == AvailablePictureSize[i].height)) {
            return true;
        }
    }
    return false;
}
164
165void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
166{
167 structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
168 (mmInt32)vinfo->preview.format.fmt.pix.height,
169 (mmInt32)vinfo->preview.format.fmt.pix.width,
170 IC_FORMAT_YCbCr420_lp,
171 (mmByte *) src,
172 (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
173 0};
174
175 structConvImage output = {(mmInt32)width,
176 (mmInt32)height,
177 (mmInt32)width,
178 IC_FORMAT_YCbCr420_lp,
179 (mmByte *) img,
180 (mmByte *) img + width * height,
181 0};
182
183 if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
184 ALOGE("Sclale NV21 frame down failed!\n");
185}
186
187Sensor::Sensor():
188 Thread(false),
189 mGotVSync(false),
190 mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
191 mFrameDuration(kFrameDurationRange[0]),
192 mGainFactor(kDefaultSensitivity),
193 mNextBuffers(NULL),
194 mFrameNumber(0),
195 mCapturedBuffers(NULL),
196 mListener(NULL),
197 mExitSensorThread(false),
198 mIoctlSupport(0),
199 msupportrotate(0),
200 mTimeOutCount(0),
201 mWait(false),
202 mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
203{
204
205}
206
207Sensor::~Sensor() {
208 //shutDown();
209}
210
211status_t Sensor::startUp(int idx) {
212 ALOGV("%s: E", __FUNCTION__);
213 DBG_LOGA("ddd");
214
215 int res;
216 mCapturedBuffers = NULL;
217 res = run("EmulatedFakeCamera3::Sensor",
218 ANDROID_PRIORITY_URGENT_DISPLAY);
219
220 if (res != OK) {
221 ALOGE("Unable to start up sensor capture thread: %d", res);
222 }
223
    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    if (vinfo == NULL) {
        ALOGE("%s: Unable to allocate VideoInfo", __FUNCTION__);
        return NO_MEMORY;
    }
    vinfo->idx = idx;
226
227 res = camera_open(vinfo);
228 if (res < 0) {
229 ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
230 }
231
232 mSensorType = SENSOR_MMAP;
233 if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
234 mSensorType = SENSOR_USB;
235 }
236
237 if (strstr((const char *)vinfo->cap.card, "share_fd")) {
238 mSensorType = SENSOR_SHARE_FD;
239 }
240
241 if (strstr((const char *)vinfo->cap.card, "front"))
242 mSensorFace = SENSOR_FACE_FRONT;
243 else if (strstr((const char *)vinfo->cap.card, "back"))
244 mSensorFace = SENSOR_FACE_BACK;
245 else
246 mSensorFace = SENSOR_FACE_NONE;
247
248 return res;
249}
250
251sensor_type_e Sensor::getSensorType(void)
252{
253 return mSensorType;
254}
255status_t Sensor::IoctlStateProbe(void) {
256 struct v4l2_queryctrl qc;
257 int ret = 0;
258 mIoctlSupport = 0;
259 memset(&qc, 0, sizeof(struct v4l2_queryctrl));
260 qc.id = V4L2_ROTATE_ID;
261 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
262 if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)|| (qc.type != V4L2_CTRL_TYPE_INTEGER)){
263 mIoctlSupport &= ~IOCTL_MASK_ROTATE;
264 }else{
265 mIoctlSupport |= IOCTL_MASK_ROTATE;
266 }
267
268 if(mIoctlSupport & IOCTL_MASK_ROTATE){
269 msupportrotate = true;
270 DBG_LOGA("camera support capture rotate");
271 }
272 return mIoctlSupport;
273}
274
275uint32_t Sensor::getStreamUsage(int stream_type)
276{
277 uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
278
279 switch (stream_type) {
280 case CAMERA3_STREAM_OUTPUT:
281 usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
282 break;
283 case CAMERA3_STREAM_INPUT:
284 usage = GRALLOC_USAGE_HW_CAMERA_READ;
285 break;
286 case CAMERA3_STREAM_BIDIRECTIONAL:
287 usage = GRALLOC_USAGE_HW_CAMERA_READ |
288 GRALLOC_USAGE_HW_CAMERA_WRITE;
289 break;
290 }
291 if ((mSensorType == SENSOR_MMAP)
292 || (mSensorType == SENSOR_USB)) {
293 usage = (GRALLOC_USAGE_HW_TEXTURE
294 | GRALLOC_USAGE_HW_RENDER
295 | GRALLOC_USAGE_SW_READ_MASK
296 | GRALLOC_USAGE_SW_WRITE_MASK
297 );
298 }
299
300 return usage;
301}
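// For MMAP and USB sensors the HAL converts and copies frames on the CPU,
// so their gralloc buffers must be CPU-mappable; that is why the SW read /
// write masks override the HW_CAMERA usage picked from the stream type.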
302
303status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
304{
305 int res;
306
307 mFramecount = 0;
308 mCurFps = 0;
309 gettimeofday(&mTimeStart, NULL);
310
311 if (isjpeg) {
312 vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
313 vinfo->picture.format.fmt.pix.width = width;
314 vinfo->picture.format.fmt.pix.height = height;
315 vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
316 } else {
317 vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
318 vinfo->preview.format.fmt.pix.width = width;
319 vinfo->preview.format.fmt.pix.height = height;
320 vinfo->preview.format.fmt.pix.pixelformat = pixelformat;
321
322 res = setBuffersFormat(vinfo);
323 if (res < 0) {
324 ALOGE("set buffer failed\n");
325 return res;
326 }
327 }
328
329 return OK;
330
331}
332
333status_t Sensor::streamOn() {
334
335 return start_capturing(vinfo);
336}
337
338bool Sensor::isStreaming() {
339
340 return vinfo->isStreaming;
341}
342
343bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
344{
345 if ((vinfo->preview.format.fmt.pix.width != width)
346 ||(vinfo->preview.format.fmt.pix.height != height)
347 //||(vinfo->format.fmt.pix.pixelformat != pixelformat)
348 ) {
349
350 return true;
351
352 }
353
354 return false;
355}
356status_t Sensor::streamOff() {
357 if (mSensorType == SENSOR_USB) {
358 return releasebuf_and_stop_capturing(vinfo);
359 } else {
360 return stop_capturing(vinfo);
361 }
362}
363
364int Sensor::getOutputFormat()
365{
366 struct v4l2_fmtdesc fmt;
367 int ret;
368 memset(&fmt,0,sizeof(fmt));
369 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
370
371 fmt.index = 0;
372 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
373 if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
374 return V4L2_PIX_FMT_MJPEG;
375 fmt.index++;
376 }
377
378 fmt.index = 0;
379 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
380 if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
381 return V4L2_PIX_FMT_NV21;
382 fmt.index++;
383 }
384
385 fmt.index = 0;
386 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
387 if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
388 return V4L2_PIX_FMT_YUYV;
389 fmt.index++;
390 }
391
392 ALOGE("Unable to find a supported sensor format!");
393 return BAD_VALUE;
394}
395
/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format to a v4l2 format and check whether
 * that format is supported.
 */
400int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
401{
402 struct v4l2_fmtdesc fmt;
403 int ret;
404 memset(&fmt,0,sizeof(fmt));
405 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
406
407 if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
408 pixelfmt = V4L2_PIX_FMT_YVU420;
409 } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
410 pixelfmt = V4L2_PIX_FMT_NV21;
411 } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
412 pixelfmt = V4L2_PIX_FMT_YUYV;
413 } else {
414 pixelfmt = V4L2_PIX_FMT_NV21;
415 }
416
417 fmt.index = 0;
418 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
419 if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
420 return V4L2_PIX_FMT_MJPEG;
421 fmt.index++;
422 }
423
424 fmt.index = 0;
425 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
426 if (fmt.pixelformat == pixelfmt)
427 return pixelfmt;
428 fmt.index++;
429 }
430
431 fmt.index = 0;
432 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
433 if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
434 return V4L2_PIX_FMT_YUYV;
435 fmt.index++;
436 }
437 ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
438 return BAD_VALUE;
439}
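// Example (a sketch of the fallback order): a request for
// HAL_PIXEL_FORMAT_YCrCb_420_SP maps to V4L2_PIX_FMT_NV21 and yields
// MJPEG if the driver enumerates it, else NV21 if enumerated, else YUYV,
// else BAD_VALUE.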
440
441void Sensor::setPictureRotate(int rotate)
442{
443 mRotateValue = rotate;
444}
445int Sensor::getPictureRotate()
446{
447 return mRotateValue;
448}
449status_t Sensor::shutDown() {
450 ALOGV("%s: E", __FUNCTION__);
451
452 int res;
453
454 mTimeOutCount = 0;
455
456 res = requestExitAndWait();
457 if (res != OK) {
458 ALOGE("Unable to shut down sensor capture thread: %d", res);
459 }
460
461 if (vinfo != NULL) {
462 if (mSensorType == SENSOR_USB) {
463 releasebuf_and_stop_capturing(vinfo);
464 } else {
465 stop_capturing(vinfo);
466 }
467 }
468
469 camera_close(vinfo);
470
471 if (vinfo){
472 free(vinfo);
473 vinfo = NULL;
474 }
475 ALOGD("%s: Exit", __FUNCTION__);
476 return res;
477}
478
479void Sensor::sendExitSingalToSensor() {
480 {
481 Mutex::Autolock lock(mReadoutMutex);
482 mExitSensorThread = true;
483 mReadoutComplete.signal();
484 }
485
486 {
487 Mutex::Autolock lock(mControlMutex);
488 mVSync.signal();
489 }
490
491 {
492 Mutex::Autolock lock(mReadoutMutex);
493 mReadoutAvailable.signal();
494 }
495}
496
497Scene &Sensor::getScene() {
498 return mScene;
499}
500
501int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
502{
503 int ret = 0;
504 struct v4l2_queryctrl qc;
505
506 memset(&qc, 0, sizeof(qc));
507 qc.id = V4L2_CID_ZOOM_ABSOLUTE;
508 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
509
510 if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || ( ret < 0)
511 || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
512 ret = -1;
513 *zoomMin = 0;
514 *zoomMax = 0;
515 *zoomStep = 1;
516 CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
517 } else {
518 *zoomMin = qc.minimum;
519 *zoomMax = qc.maximum;
520 *zoomStep = qc.step;
521 DBG_LOGB("zoomMin:%dzoomMax:%dzoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
522 }
523
524 return ret ;
525}
526
527int Sensor::setZoom(int zoomValue)
528{
529 int ret = 0;
530 struct v4l2_control ctl;
531
532 memset( &ctl, 0, sizeof(ctl));
533 ctl.value = zoomValue;
534 ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
535 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
536 if (ret < 0) {
537 ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
538 }
539 return ret ;
540}
541
542status_t Sensor::setEffect(uint8_t effect)
543{
544 int ret = 0;
545 struct v4l2_control ctl;
546 ctl.id = V4L2_CID_COLORFX;
547
548 switch (effect) {
549 case ANDROID_CONTROL_EFFECT_MODE_OFF:
550 ctl.value= CAM_EFFECT_ENC_NORMAL;
551 break;
552 case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
553 ctl.value= CAM_EFFECT_ENC_COLORINV;
554 break;
555 case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
556 ctl.value= CAM_EFFECT_ENC_SEPIA;
557 break;
558 default:
559 ALOGE("%s: Doesn't support effect mode %d",
560 __FUNCTION__, effect);
561 return BAD_VALUE;
562 }
563
564 DBG_LOGB("set effect mode:%d", effect);
565 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
566 if (ret < 0) {
567 CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno),ret);
568 }
569 return ret ;
570}
571
572#define MAX_LEVEL_FOR_EXPOSURE 16
573#define MIN_LEVEL_FOR_EXPOSURE 3
574
575int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
576{
577 struct v4l2_queryctrl qc;
578 int ret=0;
579 int level = 0;
580 int middle = 0;
581
582 memset( &qc, 0, sizeof(qc));
583
584 DBG_LOGA("getExposure\n");
585 qc.id = V4L2_CID_EXPOSURE;
586 ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
587 if(ret < 0) {
588 CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
589 *minExp = -4;
590 *maxExp = 4;
591 *def = 0;
592 step->numerator = 1;
593 step->denominator = 1;
594 return ret;
595 }
596
597 if(0 < qc.step)
598 level = ( qc.maximum - qc.minimum + 1 )/qc.step;
599
600 if((level > MAX_LEVEL_FOR_EXPOSURE)
601 || (level < MIN_LEVEL_FOR_EXPOSURE)){
602 *minExp = -4;
603 *maxExp = 4;
604 *def = 0;
605 step->numerator = 1;
606 step->denominator = 1;
607 DBG_LOGB("not in[min,max], min=%d, max=%d, def=%d\n",
608 *minExp, *maxExp, *def);
609 return true;
610 }
611
612 middle = (qc.minimum+qc.maximum)/2;
613 *minExp = qc.minimum - middle;
614 *maxExp = qc.maximum - middle;
615 *def = qc.default_value - middle;
616 step->numerator = 1;
617 step->denominator = 2;//qc.step;
618 DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
619 return ret;
620}
621
622status_t Sensor::setExposure(int expCmp)
623{
624 int ret = 0;
625 struct v4l2_control ctl;
626 struct v4l2_queryctrl qc;
627
628 if(mEV == expCmp){
629 return 0;
630 }else{
631 mEV = expCmp;
632 }
633 memset(&ctl, 0, sizeof(ctl));
634 memset(&qc, 0, sizeof(qc));
635
636 qc.id = V4L2_CID_EXPOSURE;
637
638 ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
639 if (ret < 0) {
640 CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno),ret);
641 }
642
643 ctl.id = V4L2_CID_EXPOSURE;
644 ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;
645
646 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
647 if (ret < 0) {
648 CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno),ret);
649 }
650 DBG_LOGB("setExposure value%d mEVmin%d mEVmax%d\n",ctl.value, qc.minimum, qc.maximum);
651 return ret ;
652}
653
654int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
655{
656 struct v4l2_queryctrl qc;
657 struct v4l2_querymenu qm;
658 int ret;
659 int mode_count = -1;
660
661 memset(&qc, 0, sizeof(struct v4l2_queryctrl));
662 qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
663 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
664 if ( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
665 DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
666 } else if ( qc.type != V4L2_CTRL_TYPE_INTEGER) {
667 DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
668 } else {
669 memset(&qm, 0, sizeof(qm));
670
671 int index = 0;
672 mode_count = 1;
673 antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;
674
675 for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
676 if (mode_count >= maxCont)
677 break;
678
679 memset(&qm, 0, sizeof(struct v4l2_querymenu));
680 qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
681 qm.index = index;
682 if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
683 continue;
684 } else {
685 if (strcmp((char*)qm.name,"50hz") == 0) {
686 antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
687 mode_count++;
688 } else if (strcmp((char*)qm.name,"60hz") == 0) {
689 antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
690 mode_count++;
691 } else if (strcmp((char*)qm.name,"auto") == 0) {
692 antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
693 mode_count++;
694 }
695
696 }
697 }
698 }
699
700 return mode_count;
701}
702
703status_t Sensor::setAntiBanding(uint8_t antiBanding)
704{
705 int ret = 0;
706 struct v4l2_control ctl;
707 ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;
708
709 switch (antiBanding) {
710 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
711 ctl.value= CAM_ANTIBANDING_OFF;
712 break;
713 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
714 ctl.value= CAM_ANTIBANDING_50HZ;
715 break;
716 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
717 ctl.value= CAM_ANTIBANDING_60HZ;
718 break;
719 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
720 ctl.value= CAM_ANTIBANDING_AUTO;
721 break;
722 default:
723 ALOGE("%s: Doesn't support ANTIBANDING mode %d",
724 __FUNCTION__, antiBanding);
725 return BAD_VALUE;
726 }
727
728 DBG_LOGB("anti banding mode:%d", antiBanding);
729 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
730 if ( ret < 0) {
731 CAMHAL_LOGDA("failed to set anti banding mode!\n");
732 return BAD_VALUE;
733 }
734 return ret;
735}
736
737status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
738{
739 int ret = 0;
740 struct v4l2_control ctl;
741 ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
742 ctl.value = ((x0 + x1) / 2 + 1000) << 16;
743 ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;
744
745 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
746 return ret;
747}
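// Packing example: for a focus region (x0,y0,x1,y1) = (0,0,200,100) the
// center is (100,50); with the +1000 offset the control value becomes
// ((100+1000) << 16) | ((50+1000) & 0xffff) = 0x044C041A.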
748
749
750int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
751{
752 struct v4l2_queryctrl qc;
753 struct v4l2_querymenu qm;
754 int ret;
755 int mode_count = -1;
756
757 memset(&qc, 0, sizeof(struct v4l2_queryctrl));
758 qc.id = V4L2_CID_FOCUS_AUTO;
759 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
760 if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
761 DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
762 }else if( qc.type != V4L2_CTRL_TYPE_MENU) {
763 DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
764 }else{
765 memset(&qm, 0, sizeof(qm));
766
767 int index = 0;
768 mode_count = 1;
769 afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;
770
771 for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
772 if (mode_count >= maxCount)
773 break;
774
775 memset(&qm, 0, sizeof(struct v4l2_querymenu));
776 qm.id = V4L2_CID_FOCUS_AUTO;
777 qm.index = index;
778 if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
779 continue;
780 } else {
781 if (strcmp((char*)qm.name,"auto") == 0) {
782 afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
783 mode_count++;
784 } else if (strcmp((char*)qm.name,"continuous-video") == 0) {
785 afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
786 mode_count++;
787 } else if (strcmp((char*)qm.name,"continuous-picture") == 0) {
788 afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
789 mode_count++;
790 }
791
792 }
793 }
794 }
795
796 return mode_count;
797}
798
799status_t Sensor::setAutoFocuas(uint8_t afMode)
800{
801 struct v4l2_control ctl;
802 ctl.id = V4L2_CID_FOCUS_AUTO;
803
804 switch (afMode) {
805 case ANDROID_CONTROL_AF_MODE_AUTO:
806 ctl.value = CAM_FOCUS_MODE_AUTO;
807 break;
808 case ANDROID_CONTROL_AF_MODE_MACRO:
809 ctl.value = CAM_FOCUS_MODE_MACRO;
810 break;
811 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
812 ctl.value = CAM_FOCUS_MODE_CONTI_VID;
813 break;
814 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
815 ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
816 break;
817 default:
818 ALOGE("%s: Emulator doesn't support AF mode %d",
819 __FUNCTION__, afMode);
820 return BAD_VALUE;
821 }
822
823 if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
824 CAMHAL_LOGDA("failed to set camera focuas mode!\n");
825 return BAD_VALUE;
826 }
827
828 return OK;
829}
830
831int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
832{
833 struct v4l2_queryctrl qc;
834 struct v4l2_querymenu qm;
835 int ret;
836 int mode_count = -1;
837
838 memset(&qc, 0, sizeof(struct v4l2_queryctrl));
839 qc.id = V4L2_CID_DO_WHITE_BALANCE;
840 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
841 if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
842 DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
843 }else if( qc.type != V4L2_CTRL_TYPE_MENU) {
844 DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
845 }else{
846 memset(&qm, 0, sizeof(qm));
847
848 int index = 0;
849 mode_count = 1;
850 awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;
851
852 for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
853 if (mode_count >= maxCount)
854 break;
855
856 memset(&qm, 0, sizeof(struct v4l2_querymenu));
857 qm.id = V4L2_CID_DO_WHITE_BALANCE;
858 qm.index = index;
859 if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
860 continue;
861 } else {
862 if (strcmp((char*)qm.name,"auto") == 0) {
863 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
864 mode_count++;
865 } else if (strcmp((char*)qm.name,"daylight") == 0) {
866 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
867 mode_count++;
868 } else if (strcmp((char*)qm.name,"incandescent") == 0) {
869 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
870 mode_count++;
871 } else if (strcmp((char*)qm.name,"fluorescent") == 0) {
872 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
873 mode_count++;
874 } else if (strcmp((char*)qm.name,"warm-fluorescent") == 0) {
875 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
876 mode_count++;
877 } else if (strcmp((char*)qm.name,"cloudy-daylight") == 0) {
878 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
879 mode_count++;
880 } else if (strcmp((char*)qm.name,"twilight") == 0) {
881 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
882 mode_count++;
883 } else if (strcmp((char*)qm.name,"shade") == 0) {
884 awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
885 mode_count++;
886 }
887
888 }
889 }
890 }
891
892 return mode_count;
893}
894
895status_t Sensor::setAWB(uint8_t awbMode)
896{
897 int ret = 0;
898 struct v4l2_control ctl;
899 ctl.id = V4L2_CID_DO_WHITE_BALANCE;
900
901 switch (awbMode) {
902 case ANDROID_CONTROL_AWB_MODE_AUTO:
903 ctl.value = CAM_WB_AUTO;
904 break;
905 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
906 ctl.value = CAM_WB_INCANDESCENCE;
907 break;
908 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
909 ctl.value = CAM_WB_FLUORESCENT;
910 break;
911 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
912 ctl.value = CAM_WB_DAYLIGHT;
913 break;
914 case ANDROID_CONTROL_AWB_MODE_SHADE:
915 ctl.value = CAM_WB_SHADE;
916 break;
917 default:
918 ALOGE("%s: Emulator doesn't support AWB mode %d",
919 __FUNCTION__, awbMode);
920 return BAD_VALUE;
921 }
922 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
923 return ret;
924}
925
926void Sensor::setExposureTime(uint64_t ns) {
927 Mutex::Autolock lock(mControlMutex);
928 ALOGVV("Exposure set to %f", ns/1000000.f);
929 mExposureTime = ns;
930}
931
932void Sensor::setFrameDuration(uint64_t ns) {
933 Mutex::Autolock lock(mControlMutex);
934 ALOGVV("Frame duration set to %f", ns/1000000.f);
935 mFrameDuration = ns;
936}
937
938void Sensor::setSensitivity(uint32_t gain) {
939 Mutex::Autolock lock(mControlMutex);
940 ALOGVV("Gain set to %d", gain);
941 mGainFactor = gain;
942}
943
944void Sensor::setDestinationBuffers(Buffers *buffers) {
945 Mutex::Autolock lock(mControlMutex);
946 mNextBuffers = buffers;
947}
948
949void Sensor::setFrameNumber(uint32_t frameNumber) {
950 Mutex::Autolock lock(mControlMutex);
951 mFrameNumber = frameNumber;
952}
953
954status_t Sensor::waitForVSync(nsecs_t reltime) {
955 int res;
956 Mutex::Autolock lock(mControlMutex);
957 CAMHAL_LOGDB("%s , E mControlMutex" , __FUNCTION__);
958 if (mExitSensorThread) {
959 return -1;
960 }
961
962 mGotVSync = false;
963 res = mVSync.waitRelative(mControlMutex, reltime);
964 if (res != OK && res != TIMED_OUT) {
965 ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
966 return false;
967 }
968 CAMHAL_LOGDB("%s , X mControlMutex , mGotVSync = %d " , __FUNCTION__ , mGotVSync);
969 return mGotVSync;
970}
971
972status_t Sensor::waitForNewFrame(nsecs_t reltime,
973 nsecs_t *captureTime) {
974 Mutex::Autolock lock(mReadoutMutex);
975 if (mExitSensorThread) {
976 return -1;
977 }
978
979 if (mCapturedBuffers == NULL) {
980 int res;
981 CAMHAL_LOGDB("%s , E mReadoutMutex , reltime = %d" , __FUNCTION__, reltime);
982 res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
983 if (res == TIMED_OUT) {
984 return false;
985 } else if (res != OK || mCapturedBuffers == NULL) {
986 ALOGE("Error waiting for sensor readout signal: %d", res);
987 return false;
988 }
989 }
990 if (mWait) {
991 mWait = false;
992 *captureTime = mCaptureTime;
993 mCapturedBuffers = NULL;
994 mReadoutComplete.signal();
995 } else {
996 *captureTime = mCaptureTime;
997 mCapturedBuffers = NULL;
998 }
999 CAMHAL_LOGDB("%s , X" , __FUNCTION__);
1000 return true;
1001}
1002
1003Sensor::SensorListener::~SensorListener() {
1004}
1005
1006void Sensor::setSensorListener(SensorListener *listener) {
1007 Mutex::Autolock lock(mControlMutex);
1008 mListener = listener;
1009}
1010
1011status_t Sensor::readyToRun() {
1012 int res;
1013 ALOGV("Starting up sensor thread");
1014 mStartupTime = systemTime();
1015 mNextCaptureTime = 0;
1016 mNextCapturedBuffers = NULL;
1017
1018 DBG_LOGA("");
1019
1020 return OK;
1021}
1022
1023bool Sensor::threadLoop() {
1024 /**
1025 * Sensor capture operation main loop.
1026 *
1027 * Stages are out-of-order relative to a single frame's processing, but
1028 * in-order in time.
1029 */
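    /*
     * Rough per-iteration timeline (a sketch of the code below): Stage 1
     * latches the latest controls and signals VSync; Stage 3 hands the
     * previous frame's buffers to the readout waiter; Stage 2 captures the
     * new frame; the loop then sleeps out the rest of frameDuration.
     */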
1030
1031 if (mExitSensorThread) {
1032 return false;
1033 }
1034 /**
1035 * Stage 1: Read in latest control parameters
1036 */
1037 uint64_t exposureDuration;
1038 uint64_t frameDuration;
1039 uint32_t gain;
1040 Buffers *nextBuffers;
1041 uint32_t frameNumber;
1042 SensorListener *listener = NULL;
1043 {
1044 Mutex::Autolock lock(mControlMutex);
1045 CAMHAL_LOGDB("%s , E mControlMutex" , __FUNCTION__);
1046 exposureDuration = mExposureTime;
1047 frameDuration = mFrameDuration;
1048 gain = mGainFactor;
1049 nextBuffers = mNextBuffers;
1050 frameNumber = mFrameNumber;
1051 listener = mListener;
1052 // Don't reuse a buffer set
1053 mNextBuffers = NULL;
1054
1055 // Signal VSync for start of readout
1056 ALOGVV("Sensor VSync");
1057 mGotVSync = true;
1058 mVSync.signal();
1059 }
1060
1061 /**
1062 * Stage 3: Read out latest captured image
1063 */
1064
1065 Buffers *capturedBuffers = NULL;
1066 nsecs_t captureTime = 0;
1067
1068 nsecs_t startRealTime = systemTime();
1069 // Stagefright cares about system time for timestamps, so base simulated
1070 // time on that.
1071 nsecs_t simulatedTime = startRealTime;
1072 nsecs_t frameEndRealTime = startRealTime + frameDuration;
1073 nsecs_t frameReadoutEndRealTime = startRealTime +
1074 kRowReadoutTime * kResolution[1];
1075
1076 if (mNextCapturedBuffers != NULL) {
1077 ALOGVV("Sensor starting readout");
1078 // Pretend we're doing readout now; will signal once enough time has elapsed
1079 capturedBuffers = mNextCapturedBuffers;
1080 captureTime = mNextCaptureTime;
1081 }
1082 simulatedTime += kRowReadoutTime + kMinVerticalBlank;
1083
1084 // TODO: Move this signal to another thread to simulate readout
1085 // time properly
1086 if (capturedBuffers != NULL) {
1087 ALOGVV("Sensor readout complete");
1088 Mutex::Autolock lock(mReadoutMutex);
1089 CAMHAL_LOGDB("%s , E mReadoutMutex" , __FUNCTION__);
1090 if (mCapturedBuffers != NULL) {
1091 ALOGE("Waiting for readout thread to catch up!");
1092 mWait = true;
1093 mReadoutComplete.wait(mReadoutMutex);
1094 }
1095
1096 mCapturedBuffers = capturedBuffers;
1097 mCaptureTime = captureTime;
1098 mReadoutAvailable.signal();
1099 capturedBuffers = NULL;
1100 }
1101 CAMHAL_LOGDB("%s , X mReadoutMutex" , __FUNCTION__);
1102
1103 if (mExitSensorThread) {
1104 return false;
1105 }
1106 /**
1107 * Stage 2: Capture new image
1108 */
1109 mNextCaptureTime = simulatedTime;
1110 mNextCapturedBuffers = nextBuffers;
1111
1112 if (mNextCapturedBuffers != NULL) {
1113 if (listener != NULL) {
1114#if 0
1115 if (get_device_status(vinfo)) {
1116 listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
1117 }
1118#endif
1119 listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
1120 mNextCaptureTime);
1121 }
1122
1123 ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
1124 (float)exposureDuration/1e6, gain);
1125 mScene.setExposureDuration((float)exposureDuration/1e9);
1126 mScene.calculateScene(mNextCaptureTime);
1127
1128 if ( mSensorType == SENSOR_SHARE_FD) {
1129 captureNewImageWithGe2d();
1130 } else {
1131 captureNewImage();
1132 }
1133 mFramecount ++;
1134 }
1135
1136 if (mExitSensorThread) {
1137 return false;
1138 }
1139
1140 if (mFramecount == 100) {
1141 gettimeofday(&mTimeEnd, NULL);
1142 int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
1143 mCurFps = mFramecount/(interval/1000000.0f);
1144 memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
1145 mFramecount = 0;
1146 CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval, interval/1000000.0f, mCurFps);
1147 }
1148 ALOGVV("Sensor vertical blanking interval");
1149 nsecs_t workDoneRealTime = systemTime();
1150 const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
1151 if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
1152 timespec t;
1153 t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
1154 t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;
1155
1156 int ret;
1157 do {
1158 ret = nanosleep(&t, &t);
1159 } while (ret != 0);
1160 }
1161 nsecs_t endRealTime = systemTime();
1162 ALOGVV("Frame cycle took %d ms, target %d ms",
1163 (int)((endRealTime - startRealTime)/1000000),
1164 (int)(frameDuration / 1000000));
1165 CAMHAL_LOGDB("%s , X" , __FUNCTION__);
1166 return true;
}
1168
1169int Sensor::captureNewImageWithGe2d() {
1170
1171 uint32_t gain = mGainFactor;
1172 mKernelPhysAddr = 0;
1173
1174
1175 while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
1176 usleep(5000);
1177 }
1178
1179 // Might be adding more buffers, so size isn't constant
1180 for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
1181 const StreamBuffer &b = (*mNextCapturedBuffers)[i];
1182 fillStream(vinfo, mKernelPhysAddr, b);
1183 }
1184 putback_frame(vinfo);
1185 mKernelPhysAddr = 0;
1186
1187 return 0;
1188
1189}
1190
1191int Sensor::captureNewImage() {
1192 bool isjpeg = false;
1193 uint32_t gain = mGainFactor;
1194 mKernelBuffer = NULL;
1195
1196 // Might be adding more buffers, so size isn't constant
1197 ALOGVV("size=%d\n", mNextCapturedBuffers->size());
1198 for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
1199 const StreamBuffer &b = (*mNextCapturedBuffers)[i];
1200 ALOGVV("Sensor capturing buffer %d: stream %d,"
1201 " %d x %d, format %x, stride %d, buf %p, img %p",
1202 i, b.streamId, b.width, b.height, b.format, b.stride,
1203 b.buffer, b.img);
1204 switch (b.format) {
1205 case HAL_PIXEL_FORMAT_RAW_SENSOR:
1206 captureRaw(b.img, gain, b.stride);
1207 break;
1208 case HAL_PIXEL_FORMAT_RGB_888:
1209 captureRGB(b.img, gain, b.stride);
1210 break;
1211 case HAL_PIXEL_FORMAT_RGBA_8888:
1212 captureRGBA(b.img, gain, b.stride);
1213 break;
1214 case HAL_PIXEL_FORMAT_BLOB:
                // Add auxiliary buffer of the right size
1216 // Assumes only one BLOB (JPEG) buffer in
1217 // mNextCapturedBuffers
1218 StreamBuffer bAux;
1219 int orientation;
1220 orientation = getPictureRotate();
1221 ALOGD("bAux orientation=%d",orientation);
1222 uint32_t pixelfmt;
1223 if ((b.width == vinfo->preview.format.fmt.pix.width &&
1224 b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {
1225
1226 pixelfmt = getOutputFormat();
1227 if (pixelfmt == V4L2_PIX_FMT_YVU420) {
1228 pixelfmt = HAL_PIXEL_FORMAT_YV12;
1229 } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
1230 DBG_LOGA("");
1231 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
1232 } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
1233 pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
1234 } else {
1235 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
1236 }
1237 } else {
1238 isjpeg = true;
1239 pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
1240 }
1241
1242 if (!msupportrotate) {
1243 bAux.streamId = 0;
1244 bAux.width = b.width;
1245 bAux.height = b.height;
1246 bAux.format = pixelfmt;
1247 bAux.stride = b.width;
1248 bAux.buffer = NULL;
1249 } else {
1250 if ((orientation == 90) || (orientation == 270)) {
1251 bAux.streamId = 0;
1252 bAux.width = b.height;
1253 bAux.height = b.width;
1254 bAux.format = pixelfmt;
1255 bAux.stride = b.height;
1256 bAux.buffer = NULL;
1257 } else {
1258 bAux.streamId = 0;
1259 bAux.width = b.width;
1260 bAux.height = b.height;
1261 bAux.format = pixelfmt;
1262 bAux.stride = b.width;
1263 bAux.buffer = NULL;
1264 }
1265 }
1266 // TODO: Reuse these
1267 bAux.img = new uint8_t[b.width * b.height * 3];
1268 mNextCapturedBuffers->push_back(bAux);
1269 break;
1270 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
1271 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1272 captureNV21(b, gain);
1273 break;
1274 case HAL_PIXEL_FORMAT_YV12:
1275 captureYV12(b, gain);
1276 break;
1277 case HAL_PIXEL_FORMAT_YCbCr_422_I:
1278 captureYUYV(b.img, gain, b.stride);
1279 break;
1280 default:
1281 ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
1282 b.format);
1283 break;
1284 }
1285 }
    if (!isjpeg) { // The RGB888 JPEG source frame was saved into a separate
                   // buffer struct, whose buffer is put back separately.
1288 putback_frame(vinfo);
1289 }
1290 mKernelBuffer = NULL;
1291
1292 return 0;
1293}
1294
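// getStreamConfigurations() fills picSizes[] as flat 4-tuples, kept sorted
// by area (descending) within each format:
//   { HAL pixel format, width, height,
//     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }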
1295int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
1296 int res;
1297 int i, j, k, START;
1298 int count = 0;
1299 int pixelfmt;
1300 struct v4l2_frmsizeenum frmsize;
1301 char property[PROPERTY_VALUE_MAX];
1302 unsigned int support_w,support_h;
1303
1304 support_w = 10000;
1305 support_h = 10000;
1306 memset(property, 0, sizeof(property));
1307 if(property_get("ro.camera.preview.MaxSize", property, NULL) > 0){
1308 CAMHAL_LOGDB("support Max Preview Size :%s",property);
1309 if(sscanf(property,"%dx%d",&support_w,&support_h)!=2){
1310 support_w = 10000;
1311 support_h = 10000;
1312 }
1313 }
1314
1315 memset(&frmsize,0,sizeof(frmsize));
1316 frmsize.pixel_format = getOutputFormat();
1317
1318 START = 0;
1319 for (i = 0; ; i++) {
1320 frmsize.index = i;
1321 res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
1322 if (res < 0){
1323 DBG_LOGB("index=%d, break\n", i);
1324 break;
1325 }
1326
1327 if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
1328
1329 if (0 != (frmsize.discrete.width%16))
1330 continue;
1331
1332 if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
1333 continue;
1334
1335 if (count >= size)
1336 break;
1337
1338 picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
1339 picSizes[count+1] = frmsize.discrete.width;
1340 picSizes[count+2] = frmsize.discrete.height;
1341 picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
1342
1343 DBG_LOGB("get output width=%d, height=%d, format=%d\n",
1344 frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
1345 if (0 == i) {
1346 count += 4;
1347 continue;
1348 }
1349
1350 for (k = count; k > START; k -= 4) {
1351 if (frmsize.discrete.width * frmsize.discrete.height >
1352 picSizes[k - 3] * picSizes[k - 2]) {
1353 picSizes[k + 1] = picSizes[k - 3];
1354 picSizes[k + 2] = picSizes[k - 2];
1355
1356 } else {
1357 break;
1358 }
1359 }
1360 picSizes[k + 1] = frmsize.discrete.width;
1361 picSizes[k + 2] = frmsize.discrete.height;
1362
1363 count += 4;
1364 }
1365 }
1366
1367 START = count;
1368 for (i = 0; ; i++) {
1369 frmsize.index = i;
1370 res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
1371 if (res < 0){
1372 DBG_LOGB("index=%d, break\n", i);
1373 break;
1374 }
1375
1376 if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
1377
1378 if (0 != (frmsize.discrete.width%16))
1379 continue;
1380
1381 if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
1382 continue;
1383
1384 if (count >= size)
1385 break;
1386
1387 picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
1388 picSizes[count+1] = frmsize.discrete.width;
1389 picSizes[count+2] = frmsize.discrete.height;
1390 picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
1391
1392 DBG_LOGB("get output width=%d, height=%d, format =\
1393 HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
1394 frmsize.discrete.height);
1395 if (0 == i) {
1396 count += 4;
1397 continue;
1398 }
1399
1400 for (k = count; k > START; k -= 4) {
1401 if (frmsize.discrete.width * frmsize.discrete.height >
1402 picSizes[k - 3] * picSizes[k - 2]) {
1403 picSizes[k + 1] = picSizes[k - 3];
1404 picSizes[k + 2] = picSizes[k - 2];
1405
1406 } else {
1407 break;
1408 }
1409 }
1410 picSizes[k + 1] = frmsize.discrete.width;
1411 picSizes[k + 2] = frmsize.discrete.height;
1412
1413 count += 4;
1414 }
1415 }
1416
1417#if 0
1418 if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
1419 START = count;
1420 for (i = 0; ; i++) {
1421 frmsize.index = i;
1422 res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
1423 if (res < 0){
1424 DBG_LOGB("index=%d, break\n", i);
1425 break;
1426 }
1427
1428 if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
1429
1430 if (0 != (frmsize.discrete.width%16))
1431 continue;
1432
1433 if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
1434 continue;
1435
1436 if (count >= size)
1437 break;
1438
1439 picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
1440 picSizes[count+1] = frmsize.discrete.width;
1441 picSizes[count+2] = frmsize.discrete.height;
1442 picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
1443
1444 DBG_LOGB("get output width=%d, height=%d, format =\
1445 HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
1446 frmsize.discrete.height);
1447 if (0 == i) {
1448 count += 4;
1449 continue;
1450 }
1451
1452 for (k = count; k > START; k -= 4) {
1453 if (frmsize.discrete.width * frmsize.discrete.height >
1454 picSizes[k - 3] * picSizes[k - 2]) {
1455 picSizes[k + 1] = picSizes[k - 3];
1456 picSizes[k + 2] = picSizes[k - 2];
1457
1458 } else {
1459 break;
1460 }
1461 }
1462 picSizes[k + 1] = frmsize.discrete.width;
1463 picSizes[k + 2] = frmsize.discrete.height;
1464
1465 count += 4;
1466 }
1467 }
1468 }
1469#endif
1470
1471 uint32_t jpgSrcfmt[] = {
1472 V4L2_PIX_FMT_RGB24,
1473 V4L2_PIX_FMT_MJPEG,
1474 V4L2_PIX_FMT_YUYV,
1475 };
1476
1477 START = count;
1478 for (j = 0; j<(int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
1479 memset(&frmsize,0,sizeof(frmsize));
1480 frmsize.pixel_format = jpgSrcfmt[j];
1481
1482 for (i = 0; ; i++) {
1483 frmsize.index = i;
1484 res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
1485 if (res < 0){
1486 DBG_LOGB("index=%d, break\n", i);
1487 break;
1488 }
1489
1490 if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
1491
1492 if (0 != (frmsize.discrete.width%16))
1493 continue;
1494
1495 //if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
1496 // continue;
1497
1498 if (count >= size)
1499 break;
1500
1501 if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
1502 if (!IsUsbAvailablePictureSize(kUsbAvailablePictureSize, frmsize.discrete.width, frmsize.discrete.height))
1503 continue;
1504 }
1505
1506 picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
1507 picSizes[count+1] = frmsize.discrete.width;
1508 picSizes[count+2] = frmsize.discrete.height;
1509 picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
1510
1511 if (0 == i) {
1512 count += 4;
1513 continue;
1514 }
1515
1516 //TODO insert in descend order
1517 for (k = count; k > START; k -= 4) {
1518 if (frmsize.discrete.width * frmsize.discrete.height >
1519 picSizes[k - 3] * picSizes[k - 2]) {
1520 picSizes[k + 1] = picSizes[k - 3];
1521 picSizes[k + 2] = picSizes[k - 2];
1522
1523 } else {
1524 break;
1525 }
1526 }
1527
1528 picSizes[k + 1] = frmsize.discrete.width;
1529 picSizes[k + 2] = frmsize.discrete.height;
1530
1531 count += 4;
1532 }
1533 }
1534
1535 if (frmsize.index > 0)
1536 break;
1537 }
1538
1539 if (frmsize.index == 0)
1540 CAMHAL_LOGDA("no support pixel fmt for jpeg");
1541
1542 return count;
1543
1544}
1545
1546int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
1547{
1548 int ret=0; int framerate=0; int temp_rate=0;
1549 struct v4l2_frmivalenum fival;
1550 int i,j=0;
1551 int count = 0;
1552 int tmp_size = size;
    memset(duration, 0, sizeof(int64_t) * size); // ARRAY_SIZE() is wrong on a pointer parameter
1554 int pixelfmt_tbl[] = {
1555 V4L2_PIX_FMT_MJPEG,
1556 V4L2_PIX_FMT_YVU420,
1557 V4L2_PIX_FMT_NV21,
1558 V4L2_PIX_FMT_RGB24,
1559 V4L2_PIX_FMT_YUYV,
1560 //V4L2_PIX_FMT_YVU420
1561 };
1562
1563 for( i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++)
1564 {
1565 /* we got all duration for each resolution for prev format*/
1566 if (count >= tmp_size)
1567 break;
1568
1569 for( ; size > 0; size-=4)
1570 {
1571 memset(&fival, 0, sizeof(fival));
1572
1573 for (fival.index = 0;;fival.index++)
1574 {
1575 fival.pixel_format = pixelfmt_tbl[i];
1576 fival.width = picSizes[size-3];
1577 fival.height = picSizes[size-2];
1578 if((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
1579 if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE){
1580 temp_rate = fival.discrete.denominator/fival.discrete.numerator;
1581 if(framerate < temp_rate)
1582 framerate = temp_rate;
1583 duration[count+0] = (int64_t)(picSizes[size-4]);
1584 duration[count+1] = (int64_t)(picSizes[size-3]);
1585 duration[count+2] = (int64_t)(picSizes[size-2]);
                        duration[count+3] = (int64_t)66666666L; // TODO: derive from the frame interval reported by the driver
1587 j++;
                    } else if ((fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)
                            || (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE)) {
                        // Non-discrete intervals are reported through the
                        // stepwise union member; the shortest interval
                        // (stepwise.min) corresponds to the highest frame rate.
                        temp_rate = fival.stepwise.min.denominator/fival.stepwise.min.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)66666666L; // TODO: derive from the frame interval reported by the driver
                        j++;
                    }
1607 } else {
1608 if (j > 0) {
1609 if (count >= tmp_size)
1610 break;
1611 duration[count+0] = (int64_t)(picSizes[size-4]);
1612 duration[count+1] = (int64_t)(picSizes[size-3]);
1613 duration[count+2] = (int64_t)(picSizes[size-2]);
1614 if (framerate == 5) {
1615 duration[count+3] = (int64_t)200000000L;
1616 } else if (framerate == 10) {
1617 duration[count+3] = (int64_t)100000000L;
1618 } else if (framerate == 15) {
1619 duration[count+3] = (int64_t)66666666L;
1620 } else if (framerate == 30) {
1621 duration[count+3] = (int64_t)33333333L;
1622 } else {
1623 duration[count+3] = (int64_t)66666666L;
1624 }
1625 count += 4;
1626 break;
1627 } else {
1628 break;
1629 }
1630 }
1631 }
1632 j=0;
1633 }
1634 size = tmp_size;
1635 }
1636
1637 return count;
1638
1639}
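// duration[] mirrors the picSizes[] 4-tuple layout, with the last element
// holding the minimum frame duration in ns: 200000000 (5 fps), 100000000
// (10 fps), 66666666 (15 fps) or 33333333 (30 fps).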
1640
1641int64_t Sensor::getMinFrameDuration()
1642{
1643 int64_t tmpDuration = 66666666L; // 1/15 s
1644 int64_t frameDuration = 66666666L; // 1/15 s
1645 struct v4l2_frmivalenum fival;
1646 int i,j;
1647
1648 uint32_t pixelfmt_tbl[]={
1649 V4L2_PIX_FMT_MJPEG,
1650 V4L2_PIX_FMT_YUYV,
1651 V4L2_PIX_FMT_NV21,
1652 };
1653 struct v4l2_frmsize_discrete resolution_tbl[]={
1654 {1920, 1080},
1655 {1280, 960},
1656 {640, 480},
1657 {320, 240},
1658 };
1659
1660 for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
1661 for (j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++) {
1662 memset(&fival, 0, sizeof(fival));
1663 fival.index = 0;
1664 fival.pixel_format = pixelfmt_tbl[i];
1665 fival.width = resolution_tbl[j].width;
1666 fival.height = resolution_tbl[j].height;
1667
1668 while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
1669 if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
1670 tmpDuration =
1671 fival.discrete.numerator * 1000000000L / fival.discrete.denominator;
1672
1673 if (frameDuration > tmpDuration)
1674 frameDuration = tmpDuration;
1675 } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
1676 frameDuration =
1677 fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
1678 break;
1679 } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
1680 frameDuration =
1681 fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
1682 break;
1683 }
1684 fival.index++;
1685 }
1686 }
1687
1688 if (fival.index > 0) {
1689 break;
1690 }
1691 }
1692
1693 CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
1694 return frameDuration;
1695}
1696
1697int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
1698 int res;
1699 int i;
1700 int count = 0;
1701 struct v4l2_frmsizeenum frmsize;
1702 char property[PROPERTY_VALUE_MAX];
1703 unsigned int support_w,support_h;
1704 int preview_fmt;
1705
1706 support_w = 10000;
1707 support_h = 10000;
1708 memset(property, 0, sizeof(property));
1709 if(property_get("ro.camera.preview.MaxSize", property, NULL) > 0){
1710 CAMHAL_LOGDB("support Max Preview Size :%s",property);
1711 if(sscanf(property,"%dx%d",&support_w,&support_h)!=2){
1712 support_w = 10000;
1713 support_h = 10000;
1714 }
1715 }
1716
1717
1718 memset(&frmsize,0,sizeof(frmsize));
1719 preview_fmt = V4L2_PIX_FMT_NV21;//getOutputFormat();
1720
1721 if (preview_fmt == V4L2_PIX_FMT_MJPEG)
1722 frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
1723 else if (preview_fmt == V4L2_PIX_FMT_NV21) {
1724 if (preview == true)
1725 frmsize.pixel_format = V4L2_PIX_FMT_NV21;
1726 else
1727 frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
1728 } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
1729 if (preview == true)
1730 frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
1731 else
1732 frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
1733 } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
1734 frmsize.pixel_format = V4L2_PIX_FMT_YUYV;
1735
1736 for (i = 0; ; i++) {
1737 frmsize.index = i;
1738 res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
1739 if (res < 0){
1740 DBG_LOGB("index=%d, break\n", i);
1741 break;
1742 }
1743
1744
1745 if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type
1746
1747 if (0 != (frmsize.discrete.width%16))
1748 continue;
1749
1750 if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
1751 continue;
1752
1753 if (count >= size)
1754 break;
1755
1756 picSizes[count] = frmsize.discrete.width;
1757 picSizes[count+1] = frmsize.discrete.height;
1758
1759 if (0 == i) {
1760 count += 2;
1761 continue;
1762 }
1763
1764 //TODO insert in descend order
1765 if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
1766 picSizes[count + 0] = picSizes[count - 2];
1767 picSizes[count + 1] = picSizes[count - 1];
1768
1769 picSizes[count - 2] = frmsize.discrete.width;
1770 picSizes[count - 1] = frmsize.discrete.height;
1771 }
1772
1773 count += 2;
1774 }
1775 }
1776
1777 return count;
1778
1779}
1780
1781void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
1782 float totalGain = gain/100.0 * kBaseGainFactor;
1783 float noiseVarGain = totalGain * totalGain;
1784 float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
1785 + kReadNoiseVarAfterGain;
1786
1787 int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
1788 mScene.setReadoutPixel(0,0);
1789 for (unsigned int y = 0; y < kResolution[1]; y++ ) {
1790 int *bayerRow = bayerSelect + (y & 0x1) * 2;
1791 uint16_t *px = (uint16_t*)img + y * stride;
1792 for (unsigned int x = 0; x < kResolution[0]; x++) {
1793 uint32_t electronCount;
1794 electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];
1795
1796 // TODO: Better pixel saturation curve?
1797 electronCount = (electronCount < kSaturationElectrons) ?
1798 electronCount : kSaturationElectrons;
1799
1800 // TODO: Better A/D saturation curve?
1801 uint16_t rawCount = electronCount * totalGain;
1802 rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;
1803
1804 // Calculate noise value
1805 // TODO: Use more-correct Gaussian instead of uniform noise
1806 float photonNoiseVar = electronCount * noiseVarGain;
1807 float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
1808 // Scaled to roughly match gaussian/uniform noise stddev
1809 float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;
1810
1811 rawCount += kBlackLevel;
1812 rawCount += noiseStddev * noiseSample;
1813
1814 *px++ = rawCount;
1815 }
1816 // TODO: Handle this better
1817 //simulatedTime += kRowReadoutTime;
1818 }
1819 ALOGVV("Raw sensor image captured");
1820}
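// Worked example (sketch): at gain = 100 (1x), totalGain = kBaseGainFactor
// = 4000 / 2000 = 2.0, so a pixel holding 1000 electrons digitizes to
// 1000 * 2.0 + kBlackLevel = 3000 counts before the noise sample is added.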
1821
1822void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
1823 float totalGain = gain/100.0 * kBaseGainFactor;
1824 // In fixed-point math, calculate total scaling from electrons to 8bpp
1825 int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1826 uint32_t inc = kResolution[0] / stride;
1827
1828 for (unsigned int y = 0, outY = 0; y < kResolution[1]; y+=inc, outY++ ) {
1829 uint8_t *px = img + outY * stride * 4;
1830 mScene.setReadoutPixel(0, y);
1831 for (unsigned int x = 0; x < kResolution[0]; x+=inc) {
1832 uint32_t rCount, gCount, bCount;
1833 // TODO: Perfect demosaicing is a cheat
1834 const uint32_t *pixel = mScene.getPixelElectrons();
1835 rCount = pixel[Scene::R] * scale64x;
1836 gCount = pixel[Scene::Gr] * scale64x;
1837 bCount = pixel[Scene::B] * scale64x;
1838
1839 *px++ = rCount < 255*64 ? rCount / 64 : 255;
1840 *px++ = gCount < 255*64 ? gCount / 64 : 255;
1841 *px++ = bCount < 255*64 ? bCount / 64 : 255;
1842 *px++ = 255;
1843 for (unsigned int j = 1; j < inc; j++)
1844 mScene.getPixelElectrons();
1845 }
1846 // TODO: Handle this better
1847 //simulatedTime += kRowReadoutTime;
1848 }
1849 ALOGVV("RGBA sensor image captured");
1850}
1851
1852void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
1853#if 0
1854 float totalGain = gain/100.0 * kBaseGainFactor;
1855 // In fixed-point math, calculate total scaling from electrons to 8bpp
1856 int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1857 uint32_t inc = kResolution[0] / stride;
1858
1859 for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
1860 mScene.setReadoutPixel(0, y);
1861 uint8_t *px = img + outY * stride * 3;
1862 for (unsigned int x = 0; x < kResolution[0]; x += inc) {
1863 uint32_t rCount, gCount, bCount;
1864 // TODO: Perfect demosaicing is a cheat
1865 const uint32_t *pixel = mScene.getPixelElectrons();
1866 rCount = pixel[Scene::R] * scale64x;
1867 gCount = pixel[Scene::Gr] * scale64x;
1868 bCount = pixel[Scene::B] * scale64x;
1869
1870 *px++ = rCount < 255*64 ? rCount / 64 : 255;
1871 *px++ = gCount < 255*64 ? gCount / 64 : 255;
1872 *px++ = bCount < 255*64 ? bCount / 64 : 255;
1873 for (unsigned int j = 1; j < inc; j++)
1874 mScene.getPixelElectrons();
1875 }
1876 // TODO: Handle this better
1877 //simulatedTime += kRowReadoutTime;
1878 }
1879#else
1880 uint8_t *src = NULL;
1881 int ret = 0, rotate = 0;
1882 uint32_t width = 0, height = 0;
1883 int dqTryNum = 3;
1884
1885 rotate = getPictureRotate();
1886 width = vinfo->picture.format.fmt.pix.width;
1887 height = vinfo->picture.format.fmt.pix.height;
1888
1889 if (mSensorType == SENSOR_USB) {
1890 releasebuf_and_stop_capturing(vinfo);
1891 } else {
1892 stop_capturing(vinfo);
1893 }
1894
1895 ret = start_picture(vinfo,rotate);
1896 if (ret < 0)
1897 {
1898 ALOGD("start picture failed!");
1899 }
1900 while(1)
1901 {
1902 src = (uint8_t *)get_picture(vinfo);
1903 if ((NULL != src) && (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)) {
1904 while (dqTryNum > 0) {
1905 if (NULL != src) {
1906 putback_picture_frame(vinfo);
1907 }
1908 usleep(10000);
1909 dqTryNum --;
1910 src = (uint8_t *)get_picture(vinfo);
1911 }
1912 }
1913
1914 if (NULL != src) {
1915 if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
1916 uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
1917 if ( tmp_buffer == NULL) {
1918 ALOGE("new buffer failed!\n");
1919 return;
1920 }
                if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                    width, tmp_buffer + width * height, (width + 1) / 2, width,
                    height, width, height, libyuv::FOURCC_MJPG) != 0) {
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    delete [] tmp_buffer; // don't leak the scratch buffer across retries
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer,img,width,height);
                    delete [] tmp_buffer;
                    break;
                }
1933 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
1934 if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
1935 yuyv422_to_rgb24(src,img,width,height);
1936 break;
1937 } else {
1938 putback_picture_frame(vinfo);
1939 usleep(5000);
1940 }
1941 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
1942 if (vinfo->picture.buf.length == width * height * 3) {
1943 memcpy(img, src, vinfo->picture.buf.length);
1944 } else {
1945 rgb24_memcpy(img, src, width, height);
1946 }
1947 break;
1948 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
1949 memcpy(img, src, vinfo->picture.buf.length);
1950 break;
1951 }
1952 }
1953 }
1954 ALOGD("get picture success !");
1955
1956 if (mSensorType == SENSOR_USB) {
1957 releasebuf_and_stop_picture(vinfo);
1958 } else {
1959 stop_picture(vinfo);
1960 }
1961
1962#endif
1963}
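// Illustrative summary of the still-capture sequence above (a sketch, not
// part of the driver): stop the preview stream, start the picture stream,
// poll for a frame, convert it to RGB24, then restore the stream state.
// Only the YUYV conversion is shown; the MJPEG/RGB24/NV21 cases follow the
// same shape with a different conversion step.
#if 0
void capture_rgb_sketch(uint8_t *img) { // hypothetical, error handling elided
    stop_capturing(vinfo);
    start_picture(vinfo, getPictureRotate());
    uint8_t *src;
    while ((src = (uint8_t *)get_picture(vinfo)) == NULL)
        usleep(5000); // wait for the snapshot buffer
    uint32_t w = vinfo->picture.format.fmt.pix.width;
    uint32_t h = vinfo->picture.format.fmt.pix.height;
    yuyv422_to_rgb24(src, img, w, h);
    stop_picture(vinfo);
}
#endif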
1964
// Convert packed YUYV (YUY2) to NV21: a full-resolution Y plane followed by
// interleaved V/U at half resolution in each dimension. Chroma rows are
// averaged in pairs; an odd final row is copied as-is.
void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
    // Pass 1: every even byte of YUYV is a luma sample.
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    // Pass 2: average the chroma of each pair of rows, emitting V then U.
    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; // V
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; // U
        }
        src += width * 2 * 2;
    }

    // Odd height: the last row has no partner to average with.
    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); // V
            *dst++ = *(src + 1 + j); // U
        }
}
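// Usage sketch for YUYVToNV21() (illustrative only; `sensor` is a
// hypothetical Sensor pointer and <vector> is assumed): a YUYV frame holds
// width*height*2 bytes, the NV21 result needs width*height*3/2.
#if 0
const int width = 640, height = 480; // width must be even
std::vector<uint8_t> yuyv(width * height * 2);     // packed Y0 U0 Y1 V0 ...
std::vector<uint8_t> nv21(width * height * 3 / 2); // Y plane + interleaved VU
sensor->YUYVToNV21(yuyv.data(), nv21.data(), width, height);
#endif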
1985
void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    // width must be even; the chroma stride is aligned to 16 bytes as the
    // Android YV12 layout requires.
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width / 2, 16);
    c_size = c_stride * height / 2;
    cr_offset = y_size;          // V plane comes first in YV12
    cb_offset = y_size + c_size; // U plane follows

    // Pass 1: copy the luma samples (every even byte of YUYV).
    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    // Pass 2: average the chroma of each pair of rows; one YUYV line is
    // 2*width bytes, so j steps over Y0 U Y1 V quads.
    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) {
            // (c1 + c2 + 1) / 2 averages the two rows, rounding up.
            *(dst + cr_offset + j / 4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j / 4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
}
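// Worked example of the YV12 offsets computed above, for a 640x480 frame:
//   y_size    = 640 * 480          = 307200
//   c_stride  = ALIGN(640/2, 16)   = 320
//   c_size    = 320 * 480/2        = 76800
//   cr_offset = 307200                      (V plane directly after Y)
//   cb_offset = 307200 + 76800     = 384000 (U plane after V)
// Total buffer: y_size + 2 * c_size = 460800 bytes.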
2020
status_t Sensor::force_reset_sensor() {
    DBG_LOGA("force_reset_sensor");
    status_t ret;
    mTimeOutCount = 0;
    ret = streamOff();
    ret = setBuffersFormat(vinfo);
    ret = streamOn(); // only the final streamOn() status is returned
    DBG_LOGB("%s, ret = %d", __FUNCTION__, ret);
    return ret;
}
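// The preview capture loops below use force_reset_sensor() as a watchdog:
// after ~300 consecutive empty dequeues at a 5 ms poll interval (roughly
// 1.5 s), the stream is torn down, reformatted, and restarted instead of
// spinning forever.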
2031
2032void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
#if 0 // disabled synthetic-scene reference path; note it predates this function's StreamBuffer interface (img/stride)
2034 float totalGain = gain/100.0 * kBaseGainFactor;
2035 // Using fixed-point math with 6 bits of fractional precision.
2036 // In fixed-point math, calculate total scaling from electrons to 8bpp
2037 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2038 // In fixed-point math, saturation point of sensor after gain
2039 const int saturationPoint = 64 * 255;
2040 // Fixed-point coefficients for RGB-YUV transform
2041 // Based on JFIF RGB->YUV transform.
2042 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2043 const int rgbToY[] = {19, 37, 7};
2044 const int rgbToCb[] = {-10,-21, 32, 524288};
2045 const int rgbToCr[] = {32,-26, -5, 524288};
2046 // Scale back to 8bpp non-fixed-point
2047 const int scaleOut = 64;
2048 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2049
2050 uint32_t inc = kResolution[0] / stride;
2051 uint32_t outH = kResolution[1] / inc;
2052 for (unsigned int y = 0, outY = 0;
2053 y < kResolution[1]; y+=inc, outY++) {
2054 uint8_t *pxY = img + outY * stride;
2055 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2056 mScene.setReadoutPixel(0,y);
2057 for (unsigned int outX = 0; outX < stride; outX++) {
2058 int32_t rCount, gCount, bCount;
2059 // TODO: Perfect demosaicing is a cheat
2060 const uint32_t *pixel = mScene.getPixelElectrons();
2061 rCount = pixel[Scene::R] * scale64x;
2062 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2063 gCount = pixel[Scene::Gr] * scale64x;
2064 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2065 bCount = pixel[Scene::B] * scale64x;
2066 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2067
2068 *pxY++ = (rgbToY[0] * rCount +
2069 rgbToY[1] * gCount +
2070 rgbToY[2] * bCount) / scaleOutSq;
2071 if (outY % 2 == 0 && outX % 2 == 0) {
2072 *pxVU++ = (rgbToCr[0] * rCount +
2073 rgbToCr[1] * gCount +
2074 rgbToCr[2] * bCount +
2075 rgbToCr[3]) / scaleOutSq;
2076 *pxVU++ = (rgbToCb[0] * rCount +
2077 rgbToCb[1] * gCount +
2078 rgbToCb[2] * bCount +
2079 rgbToCb[3]) / scaleOutSq;
2080 }
2081 for (unsigned int j = 1; j < inc; j++)
2082 mScene.getPixelElectrons();
2083 }
2084 }
2085#else
    uint8_t *src;

    if (mKernelBuffer) {
        src = mKernelBuffer;
        // mKernelBuffer holds the previous frame, already converted to NV21
        // by the dequeue path below, so every supported preview format is
        // served the same way: straight copy when the sizes match, resize
        // otherwise.
        uint32_t format = vinfo->preview.format.fmt.pix.pixelformat;
        if (format == V4L2_PIX_FMT_NV21 ||
                format == V4L2_PIX_FMT_YUYV ||
                format == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3 / 2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else {
            ALOGE("Unknown sensor format: %d", format);
        }
        return;
    }
    while (1) {
2122 if (mExitSensorThread) {
2123 break;
2124 }
2125
2126 src = (uint8_t *)get_frame(vinfo);
2127 if (NULL == src) {
2128 if (get_device_status(vinfo)) {
2129 break;
2130 }
2131 ALOGVV("get frame NULL, sleep 5ms");
2132 usleep(5000);
2133 mTimeOutCount++;
2134 if (mTimeOutCount > 300) {
2135 force_reset_sensor();
2136 }
2137 continue;
2138 }
2139 mTimeOutCount = 0;
2140 if (mSensorType == SENSOR_USB) {
2141 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2142 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2143 DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2144 putback_frame(vinfo);
2145 continue;
2146 }
2147 }
2148 }
2149 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2150 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2151 memcpy(b.img, src, vinfo->preview.buf.length);
2152 } else {
2153 nv21_memcpy_align32 (b.img, src, b.width, b.height);
2154 }
2155 mKernelBuffer = b.img;
2156 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2157 int width = vinfo->preview.format.fmt.pix.width;
2158 int height = vinfo->preview.format.fmt.pix.height;
2159 YUYVToNV21(src, b.img, width, height);
2160 mKernelBuffer = b.img;
2161 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2162 int width = vinfo->preview.format.fmt.pix.width;
2163 int height = vinfo->preview.format.fmt.pix.height;
2164 if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img,
2165 width, b.img + width * height, (width + 1) / 2, width,
2166 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2167 putback_frame(vinfo);
2168 DBG_LOGA("Decode MJPEG frame failed\n");
2169 continue;
2170 }
2171 mKernelBuffer = b.img;
2172 }
2173
2174 break;
2175 }
2176#endif
2177
2178 ALOGVV("NV21 sensor image captured");
2179}
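// Minimal caller-side sketch for captureNV21() (illustrative only; real
// StreamBuffers are gralloc-backed, `sensor` is a hypothetical Sensor
// pointer, and 100 is the ISO-style sensitivity used elsewhere in this HAL):
#if 0
StreamBuffer b;
b.width = 1280;
b.height = 720;
b.img = new uint8_t[b.width * b.height * 3 / 2]; // Y plane + interleaved VU
sensor->captureNV21(b, 100);
delete [] b.img;
#endif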
2180
2181void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
#if 0 // disabled synthetic-scene reference path (same caveats as in captureNV21)
2183 float totalGain = gain/100.0 * kBaseGainFactor;
2184 // Using fixed-point math with 6 bits of fractional precision.
2185 // In fixed-point math, calculate total scaling from electrons to 8bpp
2186 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2187 // In fixed-point math, saturation point of sensor after gain
2188 const int saturationPoint = 64 * 255;
2189 // Fixed-point coefficients for RGB-YUV transform
2190 // Based on JFIF RGB->YUV transform.
2191 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2192 const int rgbToY[] = {19, 37, 7};
2193 const int rgbToCb[] = {-10,-21, 32, 524288};
2194 const int rgbToCr[] = {32,-26, -5, 524288};
2195 // Scale back to 8bpp non-fixed-point
2196 const int scaleOut = 64;
2197 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2198
2199 uint32_t inc = kResolution[0] / stride;
2200 uint32_t outH = kResolution[1] / inc;
2201 for (unsigned int y = 0, outY = 0;
2202 y < kResolution[1]; y+=inc, outY++) {
2203 uint8_t *pxY = img + outY * stride;
2204 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2205 mScene.setReadoutPixel(0,y);
2206 for (unsigned int outX = 0; outX < stride; outX++) {
2207 int32_t rCount, gCount, bCount;
2208 // TODO: Perfect demosaicing is a cheat
2209 const uint32_t *pixel = mScene.getPixelElectrons();
2210 rCount = pixel[Scene::R] * scale64x;
2211 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2212 gCount = pixel[Scene::Gr] * scale64x;
2213 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2214 bCount = pixel[Scene::B] * scale64x;
2215 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2216
2217 *pxY++ = (rgbToY[0] * rCount +
2218 rgbToY[1] * gCount +
2219 rgbToY[2] * bCount) / scaleOutSq;
2220 if (outY % 2 == 0 && outX % 2 == 0) {
2221 *pxVU++ = (rgbToCr[0] * rCount +
2222 rgbToCr[1] * gCount +
2223 rgbToCr[2] * bCount +
2224 rgbToCr[3]) / scaleOutSq;
2225 *pxVU++ = (rgbToCb[0] * rCount +
2226 rgbToCb[1] * gCount +
2227 rgbToCb[2] * bCount +
2228 rgbToCb[3]) / scaleOutSq;
2229 }
2230 for (unsigned int j = 1; j < inc; j++)
2231 mScene.getPixelElectrons();
2232 }
2233 }
2234#else
2235 uint8_t *src;
2236 if (mKernelBuffer) {
2237 src = mKernelBuffer;
2238 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2239 //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
            ALOGI("Scale YV12 frame down\n");
2241
2242 int width = vinfo->preview.format.fmt.pix.width;
2243 int height = vinfo->preview.format.fmt.pix.height;
2244 int ret = libyuv::I420Scale(src, width,
2245 src + width * height, width / 2,
2246 src + width * height + width * height / 4, width / 2,
2247 width, height,
2248 b.img, b.width,
2249 b.img + b.width * b.height, b.width / 2,
2250 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2251 b.width, b.height,
2252 libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2255 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2256 int width = vinfo->preview.format.fmt.pix.width;
2257 int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }
2264
2265 YUYVToYV12(src, tmp_buffer, width, height);
2266
2267 int ret = libyuv::I420Scale(tmp_buffer, width,
2268 tmp_buffer + width * height, width / 2,
2269 tmp_buffer + width * height + width * height / 4, width / 2,
2270 width, height,
2271 b.img, b.width,
2272 b.img + b.width * b.height, b.width / 2,
2273 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2274 b.width, b.height,
2275 libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2278 delete [] tmp_buffer;
2279 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2280 int width = vinfo->preview.format.fmt.pix.width;
2281 int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
                delete [] tmp_buffer; // don't scale a frame that failed to decode
                return;
            }
2294
2295 int ret = libyuv::I420Scale(tmp_buffer, width,
2296 tmp_buffer + width * height, width / 2,
2297 tmp_buffer + width * height + width * height / 4, width / 2,
2298 width, height,
2299 b.img, b.width,
2300 b.img + b.width * b.height, b.width / 2,
2301 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2302 b.width, b.height,
2303 libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2306
2307 delete [] tmp_buffer;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
2312 }
    while (1) {
2314 if (mExitSensorThread) {
2315 break;
2316 }
2317 src = (uint8_t *)get_frame(vinfo);
2318
2319 if (NULL == src) {
2320 if (get_device_status(vinfo)) {
2321 break;
2322 }
2323 ALOGVV("get frame NULL, sleep 5ms");
2324 usleep(5000);
2325 mTimeOutCount++;
2326 if (mTimeOutCount > 300) {
2327 force_reset_sensor();
2328 }
2329 continue;
2330 }
2331 mTimeOutCount = 0;
2332 if (mSensorType == SENSOR_USB) {
2333 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2334 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2335 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2336 putback_frame(vinfo);
2337 continue;
2338 }
2339 }
2340 }
2341 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2342 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2343 memcpy(b.img, src, vinfo->preview.buf.length);
2344 } else {
2345 yv12_memcpy_align32 (b.img, src, b.width, b.height);
2346 }
2347 mKernelBuffer = b.img;
2348 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2349 int width = vinfo->preview.format.fmt.pix.width;
2350 int height = vinfo->preview.format.fmt.pix.height;
2351 YUYVToYV12(src, b.img, width, height);
2352 mKernelBuffer = b.img;
2353 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2354 int width = vinfo->preview.format.fmt.pix.width;
2355 int height = vinfo->preview.format.fmt.pix.height;
2356 if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
2357 b.img + width * height, (width + 1) / 2, 0, 0, width, height,
2358 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2359 putback_frame(vinfo);
2360 DBG_LOGA("Decode MJPEG frame failed\n");
2361 continue;
2362 }
2363 mKernelBuffer = b.img;
2364 } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2366 }
2367
2368 break;
2369 }
2370#endif
2371 //mKernelBuffer = src;
2372 ALOGVV("YV12 sensor image captured");
2373}
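// Note on the I420Scale() calls above: the buffers are YV12 (V plane ahead
// of U), while libyuv names its plane arguments in I420 (U-first) order.
// Since the V plane is passed in the "u" slots and the U plane in the "v"
// slots for both source and destination, each plane still scales into its
// matching plane and the YV12 layout survives. A hypothetical helper makes
// the plane math explicit (sketch only; it ignores the 16-byte chroma-stride
// alignment that YUYVToYV12() applies):
#if 0
static inline void yv12_planes(uint8_t *base, int w, int h,
                               uint8_t **y, uint8_t **v, uint8_t **u) {
    *y = base;                             // w * h luma bytes
    *v = base + w * h;                     // (w/2) * (h/2) V bytes
    *u = base + w * h + (w / 2) * (h / 2); // (w/2) * (h/2) U bytes
}
#endif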
2374
2375void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0 // disabled synthetic-scene reference path (same caveats as in captureNV21)
2377 float totalGain = gain/100.0 * kBaseGainFactor;
2378 // Using fixed-point math with 6 bits of fractional precision.
2379 // In fixed-point math, calculate total scaling from electrons to 8bpp
2380 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2381 // In fixed-point math, saturation point of sensor after gain
2382 const int saturationPoint = 64 * 255;
2383 // Fixed-point coefficients for RGB-YUV transform
2384 // Based on JFIF RGB->YUV transform.
2385 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2386 const int rgbToY[] = {19, 37, 7};
2387 const int rgbToCb[] = {-10,-21, 32, 524288};
2388 const int rgbToCr[] = {32,-26, -5, 524288};
2389 // Scale back to 8bpp non-fixed-point
2390 const int scaleOut = 64;
2391 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2392
2393 uint32_t inc = kResolution[0] / stride;
2394 uint32_t outH = kResolution[1] / inc;
2395 for (unsigned int y = 0, outY = 0;
2396 y < kResolution[1]; y+=inc, outY++) {
2397 uint8_t *pxY = img + outY * stride;
2398 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2399 mScene.setReadoutPixel(0,y);
2400 for (unsigned int outX = 0; outX < stride; outX++) {
2401 int32_t rCount, gCount, bCount;
2402 // TODO: Perfect demosaicing is a cheat
2403 const uint32_t *pixel = mScene.getPixelElectrons();
2404 rCount = pixel[Scene::R] * scale64x;
2405 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2406 gCount = pixel[Scene::Gr] * scale64x;
2407 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2408 bCount = pixel[Scene::B] * scale64x;
2409 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2410
2411 *pxY++ = (rgbToY[0] * rCount +
2412 rgbToY[1] * gCount +
2413 rgbToY[2] * bCount) / scaleOutSq;
2414 if (outY % 2 == 0 && outX % 2 == 0) {
2415 *pxVU++ = (rgbToCr[0] * rCount +
2416 rgbToCr[1] * gCount +
2417 rgbToCr[2] * bCount +
2418 rgbToCr[3]) / scaleOutSq;
2419 *pxVU++ = (rgbToCb[0] * rCount +
2420 rgbToCb[1] * gCount +
2421 rgbToCb[2] * bCount +
2422 rgbToCb[3]) / scaleOutSq;
2423 }
2424 for (unsigned int j = 1; j < inc; j++)
2425 mScene.getPixelElectrons();
2426 }
2427 }
2428#else
2429 uint8_t *src;
2430 if (mKernelBuffer) {
2431 src = mKernelBuffer;
2432 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            //TODO: YUYV scaling is unimplemented; a possible approach is
            //sketched at the end of this function.
            //memcpy(img, src, vinfo->preview.buf.length);

        } else
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);

        return;
2440 }
2441
    while (1) {
2443 if (mExitSensorThread) {
2444 break;
2445 }
2446 src = (uint8_t *)get_frame(vinfo);
2447 if (NULL == src) {
2448 if (get_device_status(vinfo)) {
2449 break;
2450 }
2451 ALOGVV("get frame NULL, sleep 5ms");
2452 usleep(5000);
2453 mTimeOutCount++;
2454 if (mTimeOutCount > 300) {
2455 force_reset_sensor();
2456 }
2457 continue;
2458 }
2459 mTimeOutCount = 0;
2460 if (mSensorType == SENSOR_USB) {
2461 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2462 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2463 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2464 putback_frame(vinfo);
2465 continue;
2466 }
2467 }
2468 }
2469 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2470 memcpy(img, src, vinfo->preview.buf.length);
2471 mKernelBuffer = src;
2472 } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2474 }
2475
2476 break;
2477 }
2478#endif
2479 //mKernelBuffer = src;
2480 ALOGVV("YUYV sensor image captured");
2481}
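// One possible way to fill in the YUYV-scaling TODO above (sketch only,
// written against current libyuv entry points -- YUY2ToI420, I420Scale,
// I420ToYUY2 -- which the vendored copy may declare with older typedefs;
// <vector> is assumed): unpack to I420, scale, and repack to YUY2.
#if 0
static int scale_yuyv(const uint8_t *src, int sw, int sh,
                      uint8_t *dst, int dw, int dh) {
    std::vector<uint8_t> in(sw * sh * 3 / 2), out(dw * dh * 3 / 2);
    uint8_t *iy = in.data(),  *iu = iy + sw * sh, *iv = iu + sw * sh / 4;
    uint8_t *oy = out.data(), *ou = oy + dw * dh, *ov = ou + dw * dh / 4;
    if (libyuv::YUY2ToI420(src, sw * 2, iy, sw, iu, sw / 2, iv, sw / 2, sw, sh) != 0)
        return -1;
    if (libyuv::I420Scale(iy, sw, iu, sw / 2, iv, sw / 2, sw, sh,
                          oy, dw, ou, dw / 2, ov, dw / 2, dw, dh,
                          libyuv::kFilterNone) != 0)
        return -1;
    return libyuv::I420ToYUY2(oy, dw, ou, dw / 2, ov, dw / 2, dst, dw * 2, dw, dh);
}
#endif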
2482
2483void Sensor::dump(int fd) {
2484 String8 result;
    result = String8::format("%s, sensor preview information:\n", __FILE__);
    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height: %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);
2489
2490 result.appendFormat("camera preview format: %.4s\n\n",
2491 (char *) &vinfo->preview.format.fmt.pix.pixelformat);
2492
2493 write(fd, result.string(), result.size());
2494}
2495
2496} // namespace android
2497
2498