/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>


#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
        Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
        * Sensor::kVoltsPerLuxSecond;
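
// Illustrative arithmetic (derived from the constants above): with a
// saturation point of 2000 electrons at 0.520 V and 0.100 V per lux-second,
// kElectronsPerLuxSecond = 2000 / 0.520 * 0.100 ≈ 384.6 electrons collected
// per lux-second of exposure on a pixel.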

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
        Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
        Sensor::kReadNoiseStddevBeforeGain *
        Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
        Sensor::kReadNoiseStddevAfterGain *
        Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
        Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
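
// Illustrative arithmetic: kFrameDurationRange[0] = 33331760 ns and
// kResolution[1] = 1200 rows, so kRowReadoutTime = 33331760 / 1200
// ≈ 27776 ns (~27.8 us) per row of the simulated rolling shutter.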

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

const usb_frmsize_discrete_t kUsbAvailablePictureSize[] = {
    {4128, 3096},
    {3264, 2448},
    {2592, 1944},
    {2592, 1936},
    {2560, 1920},
    {2688, 1520},
    {2048, 1536},
    {1600, 1200},
    {1920, 1088},
    {1920, 1080},
    {1440, 1080},
    {1280, 960},
    {1280, 720},
    {1024, 768},
    {960, 720},
    {720, 480},
    {640, 480},
    {320, 240},
};

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two,
    // and then inverting the log2. A bias is added to make the relative
    // error symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
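
// Illustrative check (not part of the pipeline): sqrtf_approx(4.0f) returns
// a value within roughly +-3.6% of 2.0f, i.e.
//
//   float s = sqrtf_approx(4.0f);   // somewhere in ~1.93 .. ~2.07
//
// which is accurate enough for the noise-stddev estimate in captureRaw().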

void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & (~31);
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
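
// Illustrative arithmetic: ALIGN(100, 16) = (100 + 15) & ~15 = 112, and the
// 32-pixel row alignment in rgb24_memcpy() maps width 100 to stride 128.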

bool IsUsbAvailablePictureSize(const usb_frmsize_discrete_t AvailablePictureSize[], uint32_t width, uint32_t height)
{
    int i;
    int count = sizeof(kUsbAvailablePictureSize) / sizeof(kUsbAvailablePictureSize[0]);
    for (i = 0; i < count; i++) {
        if ((width == AvailablePictureSize[i].width) && (height == AvailablePictureSize[i].height)) {
            return true;
        }
    }
    return false;
}

void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}

Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0] - kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mExitSensorThread(false),
        mIoctlSupport(0),
        msupportrotate(0),
        mTimeOutCount(0),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera3::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}
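
// Typical call order from the camera HAL, as a sketch only (error handling
// omitted; the exact sequencing is driven by the framework, not this file):
//
//   Sensor sensor;
//   sensor.startUp(0);                                  // open V4L2 device 0
//   sensor.setOutputFormat(1280, 720, V4L2_PIX_FMT_NV21, false);
//   sensor.streamOn();
//   ...                                // frames are produced by threadLoop()
//   sensor.streamOff();
//   sensor.shutDown();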

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera supports capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer format failed\n");
            return res;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
            || (vinfo->preview.format.fmt.pix.height != height)
            //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
            ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

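// Preference order when picking the device's native format: MJPEG first
// (presumably the cheapest way to move large frames over USB), then NV21,
// then YUYV. Each pass below re-runs VIDIOC_ENUM_FMT from index 0.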
int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise translate the
 * HAL format to a V4L2 format and check whether that one is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
    return BAD_VALUE;
}

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;

    mTimeOutCount = 0;

    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }
    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

void Sensor::sendExitSingalToSensor() {
    {
        Mutex::Autolock lock(mReadoutMutex);
        mExitSensorThread = true;
        mReadoutComplete.signal();
    }

    {
        Mutex::Autolock lock(mControlMutex);
        mVSync.signal();
    }

    {
        Mutex::Autolock lock(mReadoutMutex);
        mReadoutAvailable.signal();
    }
}

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("level not in [min, max]; using defaults min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2;//qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d min=%d max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

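// The focus-area control below packs the center of the requested rectangle
// into one 32-bit value: x-center plus 1000 in the high 16 bits, y-center
// plus 1000 in the low 16 bits. For example, a region centered at (0, 0)
// encodes as (1000 << 16) | 1000 = 0x03E803E8. The +1000 offset keeps both
// fields non-negative for coordinates down to -1000 (the classic Android
// metering-area range); the driver-side decoding is defined by the V4L2
// driver, not in this file.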
status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns / 1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns / 1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

status_t Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);
    if (mExitSensorThread) {
        return -1;
    }

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}

status_t Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mExitSensorThread) {
        return -1;
    }

    if (mCapturedBuffers == NULL) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    } else {
        mReadoutComplete.signal();
    }

    *captureTime = mCaptureTime;
    mCapturedBuffers = NULL;
    return true;
}
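
// Readout handshake between the capture thread and the framework thread (as
// implemented above and in threadLoop() below): threadLoop() publishes a
// finished buffer set in mCapturedBuffers under mReadoutMutex and signals
// mReadoutAvailable; waitForNewFrame() consumes it and, when the producer
// has raced ahead, signals mReadoutComplete so threadLoop() can publish the
// next set.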

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time
        // has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        if (mCapturedBuffers != NULL) {
            ALOGV("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
#if 0
            if (get_device_status(vinfo)) {
                listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
            }
#endif
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration / 1e6, gain);
        mScene.setExposureDuration((float)exposureDuration / 1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }

    if (mExitSensorThread) {
        return false;
    }

    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount / (interval / 1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld us (%f s), fps=%f\n", interval, interval / 1000000.0f, mCurFps);
    }

    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime) / 1000000),
            (int)(frameDuration / 1000000));
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    CAMHAL_LOGDB("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add an auxiliary buffer of the right size.
                // Assumes only one BLOB (JPEG) buffer is in
                // mNextCapturedBuffers.
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                uint32_t pixelfmt;
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                        b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        DBG_LOGA("");
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    // A JPEG (RGB888) capture is saved in a separate buffer struct whose
    // frame is put back separately, so skip the putback here in that case.
    if (!isjpeg) {
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

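// getStreamConfigurations() fills picSizes[] with flat groups of four
// int32 values, matching the layout Android expects for
// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS:
//
//   { format, width, height, direction, format, width, height, ... }
//
// e.g. { HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720,
//        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, ... }
//
// Entries for each format are kept sorted by decreasing pixel count via the
// insertion loops below.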
int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count + 0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count + 1] = frmsize.discrete.width;
            picSizes[count + 2] = frmsize.discrete.height;
            picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                    frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            // Insert in descending order of pixel count
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count + 0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count + 1] = frmsize.discrete.width;
            picSizes[count + 2] = frmsize.discrete.height;
            picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=HAL_PIXEL_FORMAT_YCbCr_420_888\n",
                    frmsize.discrete.width, frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count + 0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count + 1] = frmsize.discrete.width;
                picSizes[count + 2] = frmsize.discrete.height;
                picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format=HAL_PIXEL_FORMAT_YCbCr_422_I\n",
                        frmsize.discrete.width, frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt) / sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                    if (!IsUsbAvailablePictureSize(kUsbAvailablePictureSize, frmsize.discrete.width, frmsize.discrete.height))
                        continue;
                }

                picSizes[count + 0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count + 1] = frmsize.discrete.width;
                picSizes[count + 2] = frmsize.discrete.height;
                picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                // Insert in descending order of pixel count
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel format for jpeg");

    return count;
}

int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0;
    int framerate = 0;
    int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        // We got all durations of each resolution for the previous format
        if (count >= tmp_size)
            break;

        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size - 3];
                fival.height = picSizes[size - 2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator / fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count + 0] = (int64_t)(picSizes[size - 4]);
                        duration[count + 1] = (int64_t)(picSizes[size - 3]);
                        duration[count + 2] = (int64_t)(picSizes[size - 2]);
                        duration[count + 3] = (int64_t)66666666L; // TODO: use the frame interval reported by the driver
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS ||
                               fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        // These types report a [min, max] interval range;
                        // the shortest interval gives the highest rate.
                        temp_rate = fival.stepwise.min.denominator / fival.stepwise.min.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count + 0] = (int64_t)picSizes[size - 4];
                        duration[count + 1] = (int64_t)picSizes[size - 3];
                        duration[count + 2] = (int64_t)picSizes[size - 2];
                        duration[count + 3] = (int64_t)66666666L; // TODO: use the frame interval reported by the driver
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count >= tmp_size)
                            break;
                        duration[count + 0] = (int64_t)(picSizes[size - 4]);
                        duration[count + 1] = (int64_t)(picSizes[size - 3]);
                        duration[count + 2] = (int64_t)(picSizes[size - 2]);
                        if (framerate == 5) {
                            duration[count + 3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count + 3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count + 3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count + 3] = (int64_t)33333333L;
                        } else {
                            duration[count + 3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}

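// V4L2 reports frame intervals as numerator/denominator seconds; converting
// to nanoseconds is numerator * 1e9 / denominator, e.g. a 1/30 s interval
// becomes 1 * 1000000000 / 30 = 33333333 ns, as computed below.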
int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L; // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int)ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count + 1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            // Keep the list sorted by decreasing pixel count
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}

void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain / 100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

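// Fixed-point scaling used by the capture routines below: scale64x carries
// six fractional bits, so an output value is (electrons * scale64x) / 64.
// Illustrative arithmetic at the default sensitivity (gain = 100):
// totalGain = kBaseGainFactor = 4000 / 2000 = 2, so (with integer
// truncation) scale64x = 64 * 2 * 255 / 4000 = 8, and a pixel at the
// 2000-electron saturation level maps to 2000 * 8 / 64 = 250 counts.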
void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}

void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
#else
    uint8_t *src = NULL;
    int ret = 0, rotate = 0;
    uint32_t width = 0, height = 0;
    int dqTryNum = 3;

    rotate = getPictureRotate();
    width = vinfo->picture.format.fmt.pix.width;
    height = vinfo->picture.format.fmt.pix.height;

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_capturing(vinfo);
    } else {
        stop_capturing(vinfo);
    }

    ret = start_picture(vinfo, rotate);
    if (ret < 0)
    {
        ALOGD("start picture failed!");
    }
    while (1)
    {
        src = (uint8_t *)get_picture(vinfo);
        if ((NULL != src) && (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)) {
            // Requeue and re-dequeue a few YUYV frames so the exposure can settle
            while (dqTryNum > 0) {
                if (NULL != src) {
                    putback_picture_frame(vinfo);
                }
                usleep(10000);
                dqTryNum--;
                src = (uint8_t *)get_picture(vinfo);
            }
        }

        if (NULL != src) {
            if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
                if (tmp_buffer == NULL) {
                    ALOGE("new buffer failed!\n");
                    return;
                }
                if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                        width, tmp_buffer + width * height, (width + 1) / 2, width,
                        height, width, height, libyuv::FOURCC_MJPG) != 0) {
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    delete [] tmp_buffer;
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer, img, width, height);
                    delete [] tmp_buffer;
                    break;
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
                    yuyv422_to_rgb24(src, img, width, height);
                    break;
                } else {
                    putback_picture_frame(vinfo);
                    usleep(5000);
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
                if (vinfo->picture.buf.length == width * height * 3) {
                    memcpy(img, src, vinfo->picture.buf.length);
                } else {
                    rgb24_memcpy(img, src, width, height);
                }
                break;
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
                memcpy(img, src, vinfo->picture.buf.length);
                break;
            }
        }
    }
    ALOGD("get picture success!");

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}

void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
        }
        src += width * 2 * 2;
    }

    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); //v
            *dst++ = *(src + 1 + j); //u
        }
}
1972
void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    // width should be an even number.
    // Each chroma row is aligned to 16 bytes (c_stride = ALIGN(width/2, 16)),
    // matching the YV12 layout: Y plane, then Cr (V) plane, then Cb (U) plane.
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width / 2, 16);
    c_size = c_stride * height / 2;
    cr_offset = y_size;
    cb_offset = y_size + c_size;

    // Luma: take every other byte of the packed YUYV stream.
    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    // Chroma: one line holds 2*width bytes of YUYV. Average each U/V sample
    // with the one in the next row, i.e. (u1 + u2 + 1) / 2 rounds to nearest.
    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) {
            *(dst + cr_offset + j / 4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j / 4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
}
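// A reference-only sketch (disabled, like the other #if 0 blocks in this
// file) of the total YV12 buffer size implied by the plane math above;
// yv12BufferSize is a hypothetical helper, not part of this HAL.
#if 0
static size_t yv12BufferSize(int width, int height)
{
    size_t y_size = (size_t)width * height;    // full-resolution Y plane
    size_t c_stride = ALIGN(width / 2, 16);    // chroma rows 16-byte aligned
    size_t c_size = c_stride * (height / 2);   // one chroma plane (Cr or Cb)
    return y_size + 2 * c_size;                // Y + Cr + Cb
}
#endif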
2007
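// Watchdog helper for the capture loops below: after ~300 consecutive failed
// dequeues (about 1.5 s at the 5 ms poll interval) the stream is stopped,
// its buffer format re-applied, and streaming restarted.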
status_t Sensor::force_reset_sensor() {
    DBG_LOGA("force_reset_sensor");
    status_t ret, err;
    mTimeOutCount = 0;
    ret = streamOff();
    err = setBuffersFormat(vinfo);
    if (ret == NO_ERROR)
        ret = err;                  // report the first failure, if any
    err = streamOn();
    if (ret == NO_ERROR)
        ret = err;
    DBG_LOGB("%s, ret = %d", __FUNCTION__, ret);
    return ret;
}
2018
2019void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
2020#if 0
2021 float totalGain = gain/100.0 * kBaseGainFactor;
2022 // Using fixed-point math with 6 bits of fractional precision.
2023 // In fixed-point math, calculate total scaling from electrons to 8bpp
2024 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2025 // In fixed-point math, saturation point of sensor after gain
2026 const int saturationPoint = 64 * 255;
2027 // Fixed-point coefficients for RGB-YUV transform
2028 // Based on JFIF RGB->YUV transform.
2029 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2030 const int rgbToY[] = {19, 37, 7};
2031 const int rgbToCb[] = {-10,-21, 32, 524288};
2032 const int rgbToCr[] = {32,-26, -5, 524288};
2033 // Scale back to 8bpp non-fixed-point
2034 const int scaleOut = 64;
2035 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
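    // Derivation sketch: these are the JFIF weights scaled by 64. For Y,
    // 0.299 R + 0.587 G + 0.114 B -> {19, 37, 7} (0.299 * 64 ~= 19.1); the
    // Cb/Cr bias of 128 becomes 128 * 64 * 64 = 524288 because every term
    // is divided by scaleOutSq = 64 * 64 after the multiplies.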
2036
2037 uint32_t inc = kResolution[0] / stride;
2038 uint32_t outH = kResolution[1] / inc;
2039 for (unsigned int y = 0, outY = 0;
2040 y < kResolution[1]; y+=inc, outY++) {
2041 uint8_t *pxY = img + outY * stride;
2042 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2043 mScene.setReadoutPixel(0,y);
2044 for (unsigned int outX = 0; outX < stride; outX++) {
2045 int32_t rCount, gCount, bCount;
2046 // TODO: Perfect demosaicing is a cheat
2047 const uint32_t *pixel = mScene.getPixelElectrons();
2048 rCount = pixel[Scene::R] * scale64x;
2049 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2050 gCount = pixel[Scene::Gr] * scale64x;
2051 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2052 bCount = pixel[Scene::B] * scale64x;
2053 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2054
2055 *pxY++ = (rgbToY[0] * rCount +
2056 rgbToY[1] * gCount +
2057 rgbToY[2] * bCount) / scaleOutSq;
2058 if (outY % 2 == 0 && outX % 2 == 0) {
2059 *pxVU++ = (rgbToCr[0] * rCount +
2060 rgbToCr[1] * gCount +
2061 rgbToCr[2] * bCount +
2062 rgbToCr[3]) / scaleOutSq;
2063 *pxVU++ = (rgbToCb[0] * rCount +
2064 rgbToCb[1] * gCount +
2065 rgbToCb[2] * bCount +
2066 rgbToCb[3]) / scaleOutSq;
2067 }
2068 for (unsigned int j = 1; j < inc; j++)
2069 mScene.getPixelElectrons();
2070 }
2071 }
2072#else
2073 uint8_t *src;
2074
    if (mKernelBuffer) {
        src = mKernelBuffer;
        uint32_t format = vinfo->preview.format.fmt.pix.pixelformat;
        if (format == V4L2_PIX_FMT_NV21 ||
            format == V4L2_PIX_FMT_YUYV ||
            format == V4L2_PIX_FMT_MJPEG) {
            // mKernelBuffer already holds NV21 data at the preview size, so
            // copy it through or scale it to the requested buffer size.
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3 / 2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else {
            ALOGE("Unknown sensor format: %d", format);
        }
        return;
    }
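    // First pass for this frame: dequeue a buffer, convert it to NV21
    // directly into b.img, and cache the result in mKernelBuffer so that
    // further requests for the same frame take the fast path above.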
    while (1) {
2109 if (mExitSensorThread) {
2110 break;
2111 }
2112
2113 src = (uint8_t *)get_frame(vinfo);
2114 if (NULL == src) {
2115 if (get_device_status(vinfo)) {
2116 break;
2117 }
2118 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2119 usleep(5000);
2120 mTimeOutCount++;
2121 if (mTimeOutCount > 300) {
2122 force_reset_sensor();
2123 }
2124 continue;
2125 }
2126 mTimeOutCount = 0;
2127 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2128 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2129 DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2130 putback_frame(vinfo);
2131 continue;
2132 }
2133 }
2134 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2135 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2136 memcpy(b.img, src, vinfo->preview.buf.length);
2137 } else {
                nv21_memcpy_align32(b.img, src, b.width, b.height);
2139 }
2140 mKernelBuffer = b.img;
2141 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2142 int width = vinfo->preview.format.fmt.pix.width;
2143 int height = vinfo->preview.format.fmt.pix.height;
2144 YUYVToNV21(src, b.img, width, height);
2145 mKernelBuffer = b.img;
2146 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2147 int width = vinfo->preview.format.fmt.pix.width;
2148 int height = vinfo->preview.format.fmt.pix.height;
2149 if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img,
2150 width, b.img + width * height, (width + 1) / 2, width,
2151 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2152 putback_frame(vinfo);
2153 DBG_LOGA("Decode MJPEG frame failed\n");
2154 continue;
2155 }
2156 mKernelBuffer = b.img;
2157 }
2158
2159 break;
2160 }
2161#endif
2162
2163 ALOGVV("NV21 sensor image captured");
2164}
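// ReSizeNV21() above is presumably provided by NV12_resize.h. For reference,
// a disabled sketch of one way to scale NV21 purely with libyuv primitives;
// scaleNV21 and its scratch-buffer contract are hypothetical, not part of
// this HAL, and assume even dimensions.
#if 0
static int scaleNV21(const uint8_t *src, int sw, int sh,
                     uint8_t *dst, int dw, int dh, uint8_t *scratch)
{
    // scratch must hold (sw * sh + dw * dh) * 3 / 2 bytes of I420 data.
    uint8_t *si = scratch;                    // source frame as I420
    uint8_t *di = scratch + sw * sh * 3 / 2;  // destination frame as I420
    libyuv::NV21ToI420(src, sw, src + sw * sh, sw,
                       si, sw, si + sw * sh, sw / 2,
                       si + sw * sh * 5 / 4, sw / 2, sw, sh);
    libyuv::I420Scale(si, sw, si + sw * sh, sw / 2,
                      si + sw * sh * 5 / 4, sw / 2, sw, sh,
                      di, dw, di + dw * dh, dw / 2,
                      di + dw * dh * 5 / 4, dw / 2, dw, dh,
                      libyuv::kFilterNone);
    return libyuv::I420ToNV21(di, dw, di + dw * dh, dw / 2,
                              di + dw * dh * 5 / 4, dw / 2,
                              dst, dw, dst + dw * dh, dw, dw, dh);
}
#endif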
2165
2166void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
2167#if 0
2168 float totalGain = gain/100.0 * kBaseGainFactor;
2169 // Using fixed-point math with 6 bits of fractional precision.
2170 // In fixed-point math, calculate total scaling from electrons to 8bpp
2171 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2172 // In fixed-point math, saturation point of sensor after gain
2173 const int saturationPoint = 64 * 255;
2174 // Fixed-point coefficients for RGB-YUV transform
2175 // Based on JFIF RGB->YUV transform.
2176 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2177 const int rgbToY[] = {19, 37, 7};
2178 const int rgbToCb[] = {-10,-21, 32, 524288};
2179 const int rgbToCr[] = {32,-26, -5, 524288};
2180 // Scale back to 8bpp non-fixed-point
2181 const int scaleOut = 64;
2182 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2183
2184 uint32_t inc = kResolution[0] / stride;
2185 uint32_t outH = kResolution[1] / inc;
2186 for (unsigned int y = 0, outY = 0;
2187 y < kResolution[1]; y+=inc, outY++) {
2188 uint8_t *pxY = img + outY * stride;
2189 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2190 mScene.setReadoutPixel(0,y);
2191 for (unsigned int outX = 0; outX < stride; outX++) {
2192 int32_t rCount, gCount, bCount;
2193 // TODO: Perfect demosaicing is a cheat
2194 const uint32_t *pixel = mScene.getPixelElectrons();
2195 rCount = pixel[Scene::R] * scale64x;
2196 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2197 gCount = pixel[Scene::Gr] * scale64x;
2198 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2199 bCount = pixel[Scene::B] * scale64x;
2200 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2201
2202 *pxY++ = (rgbToY[0] * rCount +
2203 rgbToY[1] * gCount +
2204 rgbToY[2] * bCount) / scaleOutSq;
2205 if (outY % 2 == 0 && outX % 2 == 0) {
2206 *pxVU++ = (rgbToCr[0] * rCount +
2207 rgbToCr[1] * gCount +
2208 rgbToCr[2] * bCount +
2209 rgbToCr[3]) / scaleOutSq;
2210 *pxVU++ = (rgbToCb[0] * rCount +
2211 rgbToCb[1] * gCount +
2212 rgbToCb[2] * bCount +
2213 rgbToCb[3]) / scaleOutSq;
2214 }
2215 for (unsigned int j = 1; j < inc; j++)
2216 mScene.getPixelElectrons();
2217 }
2218 }
2219#else
2220 uint8_t *src;
2221 if (mKernelBuffer) {
2222 src = mKernelBuffer;
2223 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            ALOGI("Scale YV12 frame down\n");
2226
2227 int width = vinfo->preview.format.fmt.pix.width;
2228 int height = vinfo->preview.format.fmt.pix.height;
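            // Note: YV12 keeps V before U, while I420Scale's arguments are
            // named U-then-V. Source and destination planes are passed in
            // the same order, so the chroma channels stay consistent.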
2229 int ret = libyuv::I420Scale(src, width,
2230 src + width * height, width / 2,
2231 src + width * height + width * height / 4, width / 2,
2232 width, height,
2233 b.img, b.width,
2234 b.img + b.width * b.height, b.width / 2,
2235 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2236 b.width, b.height,
2237 libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2240 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2241 int width = vinfo->preview.format.fmt.pix.width;
2242 int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }
2249
2250 YUYVToYV12(src, tmp_buffer, width, height);
2251
2252 int ret = libyuv::I420Scale(tmp_buffer, width,
2253 tmp_buffer + width * height, width / 2,
2254 tmp_buffer + width * height + width * height / 4, width / 2,
2255 width, height,
2256 b.img, b.width,
2257 b.img + b.width * b.height, b.width / 2,
2258 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2259 b.width, b.height,
2260 libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2263 delete [] tmp_buffer;
2264 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2265 int width = vinfo->preview.format.fmt.pix.width;
2266 int height = vinfo->preview.format.fmt.pix.height;
2267 uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
2268
            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }
2273
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
                delete [] tmp_buffer; // don't scale an undecoded buffer
                return;
            }
2279
2280 int ret = libyuv::I420Scale(tmp_buffer, width,
2281 tmp_buffer + width * height, width / 2,
2282 tmp_buffer + width * height + width * height / 4, width / 2,
2283 width, height,
2284 b.img, b.width,
2285 b.img + b.width * b.height, b.width / 2,
2286 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2287 b.width, b.height,
2288 libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2291
2292 delete [] tmp_buffer;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
2297 }
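    // First pass: dequeue a frame, convert it to YV12 in b.img, and cache
    // it in mKernelBuffer for later buffers of the same frame.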
    while (1) {
2299 if (mExitSensorThread) {
2300 break;
2301 }
2302 src = (uint8_t *)get_frame(vinfo);
2303
2304 if (NULL == src) {
2305 if (get_device_status(vinfo)) {
2306 break;
2307 }
2308 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2309 usleep(5000);
2310 mTimeOutCount++;
2311 if (mTimeOutCount > 300) {
2312 force_reset_sensor();
2313 }
2314 continue;
2315 }
2316 mTimeOutCount = 0;
2317 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2318 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2319 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2320 putback_frame(vinfo);
2321 continue;
2322 }
2323 }
2324 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2325 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2326 memcpy(b.img, src, vinfo->preview.buf.length);
2327 } else {
                yv12_memcpy_align32(b.img, src, b.width, b.height);
2329 }
2330 mKernelBuffer = b.img;
2331 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2332 int width = vinfo->preview.format.fmt.pix.width;
2333 int height = vinfo->preview.format.fmt.pix.height;
2334 YUYVToYV12(src, b.img, width, height);
2335 mKernelBuffer = b.img;
2336 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2337 int width = vinfo->preview.format.fmt.pix.width;
2338 int height = vinfo->preview.format.fmt.pix.height;
2339 if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
2340 b.img + width * height, (width + 1) / 2, 0, 0, width, height,
2341 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2342 putback_frame(vinfo);
2343 DBG_LOGA("Decode MJPEG frame failed\n");
2344 continue;
2345 }
2346 mKernelBuffer = b.img;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
2350
2351 break;
2352 }
2353#endif
2355 ALOGVV("YV12 sensor image captured");
2356}
2357
2358void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
2359#if 0
2360 float totalGain = gain/100.0 * kBaseGainFactor;
2361 // Using fixed-point math with 6 bits of fractional precision.
2362 // In fixed-point math, calculate total scaling from electrons to 8bpp
2363 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2364 // In fixed-point math, saturation point of sensor after gain
2365 const int saturationPoint = 64 * 255;
2366 // Fixed-point coefficients for RGB-YUV transform
2367 // Based on JFIF RGB->YUV transform.
2368 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2369 const int rgbToY[] = {19, 37, 7};
2370 const int rgbToCb[] = {-10,-21, 32, 524288};
2371 const int rgbToCr[] = {32,-26, -5, 524288};
2372 // Scale back to 8bpp non-fixed-point
2373 const int scaleOut = 64;
2374 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2375
2376 uint32_t inc = kResolution[0] / stride;
2377 uint32_t outH = kResolution[1] / inc;
2378 for (unsigned int y = 0, outY = 0;
2379 y < kResolution[1]; y+=inc, outY++) {
2380 uint8_t *pxY = img + outY * stride;
2381 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2382 mScene.setReadoutPixel(0,y);
2383 for (unsigned int outX = 0; outX < stride; outX++) {
2384 int32_t rCount, gCount, bCount;
2385 // TODO: Perfect demosaicing is a cheat
2386 const uint32_t *pixel = mScene.getPixelElectrons();
2387 rCount = pixel[Scene::R] * scale64x;
2388 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2389 gCount = pixel[Scene::Gr] * scale64x;
2390 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2391 bCount = pixel[Scene::B] * scale64x;
2392 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2393
2394 *pxY++ = (rgbToY[0] * rCount +
2395 rgbToY[1] * gCount +
2396 rgbToY[2] * bCount) / scaleOutSq;
2397 if (outY % 2 == 0 && outX % 2 == 0) {
2398 *pxVU++ = (rgbToCr[0] * rCount +
2399 rgbToCr[1] * gCount +
2400 rgbToCr[2] * bCount +
2401 rgbToCr[3]) / scaleOutSq;
2402 *pxVU++ = (rgbToCb[0] * rCount +
2403 rgbToCb[1] * gCount +
2404 rgbToCb[2] * bCount +
2405 rgbToCb[3]) / scaleOutSq;
2406 }
2407 for (unsigned int j = 1; j < inc; j++)
2408 mScene.getPixelElectrons();
2409 }
2410 }
2411#else
2412 uint8_t *src;
2413 if (mKernelBuffer) {
2414 src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            //TODO YUYV scale
            //memcpy(img, src, vinfo->preview.buf.length);

        } else
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);

        return;
2423 }
2424
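    // First pass: YUYV is copied through unconverted (no scaling yet, per
    // the TODO above), so only the native preview size is delivered here.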
    while (1) {
2426 if (mExitSensorThread) {
2427 break;
2428 }
2429 src = (uint8_t *)get_frame(vinfo);
2430 if (NULL == src) {
2431 if (get_device_status(vinfo)) {
2432 break;
2433 }
2434 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2435 usleep(5000);
2436 mTimeOutCount++;
2437 if (mTimeOutCount > 300) {
2438 force_reset_sensor();
2439 }
2440 continue;
2441 }
2442 mTimeOutCount = 0;
2443 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2444 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2445 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2446 putback_frame(vinfo);
2447 continue;
2448 }
2449 }
2450 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2451 memcpy(img, src, vinfo->preview.buf.length);
2452 mKernelBuffer = src;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
2456
2457 break;
2458 }
2459#endif
2461 ALOGVV("YUYV sensor image captured");
2462}
2463
2464void Sensor::dump(int fd) {
2465 String8 result;
    result = String8::format("%s, sensor preview information: \n", __FILE__);
    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height: %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);

    // pixelformat is a little-endian FourCC, so print its four bytes as text.
    result.appendFormat("camera preview format: %.4s\n\n",
            (char *) &vinfo->preview.format.fmt.pix.pixelformat);
2473
2474 write(fd, result.string(), result.size());
2475}
2476
2477} // namespace android
2478
2479