/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>


#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
    Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
    * Sensor::kVoltsPerLuxSecond;
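
// With the constants above: (2000 e- / 0.520 V) * 0.100 V/(lux*s)
// ≈ 384.6 electrons per lux-second, the factor handed to Scene (see the
// constructor) to turn scene illuminance into collected photocharge.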

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
    Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100;  // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
    Sensor::kReadNoiseStddevBeforeGain *
    Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
    Sensor::kReadNoiseStddevAfterGain *
    Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
    Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
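
// With the constants above: 33331760 ns / 1200 rows ≈ 27776 ns per row, so
// reading out all 1200 rows takes the full ~1/30 s minimum frame duration.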

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

const usb_frmsize_discrete_t kUsbAvailablePictureSize[] = {
    {4128, 3096},
    {3264, 2448},
    {2592, 1944},
    {2592, 1936},
    {2560, 1920},
    {2688, 1520},
    {2048, 1536},
    {1600, 1200},
    {1920, 1088},
    {1920, 1080},
    {1440, 1080},
    {1280, 960},
    {1280, 720},
    {1024, 768},
    {960, 720},
    {720, 480},
    {640, 480},
    {320, 240},
};

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two,
    // and then inverting the log2. A bias is added to make the relative
    // error symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
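
// Example: sqrtf_approx(4.0f) yields a value within a few percent of 2.0f.
// That accuracy is plenty for the noise-stddev estimate in captureRaw(),
// which only feeds a random dither.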

void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & ~31;
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
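
// Example: ALIGN(1283, 16) == 1296, since (1283 + 15) & ~15 rounds up to
// the next multiple of 16.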

bool IsUsbAvailablePictureSize(const usb_frmsize_discrete_t AvailablePictureSize[], uint32_t width, uint32_t height)
{
    int count = sizeof(kUsbAvailablePictureSize) / sizeof(kUsbAvailablePictureSize[0]);
    for (int i = 0; i < count; i++) {
        if ((width == AvailablePictureSize[i].width) && (height == AvailablePictureSize[i].height)) {
            return true;
        }
    }
    return false;
}

void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}
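
// Both buffers are semi-planar 4:2:0 (NV21): a width*height Y plane
// followed by a width*height/2 interleaved chroma plane, which is why the
// second plane pointers above are offset by width*height bytes.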

Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mTemp_buffer(NULL),
        mExitSensorThread(false),
        mIoctlSupport(0),
        msupportrotate(0),
        mTimeOutCount(0),
        mWait(false),
        mPre_width(0),
        mPre_height(0),
        mFlushFlag(false),
        mSensorWorkFlag(false),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{

}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera3::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    if (NULL == mTemp_buffer) {
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        DBG_LOGB("setOutputFormat :: pre_width = %d, pre_height = %d\n", mPre_width, mPre_height);
        mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("first time allocate mTemp_buffer failed !");
            return -1;
        }
    }

    // Reallocate the scratch buffer if either preview dimension changed.
    if ((mPre_width != vinfo->preview.format.fmt.pix.width) || (mPre_height != vinfo->preview.format.fmt.pix.height)) {
        if (mTemp_buffer) {
            delete [] mTemp_buffer;
            mTemp_buffer = NULL;
        }
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("allocate mTemp_buffer failed !");
            return -1;
        }
    }

    return OK;

}

status_t Sensor::streamOn() {

    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {

    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
        || (vinfo->preview.format.fmt.pix.height != height)
        //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
        ) {

        return true;

    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format to a v4l2 format and then check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }
    ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
    return BAD_VALUE;
}

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;

    mTimeOutCount = 0;

    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }

    if (mTemp_buffer) {
        delete [] mTemp_buffer;
        mTemp_buffer = NULL;
    }

    mSensorWorkFlag = false;

    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

void Sensor::sendExitSingalToSensor() {
    {
        Mutex::Autolock lock(mReadoutMutex);
        mExitSensorThread = true;
        mReadoutComplete.signal();
    }

    {
        Mutex::Autolock lock(mControlMutex);
        mVSync.signal();
    }

    {
        Mutex::Autolock lock(mReadoutMutex);
        mReadoutAvailable.signal();
    }
}
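
// The three signals above wake every waiter that can block shutdown:
// waitForVSync() blocks on mVSync, waitForNewFrame() on mReadoutAvailable,
// and threadLoop() can block on mReadoutComplete while the readout side
// catches up. Signaling all of them lets requestExitAndWait() finish
// promptly in shutDown().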

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("not in [min,max], min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d, min=%d, max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}
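
// The focus area is packed into one control value: the x center in the high
// 16 bits and the y center in the low 16 bits, each offset by 1000
// (presumably mapping the legacy [-1000, 1000] metering-area coordinates to
// a non-negative range). For example, a region centered at (0, 0) becomes
// 0x03E803E8.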

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

void Sensor::setFlushFlag(bool flushFlag) {
    mFlushFlag = flushFlag;
}

status_t Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);
    CAMHAL_LOGVB("%s , E mControlMutex", __FUNCTION__);
    if (mExitSensorThread) {
        return -1;
    }

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    CAMHAL_LOGVB("%s , X mControlMutex , mGotVSync = %d", __FUNCTION__, mGotVSync);
    return mGotVSync;
}

status_t Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mExitSensorThread) {
        return -1;
    }

    if (mCapturedBuffers == NULL) {
        int res;
        CAMHAL_LOGVB("%s , E mReadoutMutex , reltime = %lld", __FUNCTION__, (long long)reltime);
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            if (mFlushFlag) {
                ALOGE("%s , return immediately , mWait = %d", __FUNCTION__, mWait);
                if (mWait) {
                    mWait = false;
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                    mReadoutComplete.signal();
                } else {
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                }
                return -2;
            } else {
                ALOGE("Error waiting for sensor readout signal: %d", res);
                return false;
            }
        }
    }
    if (mWait) {
        mWait = false;
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
        mReadoutComplete.signal();
    } else {
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X", __FUNCTION__);
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    int res;
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        CAMHAL_LOGVB("%s , E mControlMutex", __FUNCTION__);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        CAMHAL_LOGVB("%s , E mReadoutMutex", __FUNCTION__);
        if (mCapturedBuffers != NULL) {
            ALOGE("Waiting for readout thread to catch up!");
            mWait = true;
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X mReadoutMutex", __FUNCTION__);

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
#if 0
            if (get_device_status(vinfo)) {
                listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
            }
#endif
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }

    if (mExitSensorThread) {
        return false;
    }

    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    CAMHAL_LOGVB("%s , X", __FUNCTION__);
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    ALOGVV("size=%zu\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %zu: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
#if PLATFORM_SDK_VERSION <= 22
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
#endif
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add an auxiliary buffer of the right size.
                // Assumes only one BLOB (JPEG) buffer is in
                // mNextCapturedBuffers.
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                uint32_t pixelfmt;
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                        b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    // The JPEG source frame (RGB888) is kept in a separate buffer struct and
    // is put back separately, so only put the preview frame back here.
    if ((!isjpeg) && (mKernelBuffer)) {
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    int pixelfmt;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                    frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];

                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format =\
                    HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                    frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];

                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format =\
                        HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                        frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];

                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                    if (!IsUsbAvailablePictureSize(kUsbAvailablePictureSize, frmsize.discrete.width, frmsize.discrete.height))
                        continue;
                }

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                //TODO: insert in descending order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];

                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no support pixel fmt for jpeg");

    return count;

}

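// picSizes[] above is filled with flat 4-tuples of
// { pixel format, width, height, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT },
// and each enumeration pass insertion-sorts its own range [START, count) so
// the sizes for one format end up in descending width*height order.
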
int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0;
    int framerate = 0;
    int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        //V4L2_PIX_FMT_YVU420
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        /* we got all durations for each resolution of the previous format */
        if (count >= tmp_size)
            break;

        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size-3];
                fival.height = picSizes[size-2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                        // The shortest interval (stepwise.min) gives the
                        // highest frame rate.
                        temp_rate = fival.stepwise.min.denominator/fival.stepwise.min.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        temp_rate = fival.stepwise.min.denominator/fival.stepwise.min.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count >= tmp_size)
                            break;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        if (framerate == 5) {
                            duration[count+3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count+3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count+3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count+3] = (int64_t)33333333L;
                        } else {
                            duration[count+3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;

}
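
// duration[] above mirrors the picSizes[] layout as 4-tuples of
// { format, width, height, minimum frame duration in ns }, where the last
// element is 1e9 / (highest frame rate reported for that size), or a value
// from the hard-coded 5/10/15/30 fps table once interval enumeration ends.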

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L;   // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int)ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            //TODO: insert in descending order
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;

}

bool Sensor::get_sensor_status() {
    return mSensorWorkFlag;
}

void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}
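
// Noise model used above: total variance = gain-dependent read noise plus
// photon shot noise (proportional to the electron count). A uniform sample
// in [-1.25, 1.25) stands in for a Gaussian of roughly matching standard
// deviation, which is cheap and good enough for an emulated sensor.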

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}
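
// scale64x folds the whole electrons-to-8-bit conversion into one integer
// multiply: out = electrons * (64 * totalGain * 255 / kMaxRawValue) / 64.
// The extra factor of 64 keeps precision in integer math; the 255*64
// comparison clamps to full scale before the final divide.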
1912
1913void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
1914#if 0
1915 float totalGain = gain/100.0 * kBaseGainFactor;
1916 // In fixed-point math, calculate total scaling from electrons to 8bpp
1917 int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1918 uint32_t inc = kResolution[0] / stride;
1919
1920 for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
1921 mScene.setReadoutPixel(0, y);
1922 uint8_t *px = img + outY * stride * 3;
1923 for (unsigned int x = 0; x < kResolution[0]; x += inc) {
1924 uint32_t rCount, gCount, bCount;
1925 // TODO: Perfect demosaicing is a cheat
1926 const uint32_t *pixel = mScene.getPixelElectrons();
1927 rCount = pixel[Scene::R] * scale64x;
1928 gCount = pixel[Scene::Gr] * scale64x;
1929 bCount = pixel[Scene::B] * scale64x;
1930
1931 *px++ = rCount < 255*64 ? rCount / 64 : 255;
1932 *px++ = gCount < 255*64 ? gCount / 64 : 255;
1933 *px++ = bCount < 255*64 ? bCount / 64 : 255;
1934 for (unsigned int j = 1; j < inc; j++)
1935 mScene.getPixelElectrons();
1936 }
1937 // TODO: Handle this better
1938 //simulatedTime += kRowReadoutTime;
1939 }
1940#else
1941 uint8_t *src = NULL;
1942 int ret = 0, rotate = 0;
1943 uint32_t width = 0, height = 0;
1944 int dqTryNum = 3;
1945
1946 rotate = getPictureRotate();
1947 width = vinfo->picture.format.fmt.pix.width;
1948 height = vinfo->picture.format.fmt.pix.height;
1949
1950 if (mSensorType == SENSOR_USB) {
1951 releasebuf_and_stop_capturing(vinfo);
1952 } else {
1953 stop_capturing(vinfo);
1954 }
1955
1956 ret = start_picture(vinfo,rotate);
1957 if (ret < 0)
1958 {
1959 ALOGD("start picture failed!");
1960 }
1961 while(1)
1962 {
1963 src = (uint8_t *)get_picture(vinfo);
1964 if ((NULL != src) && (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)) {
1965 while (dqTryNum > 0) {
1966 if (NULL != src) {
1967 putback_picture_frame(vinfo);
1968 }
1969 usleep(10000);
                dqTryNum--;
1971 src = (uint8_t *)get_picture(vinfo);
1972 }
1973 }
1974
1975 if (NULL != src) {
1976 if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }
1982#if ANDROID_PLATFORM_SDK_VERSION > 23
            uint8_t *vBuffer = new uint8_t[width * height / 4];
            if (vBuffer == NULL)
                ALOGE("alloc temporary v buffer failed\n");
            uint8_t *uBuffer = new uint8_t[width * height / 4];
            if (uBuffer == NULL)
                ALOGE("alloc temporary u buffer failed\n");
1989
1990 if (ConvertToI420(src, vinfo->picture.buf.bytesused, tmp_buffer, width, uBuffer, (width + 1) / 2,
1991 vBuffer, (width + 1) / 2, 0, 0, width, height,
1992 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
                putback_picture_frame(vinfo);
                usleep(5000);
                delete [] vBuffer;
                delete [] uBuffer;
                delete [] tmp_buffer;
            } else {
                // Interleave the decoded U/V planes into NV21's single VU plane.
                uint8_t *pUVBuffer = tmp_buffer + width * height;
                for (uint32_t i = 0; i < width * height / 4; i++) {
                    *pUVBuffer++ = *(vBuffer + i);
                    *pUVBuffer++ = *(uBuffer + i);
                }

                delete [] vBuffer;
                delete [] uBuffer;
                nv21_to_rgb24(tmp_buffer, img, width, height);
                delete [] tmp_buffer;
                break;
            }
2013#else
2014 if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
2015 width, tmp_buffer + width * height, (width + 1) / 2, width,
2016 height, width, height, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
                putback_picture_frame(vinfo);
                usleep(5000);
                delete [] tmp_buffer;
            } else {
                nv21_to_rgb24(tmp_buffer, img, width, height);
                delete [] tmp_buffer;
                break;
            }
2026#endif
2027 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2028 if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
2029 yuyv422_to_rgb24(src,img,width,height);
2030 break;
2031 } else {
2032 putback_picture_frame(vinfo);
2033 usleep(5000);
2034 }
2035 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
2036 if (vinfo->picture.buf.length == width * height * 3) {
2037 memcpy(img, src, vinfo->picture.buf.length);
2038 } else {
2039 rgb24_memcpy(img, src, width, height);
2040 }
2041 break;
2042 } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2043 memcpy(img, src, vinfo->picture.buf.length);
2044 break;
2045 }
2046 }
2047 }
    ALOGD("get picture success!");
2049
2050 if (mSensorType == SENSOR_USB) {
2051 releasebuf_and_stop_picture(vinfo);
2052 } else {
2053 stop_picture(vinfo);
2054 }
2055
2056#endif
2057}
2058
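// YUYV is packed 4:2:2: every 4-byte group holds Y0 U Y1 V for two pixels.
// NV21 is a full-resolution Y plane followed by a half-resolution interleaved
// VU plane, so the conversion below copies luma straight through and averages
// each pair of vertically adjacent chroma samples.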
2059void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
2060{
2061 for (int i = 0; i < width * height * 2; i += 2) {
2062 *dst++ = *(src + i);
2063 }
2064
    for (int y = 0; y < height - 1; y += 2) {
2066 for (int j = 0; j < width * 2; j += 4) {
2067 *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
2068 *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
2069 }
2070 src += width * 2 * 2;
2071 }
2072
    if (height & 1) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); // v
            *dst++ = *(src + 1 + j); // u
        }
    }
2078}
2079
2080void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
2081{
    // width should be an even number.
    // chroma (u/v) rows are stored with a 16-aligned stride: ALIGN(width/2, 16).
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width / 2, 16);
    c_size = c_stride * height / 2;
    cr_offset = y_size;
    cb_offset = y_size + c_size;
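    // YV12 plane order: full-resolution Y, then the whole Cr (V) plane, then
    // the whole Cb (U) plane, each chroma plane at quarter resolution with
    // the aligned c_stride computed above.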
2095
    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }
2100
2101 dst = dst_copy;
2102 src = src_copy;
2103
    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) { // one YUYV line holds 2*width bytes
            // average vertically adjacent chroma samples, rounding up
            *(dst + cr_offset + j / 4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j / 4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
2113}
2114
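// Re-initializes the V4L2 stream after the device stops delivering frames:
// the capture loops below call this once get_frame() has returned NULL more
// than 300 times in a row (polling every 5 ms, i.e. roughly 1.5 s).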
2115status_t Sensor::force_reset_sensor() {
2116 DBG_LOGA("force_reset_sensor");
2117 status_t ret;
2118 mTimeOutCount = 0;
2119 ret = streamOff();
2120 ret = setBuffersFormat(vinfo);
2121 ret = streamOn();
2122 DBG_LOGB("%s , ret = %d", __FUNCTION__, ret);
2123 return ret;
2124}
2125
2126void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
2127#if 0
2128 float totalGain = gain/100.0 * kBaseGainFactor;
2129 // Using fixed-point math with 6 bits of fractional precision.
2130 // In fixed-point math, calculate total scaling from electrons to 8bpp
2131 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2132 // In fixed-point math, saturation point of sensor after gain
2133 const int saturationPoint = 64 * 255;
2134 // Fixed-point coefficients for RGB-YUV transform
2135 // Based on JFIF RGB->YUV transform.
2136 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2137 const int rgbToY[] = {19, 37, 7};
2138 const int rgbToCb[] = {-10,-21, 32, 524288};
2139 const int rgbToCr[] = {32,-26, -5, 524288};
2140 // Scale back to 8bpp non-fixed-point
2141 const int scaleOut = 64;
2142 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
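    // These are the JFIF weights scaled by 64; for luma,
    //   Y = 0.299 R + 0.587 G + 0.114 B  ->  {19, 37, 7} after rounding.
    // The chroma offset of 128 appears as 128 * 64 * 64 = 524288 because both
    // the coefficients and the pixel counts carry a 64x fixed-point factor.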
2143
2144 uint32_t inc = kResolution[0] / stride;
2145 uint32_t outH = kResolution[1] / inc;
2146 for (unsigned int y = 0, outY = 0;
2147 y < kResolution[1]; y+=inc, outY++) {
2148 uint8_t *pxY = img + outY * stride;
2149 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2150 mScene.setReadoutPixel(0,y);
2151 for (unsigned int outX = 0; outX < stride; outX++) {
2152 int32_t rCount, gCount, bCount;
2153 // TODO: Perfect demosaicing is a cheat
2154 const uint32_t *pixel = mScene.getPixelElectrons();
2155 rCount = pixel[Scene::R] * scale64x;
2156 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2157 gCount = pixel[Scene::Gr] * scale64x;
2158 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2159 bCount = pixel[Scene::B] * scale64x;
2160 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2161
2162 *pxY++ = (rgbToY[0] * rCount +
2163 rgbToY[1] * gCount +
2164 rgbToY[2] * bCount) / scaleOutSq;
2165 if (outY % 2 == 0 && outX % 2 == 0) {
2166 *pxVU++ = (rgbToCr[0] * rCount +
2167 rgbToCr[1] * gCount +
2168 rgbToCr[2] * bCount +
2169 rgbToCr[3]) / scaleOutSq;
2170 *pxVU++ = (rgbToCb[0] * rCount +
2171 rgbToCb[1] * gCount +
2172 rgbToCb[2] * bCount +
2173 rgbToCb[3]) / scaleOutSq;
2174 }
2175 for (unsigned int j = 1; j < inc; j++)
2176 mScene.getPixelElectrons();
2177 }
2178 }
2179#else
2180 uint8_t *src;
2181
2182 if (mKernelBuffer) {
2183 src = mKernelBuffer;
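        // mKernelBuffer holds the previous frame already converted to NV21
        // (it is set to b.img or mTemp_buffer below), so every branch here
        // treats it as NV21 regardless of the device's pixel format.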
2184 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2185 uint32_t width = vinfo->preview.format.fmt.pix.width;
2186 uint32_t height = vinfo->preview.format.fmt.pix.height;
2187 if ((width == b.width) && (height == b.height)) {
2188 memcpy(b.img, src, b.width * b.height * 3/2);
2189 } else {
2190 ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2191 }
2192 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2193 uint32_t width = vinfo->preview.format.fmt.pix.width;
2194 uint32_t height = vinfo->preview.format.fmt.pix.height;
2195
2196 if ((width == b.width) && (height == b.height)) {
2197 memcpy(b.img, src, b.width * b.height * 3/2);
2198 } else {
2199 ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2200 }
2201 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2202 uint32_t width = vinfo->preview.format.fmt.pix.width;
2203 uint32_t height = vinfo->preview.format.fmt.pix.height;
2204
2205 if ((width == b.width) && (height == b.height)) {
2206 memcpy(b.img, src, b.width * b.height * 3/2);
2207 } else {
2208 ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2209 }
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
2214 }
    while (1) {
2216 if (mFlushFlag) {
2217 break;
2218 }
2219
2220 if (mExitSensorThread) {
2221 break;
2222 }
2223
2224 src = (uint8_t *)get_frame(vinfo);
2225 if (NULL == src) {
2226 if (get_device_status(vinfo)) {
2227 break;
2228 }
2229 ALOGVV("get frame NULL, sleep 5ms");
2230 usleep(5000);
2231 mTimeOutCount++;
2232 if (mTimeOutCount > 300) {
2233 DBG_LOGA("force sensor reset.\n");
2234 force_reset_sensor();
2235 }
2236 continue;
2237 }
2238 mTimeOutCount = 0;
2239 if (mSensorType == SENSOR_USB) {
2240 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2241 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2242 DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2243 putback_frame(vinfo);
2244 continue;
2245 }
2246 }
2247 }
2248 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2249 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2250 memcpy(b.img, src, vinfo->preview.buf.length);
2251 } else {
2252 nv21_memcpy_align32 (b.img, src, b.width, b.height);
2253 }
2254 mKernelBuffer = b.img;
2255 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2256 uint32_t width = vinfo->preview.format.fmt.pix.width;
2257 uint32_t height = vinfo->preview.format.fmt.pix.height;
            memset(mTemp_buffer, 0, width * height * 3 / 2);
2259 YUYVToNV21(src, mTemp_buffer, width, height);
2260 if ((width == b.width) && (height == b.height)) {
2261 memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
2262 mKernelBuffer = b.img;
2263 } else {
2264 if ((b.height % 2) != 0) {
2265 DBG_LOGB("%d , b.height = %d", __LINE__, b.height);
2266 b.height = b.height - 1;
2267 }
2268 ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
2269 mKernelBuffer = mTemp_buffer;
2270 }
2271 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2272 uint32_t width = vinfo->preview.format.fmt.pix.width;
2273 uint32_t height = vinfo->preview.format.fmt.pix.height;
            memset(mTemp_buffer, 0, width * height * 3 / 2);
2275#if ANDROID_PLATFORM_SDK_VERSION > 23
            uint8_t *vBuffer = new uint8_t[width * height / 4];
            if (vBuffer == NULL)
                ALOGE("alloc temporary v buffer failed\n");
            uint8_t *uBuffer = new uint8_t[width * height / 4];
            if (uBuffer == NULL)
                ALOGE("alloc temporary u buffer failed\n");
2282
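            // Two-step conversion: libyuv first decodes the MJPEG frame to
            // I420 (planar Y plus separate U and V planes), then the loop
            // below repacks the chroma planes into NV21's interleaved VU plane.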
2283 if (ConvertToI420(src, vinfo->preview.buf.bytesused, mTemp_buffer, width, uBuffer, (width + 1) / 2,
2284 vBuffer, (width + 1) / 2, 0, 0, width, height,
2285 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
                putback_frame(vinfo);
                ALOGE("%s, %d: Decode MJPEG frame failed\n", __FUNCTION__, __LINE__);
                delete [] vBuffer;
                delete [] uBuffer;
                continue;
            }
            // Interleave the decoded U/V planes into NV21's single VU plane.
            uint8_t *pUVBuffer = mTemp_buffer + width * height;
            for (uint32_t i = 0; i < width * height / 4; i++) {
                *pUVBuffer++ = *(vBuffer + i);
                *pUVBuffer++ = *(uBuffer + i);
            }
            delete [] vBuffer;
            delete [] uBuffer;
2298#else
2299 if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, mTemp_buffer,
2300 width, mTemp_buffer + width * height, (width + 1) / 2, width,
2301 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2302 putback_frame(vinfo);
2303 ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__ , __LINE__);
2304 continue;
2305 }
2306#endif
2307 if ((width == b.width) && (height == b.height)) {
2308 memcpy(b.img, mTemp_buffer, b.width * b.height * 3/2);
2309 mKernelBuffer = b.img;
2310 } else {
2311 if ((b.height % 2) != 0) {
2312 DBG_LOGB("%d, b.height = %d", __LINE__, b.height);
2313 b.height = b.height - 1;
2314 }
2315 ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
2316 mKernelBuffer = mTemp_buffer;
2317 }
2318 }
2319 mSensorWorkFlag = true;
2320 break;
2321 }
2322#endif
2323
2324 ALOGVV("NV21 sensor image captured");
2325}
2326
2327void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
2328#if 0
2329 float totalGain = gain/100.0 * kBaseGainFactor;
2330 // Using fixed-point math with 6 bits of fractional precision.
2331 // In fixed-point math, calculate total scaling from electrons to 8bpp
2332 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2333 // In fixed-point math, saturation point of sensor after gain
2334 const int saturationPoint = 64 * 255;
2335 // Fixed-point coefficients for RGB-YUV transform
2336 // Based on JFIF RGB->YUV transform.
2337 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2338 const int rgbToY[] = {19, 37, 7};
2339 const int rgbToCb[] = {-10,-21, 32, 524288};
2340 const int rgbToCr[] = {32,-26, -5, 524288};
2341 // Scale back to 8bpp non-fixed-point
2342 const int scaleOut = 64;
2343 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2344
2345 uint32_t inc = kResolution[0] / stride;
2346 uint32_t outH = kResolution[1] / inc;
2347 for (unsigned int y = 0, outY = 0;
2348 y < kResolution[1]; y+=inc, outY++) {
2349 uint8_t *pxY = img + outY * stride;
2350 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2351 mScene.setReadoutPixel(0,y);
2352 for (unsigned int outX = 0; outX < stride; outX++) {
2353 int32_t rCount, gCount, bCount;
2354 // TODO: Perfect demosaicing is a cheat
2355 const uint32_t *pixel = mScene.getPixelElectrons();
2356 rCount = pixel[Scene::R] * scale64x;
2357 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2358 gCount = pixel[Scene::Gr] * scale64x;
2359 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2360 bCount = pixel[Scene::B] * scale64x;
2361 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2362
2363 *pxY++ = (rgbToY[0] * rCount +
2364 rgbToY[1] * gCount +
2365 rgbToY[2] * bCount) / scaleOutSq;
2366 if (outY % 2 == 0 && outX % 2 == 0) {
2367 *pxVU++ = (rgbToCr[0] * rCount +
2368 rgbToCr[1] * gCount +
2369 rgbToCr[2] * bCount +
2370 rgbToCr[3]) / scaleOutSq;
2371 *pxVU++ = (rgbToCb[0] * rCount +
2372 rgbToCb[1] * gCount +
2373 rgbToCb[2] * bCount +
2374 rgbToCb[3]) / scaleOutSq;
2375 }
2376 for (unsigned int j = 1; j < inc; j++)
2377 mScene.getPixelElectrons();
2378 }
2379 }
2380#else
2381 uint8_t *src;
2382 if (mKernelBuffer) {
2383 src = mKernelBuffer;
2384 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2385 //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
            ALOGI("Scale YV12 frame down\n");
2387
2388 int width = vinfo->preview.format.fmt.pix.width;
2389 int height = vinfo->preview.format.fmt.pix.height;
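            // The buffer is YV12 (Cr plane stored before Cb), but I420Scale()
            // treats the two chroma planes opaquely, so passing them in their
            // stored order scales each plane independently and preserves the
            // YV12 ordering in the output.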
2390 int ret = libyuv::I420Scale(src, width,
2391 src + width * height, width / 2,
2392 src + width * height + width * height / 4, width / 2,
2393 width, height,
2394 b.img, b.width,
2395 b.img + b.width * b.height, b.width / 2,
2396 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2397 b.width, b.height,
2398 libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2401 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2402 int width = vinfo->preview.format.fmt.pix.width;
2403 int height = vinfo->preview.format.fmt.pix.height;
2404 uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
2405
            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }
2410
2411 YUYVToYV12(src, tmp_buffer, width, height);
2412
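            // Note: YUYVToYV12() above writes chroma rows with a 16-aligned
            // stride (ALIGN(width/2, 16)), while the scale below passes a
            // stride of exactly width/2 -- this assumes width/2 is already a
            // multiple of 16, which holds for the common preview widths.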
2413 int ret = libyuv::I420Scale(tmp_buffer, width,
2414 tmp_buffer + width * height, width / 2,
2415 tmp_buffer + width * height + width * height / 4, width / 2,
2416 width, height,
2417 b.img, b.width,
2418 b.img + b.width * b.height, b.width / 2,
2419 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2420 b.width, b.height,
2421 libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2424 delete [] tmp_buffer;
2425 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2426 int width = vinfo->preview.format.fmt.pix.width;
2427 int height = vinfo->preview.format.fmt.pix.height;
2428 uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
2429
            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }
2434
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                              tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                              width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
                delete [] tmp_buffer;
                return;
            }
2440
2441 int ret = libyuv::I420Scale(tmp_buffer, width,
2442 tmp_buffer + width * height, width / 2,
2443 tmp_buffer + width * height + width * height / 4, width / 2,
2444 width, height,
2445 b.img, b.width,
2446 b.img + b.width * b.height, b.width / 2,
2447 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2448 b.width, b.height,
2449 libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2452
2453 delete [] tmp_buffer;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
2458 }
    while (1) {
2460 if (mFlushFlag) {
2461 break;
2462 }
2463 if (mExitSensorThread) {
2464 break;
2465 }
2466 src = (uint8_t *)get_frame(vinfo);
2467
2468 if (NULL == src) {
2469 if (get_device_status(vinfo)) {
2470 break;
2471 }
2472 ALOGVV("get frame NULL, sleep 5ms");
2473 usleep(5000);
2474 mTimeOutCount++;
2475 if (mTimeOutCount > 300) {
2476 force_reset_sensor();
2477 }
2478 continue;
2479 }
2480 mTimeOutCount = 0;
2481 if (mSensorType == SENSOR_USB) {
2482 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2483 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2484 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2485 putback_frame(vinfo);
2486 continue;
2487 }
2488 }
2489 }
2490 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2491 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2492 memcpy(b.img, src, vinfo->preview.buf.length);
2493 } else {
2494 yv12_memcpy_align32 (b.img, src, b.width, b.height);
2495 }
2496 mKernelBuffer = b.img;
2497 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2498 int width = vinfo->preview.format.fmt.pix.width;
2499 int height = vinfo->preview.format.fmt.pix.height;
2500 YUYVToYV12(src, b.img, width, height);
2501 mKernelBuffer = b.img;
2502 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2503 int width = vinfo->preview.format.fmt.pix.width;
2504 int height = vinfo->preview.format.fmt.pix.height;
2505 if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
2506 b.img + width * height, (width + 1) / 2, 0, 0, width, height,
2507 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2508 putback_frame(vinfo);
2509 DBG_LOGA("Decode MJPEG frame failed\n");
2510 continue;
2511 }
2512 mKernelBuffer = b.img;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
2516 mSensorWorkFlag = true;
2517 break;
2518 }
2519#endif
2520 //mKernelBuffer = src;
2521 ALOGVV("YV12 sensor image captured");
2522}
2523
2524void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
2525#if 0
2526 float totalGain = gain/100.0 * kBaseGainFactor;
2527 // Using fixed-point math with 6 bits of fractional precision.
2528 // In fixed-point math, calculate total scaling from electrons to 8bpp
2529 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2530 // In fixed-point math, saturation point of sensor after gain
2531 const int saturationPoint = 64 * 255;
2532 // Fixed-point coefficients for RGB-YUV transform
2533 // Based on JFIF RGB->YUV transform.
2534 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2535 const int rgbToY[] = {19, 37, 7};
2536 const int rgbToCb[] = {-10,-21, 32, 524288};
2537 const int rgbToCr[] = {32,-26, -5, 524288};
2538 // Scale back to 8bpp non-fixed-point
2539 const int scaleOut = 64;
2540 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2541
2542 uint32_t inc = kResolution[0] / stride;
2543 uint32_t outH = kResolution[1] / inc;
2544 for (unsigned int y = 0, outY = 0;
2545 y < kResolution[1]; y+=inc, outY++) {
2546 uint8_t *pxY = img + outY * stride;
2547 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2548 mScene.setReadoutPixel(0,y);
2549 for (unsigned int outX = 0; outX < stride; outX++) {
2550 int32_t rCount, gCount, bCount;
2551 // TODO: Perfect demosaicing is a cheat
2552 const uint32_t *pixel = mScene.getPixelElectrons();
2553 rCount = pixel[Scene::R] * scale64x;
2554 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2555 gCount = pixel[Scene::Gr] * scale64x;
2556 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2557 bCount = pixel[Scene::B] * scale64x;
2558 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2559
2560 *pxY++ = (rgbToY[0] * rCount +
2561 rgbToY[1] * gCount +
2562 rgbToY[2] * bCount) / scaleOutSq;
2563 if (outY % 2 == 0 && outX % 2 == 0) {
2564 *pxVU++ = (rgbToCr[0] * rCount +
2565 rgbToCr[1] * gCount +
2566 rgbToCr[2] * bCount +
2567 rgbToCr[3]) / scaleOutSq;
2568 *pxVU++ = (rgbToCb[0] * rCount +
2569 rgbToCb[1] * gCount +
2570 rgbToCb[2] * bCount +
2571 rgbToCb[3]) / scaleOutSq;
2572 }
2573 for (unsigned int j = 1; j < inc; j++)
2574 mScene.getPixelElectrons();
2575 }
2576 }
2577#else
2578 uint8_t *src;
2579 if (mKernelBuffer) {
2580 src = mKernelBuffer;
2581 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2582 //TODO YUYV scale
2583 //memcpy(img, src, vinfo->preview.buf.length);
2584
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        return;
2589 }
2590
    while (1) {
2592 if (mFlushFlag) {
2593 break;
2594 }
2595 if (mExitSensorThread) {
2596 break;
2597 }
2598 src = (uint8_t *)get_frame(vinfo);
2599 if (NULL == src) {
2600 if (get_device_status(vinfo)) {
2601 break;
2602 }
2603 ALOGVV("get frame NULL, sleep 5ms");
2604 usleep(5000);
2605 mTimeOutCount++;
2606 if (mTimeOutCount > 300) {
2607 force_reset_sensor();
2608 }
2609 continue;
2610 }
2611 mTimeOutCount = 0;
2612 if (mSensorType == SENSOR_USB) {
2613 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2614 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2615 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2616 putback_frame(vinfo);
2617 continue;
2618 }
2619 }
2620 }
2621 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2622 memcpy(img, src, vinfo->preview.buf.length);
2623 mKernelBuffer = src;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
2627 mSensorWorkFlag = true;
2628 break;
2629 }
2630#endif
2631 //mKernelBuffer = src;
2632 ALOGVV("YUYV sensor image captured");
2633}
2634
2635void Sensor::dump(int fd) {
2636 String8 result;
    result = String8::format("%s, sensor preview information:\n", __FILE__);
2638 result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height: %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);
2641
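    // pixelformat is a V4L2 fourcc, so printing its four bytes as characters
    // yields the human-readable code (e.g. "NV21" or "YUYV").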
2642 result.appendFormat("camera preview format: %.4s\n\n",
2643 (char *) &vinfo->preview.format.fmt.pix.pixelformat);
2644
2645 write(fd, result.string(), result.size());
2646}
2647
2648} // namespace android
2649
2650