path: root/v3/fake-pipeline2/Sensor.cpp (plain)
blob: 8554c017f022e4a62acffac00db26bee5edae70c
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <new> // for std::nothrow, used by the scratch-buffer allocations below
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>

#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
        Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
        * Sensor::kVoltsPerLuxSecond;

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
        Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
        Sensor::kReadNoiseStddevBeforeGain *
        Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
        Sensor::kReadNoiseStddevAfterGain *
        Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
        Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

const usb_frmsize_discrete_t kUsbAvailablePictureSize[] = {
    {4128, 3096},
    {3264, 2448},
    {2592, 1944},
    {2592, 1936},
    {2560, 1920},
    {2688, 1520},
    {2048, 1536},
    {1600, 1200},
    {1920, 1088},
    {1920, 1080},
    {1440, 1080},
    {1280, 960},
    {1280, 720},
    {1024, 768},
    {960, 720},
    {720, 480},
    {640, 480},
    {320, 240},
};

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
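
// Illustrative only (not part of the HAL): a quick way to check the stated
// +-3.6% bound against libm. Kept compiled out, like the other #if 0 blocks
// in this file.
#if 0
static void sqrtf_approx_check() {
    for (float v = 0.5f; v < 4096.0f; v *= 1.5f) {
        float exact = std::sqrt(v);
        float err = (sqrtf_approx(v) - exact) / exact;
        ALOGD("sqrtf_approx(%f) relative error: %f", v, err);
    }
}
#endif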

void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & (~31);
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
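// e.g. ALIGN(30, 16) == 32 and ALIGN(32, 16) == 32: x is rounded up to the
// next multiple of y.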

bool IsUsbAvailablePictureSize(const usb_frmsize_discrete_t AvailablePictureSize[], uint32_t width, uint32_t height)
{
    // Note: as in the original code, the element count is taken from the
    // global table, which is what every caller passes in.
    int count = ARRAY_SIZE(kUsbAvailablePictureSize);
    for (int i = 0; i < count; i++) {
        if ((width == AvailablePictureSize[i].width) && (height == AvailablePictureSize[i].height)) {
            return true;
        }
    }
    return false;
}

void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}

Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mTemp_buffer(NULL),
        mExitSensorThread(false),
        mIoctlSupport(0),
        msupportrotate(0),
        mTimeOutCount(0),
        mWait(false),
        mPre_width(0),
        mPre_height(0),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("Sensor::startUp");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera3::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    if (vinfo == NULL) {
        ALOGE("Unable to allocate VideoInfo");
        return NO_MEMORY;
    }
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    // qc.flags is a bitmask, so test the DISABLED bit rather than comparing
    // for equality.
    if ((qc.flags & V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    // Use the nothrow form so the NULL checks below are meaningful; a plain
    // new[] would throw instead of returning NULL.
    if (NULL == mTemp_buffer) {
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        DBG_LOGB("setOutputFormat :: pre_width = %d, pre_height = %d \n", mPre_width, mPre_height);
        mTemp_buffer = new (std::nothrow) uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("first time allocate mTemp_buffer failed !");
            return -1;
        }
    }

    // Reallocate the scratch buffer if either dimension changed; requiring
    // both to change (the original &&) would skip reallocation when only one
    // of them grows.
    if ((mPre_width != vinfo->preview.format.fmt.pix.width) || (mPre_height != vinfo->preview.format.fmt.pix.height)) {
        if (mTemp_buffer) {
            delete [] mTemp_buffer;
            mTemp_buffer = NULL;
        }
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        mTemp_buffer = new (std::nothrow) uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("allocate mTemp_buffer failed !");
            return -1;
        }
    }

    return OK;
}
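
// The scratch buffer above is sized for one 4:2:0 frame: width * height luma
// bytes plus width * height / 2 chroma bytes. At the default 1600x1200
// preview size that is 1600 * 1200 * 3 / 2 = 2,880,000 bytes.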

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
        || (vinfo->preview.format.fmt.pix.height != height)
        //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
        ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format to a v4l2 format and check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }
    ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
    return BAD_VALUE;
}

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;

    mTimeOutCount = 0;

    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }

    if (mTemp_buffer) {
        delete [] mTemp_buffer;
        mTemp_buffer = NULL;
    }

    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

void Sensor::sendExitSingalToSensor() {
    {
        Mutex::Autolock lock(mReadoutMutex);
        mExitSensorThread = true;
        mReadoutComplete.signal();
    }

    {
        Mutex::Autolock lock(mControlMutex);
        mVSync.signal();
    }

    {
        Mutex::Autolock lock(mReadoutMutex);
        mReadoutAvailable.signal();
    }
}
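
// sendExitSingalToSensor() above signals all three conditions because each
// potential waiter blocks on a different one: threadLoop's catch-up path waits
// on mReadoutComplete, waitForVSync() on mVSync, and waitForNewFrame() on
// mReadoutAvailable. Signalling all of them guarantees that whichever thread
// is blocked wakes up and observes mExitSensorThread.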

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags & V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("not in[min,max], min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}
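
// getExposure() above re-centers the driver's absolute range
// [qc.minimum, qc.maximum] around its midpoint so the framework sees a
// symmetric EV compensation range. For example, a driver range of [0, 8] is
// reported as [-4, +4], with the default shifted by the same midpoint.
// setExposure() below applies the inverse shift before writing the control.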

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d min=%d max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags & V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        // Power-line frequency is a menu control; the original comparison
        // against V4L2_CTRL_TYPE_INTEGER looks like a copy-paste slip (the
        // log message and getAutoFocus()/getAWB() below both expect MENU).
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}
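
// setFocuasArea() above reduces the focus rectangle (in [-1000, 1000]
// metadata coordinates) to its center point, shifts it into 0..2000, and
// packs it as (x_center << 16) | y_center. For example, a region centered at
// (0, 0) becomes (1000 << 16) | 1000 = 0x03E803E8. This packing appears to be
// specific to the Amlogic driver rather than a standard V4L2 encoding.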

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags & V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags & V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

status_t Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);
    CAMHAL_LOGDB("%s , E mControlMutex", __FUNCTION__);
    if (mExitSensorThread) {
        return -1;
    }

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    CAMHAL_LOGDB("%s , X mControlMutex , mGotVSync = %d", __FUNCTION__, mGotVSync);
    return mGotVSync;
}

status_t Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mExitSensorThread) {
        return -1;
    }

    if (mCapturedBuffers == NULL) {
        int res;
        CAMHAL_LOGDB("%s , E mReadoutMutex , reltime = %lld", __FUNCTION__, (long long)reltime);
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    }
    if (mWait) {
        mWait = false;
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
        mReadoutComplete.signal();
    } else {
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
    }
    CAMHAL_LOGDB("%s , X", __FUNCTION__);
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        CAMHAL_LOGDB("%s , E mControlMutex", __FUNCTION__);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        CAMHAL_LOGDB("%s , E mReadoutMutex", __FUNCTION__);
        if (mCapturedBuffers != NULL) {
            ALOGE("Waiting for readout thread to catch up!");
            mWait = true;
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }
    CAMHAL_LOGDB("%s , X mReadoutMutex", __FUNCTION__);

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
#if 0
            if (get_device_status(vinfo)) {
                listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
            }
#endif
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }

    if (mExitSensorThread) {
        return false;
    }

    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }

    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    CAMHAL_LOGDB("%s , X", __FUNCTION__);
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    ALOGVV("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
#if PLATFORM_SDK_VERSION <= 22
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
#endif
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add auxiliary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                uint32_t pixelfmt;
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                        b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    // A JPEG (RGB888) capture renders into its own auxiliary buffer, whose
    // backing frame is put back separately.
    if (!isjpeg) {
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

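// The picSizes[] array filled below is a flat list of 4-tuples in the
// stream-configuration layout the camera3 static metadata expects:
// { format, width, height, direction }, e.g.
// { HAL_PIXEL_FORMAT_YCbCr_420_888, 1280, 720,
//   ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }.
// Entries for each format are kept sorted by decreasing resolution via the
// insertion loops inside the function.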
int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format =\
                HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format =\
                    HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                    frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                    if (!IsUsbAvailablePictureSize(kUsbAvailablePictureSize, frmsize.discrete.width, frmsize.discrete.height))
                        continue;
                }

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                // TODO: insert in descending order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel format for jpeg");

    return count;
}

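// getStreamConfigurationDurations() below mirrors the quad layout above but
// stores { format, width, height, min_frame_duration_ns } per entry. The
// duration comes from the highest frame rate V4L2 reports for that size,
// e.g. a 30 fps interval of 1/30 s maps to 33333333 ns.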
int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0; int framerate = 0; int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // duration is a pointer parameter, so ARRAY_SIZE(duration) would evaluate
    // to sizeof(pointer)/sizeof(int64_t) and zero only one element; use the
    // caller-supplied element count instead.
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        //V4L2_PIX_FMT_YVU420
    };

    for (i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        /* we got all durations of each resolution for the previous format */
        if (count >= tmp_size)
            break;

        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size-3];
                fival.height = picSizes[size-2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    // For CONTINUOUS/STEPWISE, fival.discrete aliases
                    // fival.stepwise.min (they share a union), i.e. the
                    // shortest interval and thus the highest frame rate.
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count >= tmp_size)
                            break;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        if (framerate == 5) {
                            duration[count+3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count+3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count+3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count+3] = (int64_t)33333333L;
                        } else {
                            duration[count+3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L; // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            // TODO: insert in descending order
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}

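// The simulated Bayer capture below models two noise sources: photon shot
// noise, whose variance scales with the collected electron count and the
// squared gain, and read noise with pre- and post-gain components. The
// per-pixel standard deviation is sqrt(readNoiseVar + electronCount *
// noiseVarGain), applied as uniform noise scaled to approximate a Gaussian.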
void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}
1886
void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain / 100.0f * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
#else
    uint8_t *src = NULL;
    int ret = 0, rotate = 0;
    uint32_t width = 0, height = 0;
    int dqTryNum = 3;

    rotate = getPictureRotate();
    width = vinfo->picture.format.fmt.pix.width;
    height = vinfo->picture.format.fmt.pix.height;

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_capturing(vinfo);
    } else {
        stop_capturing(vinfo);
    }

    ret = start_picture(vinfo, rotate);
    if (ret < 0) {
        ALOGD("start picture failed!");
    }
    while (1) {
        src = (uint8_t *)get_picture(vinfo);
        if ((NULL != src) && (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)) {
            // Drain a few queued YUYV frames so the capture uses a fresh one.
            while (dqTryNum > 0) {
                if (NULL != src) {
                    putback_picture_frame(vinfo);
                }
                usleep(10000);
                dqTryNum--;
                src = (uint8_t *)get_picture(vinfo);
            }
        }

        if (NULL != src) {
            if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
                if (tmp_buffer == NULL) {
                    ALOGE("new buffer failed!\n");
                    return;
                }
                if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                        width, tmp_buffer + width * height, (width + 1) / 2, width,
                        height, width, height, libyuv::FOURCC_MJPG) != 0) {
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    // Free the scratch buffer before retrying; otherwise it
                    // leaks on every failed decode.
                    delete [] tmp_buffer;
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer, img, width, height);
                    delete [] tmp_buffer;
                    break;
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
                    yuyv422_to_rgb24(src, img, width, height);
                    break;
                } else {
                    putback_picture_frame(vinfo);
                    usleep(5000);
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
                if (vinfo->picture.buf.length == width * height * 3) {
                    memcpy(img, src, vinfo->picture.buf.length);
                } else {
                    rgb24_memcpy(img, src, width, height);
                }
                break;
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
                memcpy(img, src, vinfo->picture.buf.length);
                break;
            }
        }
    }
    ALOGD("get picture success!");

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}

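// Packed YUYV (4:2:2) stores two pixels in four bytes: [Y0 U Y1 V].
// NV21 is a full-resolution Y plane followed by a half-height interleaved
// V/U plane; the chroma of each pair of source rows is averaged below.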
void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
    // Copy the luma plane: every other byte of the YUYV stream is a Y sample.
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    // Build the interleaved VU plane from row pairs, with rounding.
    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; // V
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; // U
        }
        src += width * 2 * 2;
    }

    // For an odd height, the last row has no partner row to average with.
    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); // V
            *dst++ = *(src + 1 + j); // U
        }
}

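// Packed YUYV to planar YV12: a full Y plane, then the Cr (V) plane, then
// the Cb (U) plane, with each chroma row padded to ALIGN(width / 2, 16)
// as the Android YV12 layout requires.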
void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    // width must be even; chroma rows use a 16-byte-aligned stride.
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width / 2, 16);
    c_size = c_stride * height / 2;
    cr_offset = y_size;            // YV12 stores Cr (V) first...
    cb_offset = y_size + c_size;   // ...followed by Cb (U).

    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) { // one line has 2*width bytes of YUYV
            // Rounded average of the chroma from two neighboring rows.
            *(dst + cr_offset + j / 4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j / 4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
}

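// Recovery for a stalled stream: the capture loops below call this after
// ~300 consecutive empty dequeues (roughly 1.5 s at one 5 ms poll per
// retry), restarting the stream with the current buffer format.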
status_t Sensor::force_reset_sensor() {
    DBG_LOGA("force_reset_sensor");
    status_t ret;
    mTimeOutCount = 0;
    ret = streamOff();
    ret = setBuffersFormat(vinfo);
    ret = streamOn();
    // Note: only the status of the final streamOn() is reported back.
    DBG_LOGB("%s , ret = %d", __FUNCTION__, ret);
    return ret;
}

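// Fills StreamBuffer b with an NV21 frame. If a previous capture left a
// converted frame in mKernelBuffer, it is copied or rescaled directly;
// otherwise a frame is dequeued from the driver and converted. The #if 0
// block kept inside is the original scene-simulation reference (it still
// refers to the older img/stride arguments).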
void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
#if 0
    float totalGain = gain / 100.0f * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[]  = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;

    if (mKernelBuffer) {
        // By this point mKernelBuffer always holds NV21 data, whatever the
        // driver delivered, so the three source formats share one path.
        src = mKernelBuffer;
        uint32_t format = vinfo->preview.format.fmt.pix.pixelformat;
        if (format == V4L2_PIX_FMT_NV21 || format == V4L2_PIX_FMT_YUYV ||
                format == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3 / 2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else {
            ALOGE("Unknown sensor format: %d", format);
        }
        return;
    }
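
    // No converted frame cached yet: dequeue from the driver until a frame
    // arrives or the sensor thread is told to exit; repeated empty polls
    // trigger force_reset_sensor().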
    while (1) {
        if (mExitSensorThread) {
            break;
        }

        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
            if (vinfo->preview.buf.length == b.width * b.height * 3 / 2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                nv21_memcpy_align32(b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            memset(mTemp_buffer, 0, width * height * 3 / 2);
            YUYVToNV21(src, mTemp_buffer, width, height);
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, mTemp_buffer, b.width * b.height * 3 / 2);
                mKernelBuffer = b.img;
            } else {
                // Force an even target height before resizing.
                if ((b.height % 2) != 0) {
                    DBG_LOGB("%d, b.height = %d", __LINE__, b.height);
                    b.height = b.height - 1;
                }
                ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
                mKernelBuffer = mTemp_buffer;
            }
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            memset(mTemp_buffer, 0, width * height * 3 / 2);
            if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, mTemp_buffer,
                    width, mTemp_buffer + width * height, (width + 1) / 2, width,
                    height, width, height, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                ALOGE("%s , %d , Decode MJPEG frame failed \n", __FUNCTION__, __LINE__);
                continue;
            }
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, mTemp_buffer, b.width * b.height * 3 / 2);
                mKernelBuffer = b.img;
            } else {
                if ((b.height % 2) != 0) {
                    DBG_LOGB("%d, b.height = %d", __LINE__, b.height);
                    b.height = b.height - 1;
                }
                ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
                mKernelBuffer = mTemp_buffer;
            }
        }

        break;
    }
#endif

    ALOGVV("NV21 sensor image captured");
}

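// YV12 counterpart of captureNV21(): a fast path that rescales an
// already-converted kernel buffer via libyuv::I420Scale, then a dequeue
// loop handling YV12/YUYV/MJPEG driver formats.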
void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            ALOGI("Scale YV12 frame down \n");

            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            int ret = libyuv::I420Scale(src, width,
                    src + width * height, width / 2,
                    src + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            YUYVToYV12(src, tmp_buffer, width, height);

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
            delete [] tmp_buffer;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            // U and V destination pointers are swapped so the I420 output
            // lands in YV12 (V-before-U) order.
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
            }

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");

            delete [] tmp_buffer;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
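
    // No cached frame: dequeue and convert straight into b.img.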
    while (1) {
        if (mExitSensorThread) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);

        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            if (vinfo->preview.buf.length == b.width * b.height * 3 / 2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                yv12_memcpy_align32(b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            YUYVToYV12(src, b.img, width, height);
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            // As above, U/V destinations are swapped to get YV12 ordering.
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
                    b.img + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                DBG_LOGA("Decode MJPEG frame failed\n");
                continue;
            }
            mKernelBuffer = b.img;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        break;
    }

    ALOGVV("YV12 sensor image captured");
}

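// Fills img with a packed YUYV frame. Note the mKernelBuffer fast path is
// still a TODO (no rescale is done); it returns without touching img, so
// the previous contents of the output buffer are reused.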
void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            //TODO YUYV scale
            //memcpy(img, src, vinfo->preview.buf.length);

        } else
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);

        return;
    }

    while (1) {
        if (mExitSensorThread) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            memcpy(img, src, vinfo->preview.buf.length);
            mKernelBuffer = src;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        break;
    }

    ALOGVV("YUYV sensor image captured");
}

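// Debug dump hook: appends the current preview fps, frame size, and
// pixel-format FOURCC for this sensor to the given fd.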
void Sensor::dump(int fd) {
    String8 result;
    result = String8::format("%s, sensor preview information: \n", __FILE__);
    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height: %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);

    result.appendFormat("camera preview format: %.4s\n\n",
            (char *) &vinfo->preview.format.fmt.pix.pixelformat);

    write(fd, result.string(), result.size());
}

} // namespace android