path: root/v3/fake-pipeline2/Sensor.cpp
blob: 2c5048ba9dd49983f0ed7d3f48e177836c6836b8
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>

#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
    Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
    * Sensor::kVoltsPerLuxSecond;

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
    Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100;  // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
    Sensor::kReadNoiseStddevBeforeGain *
    Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
    Sensor::kReadNoiseStddevAfterGain *
    Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
    Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;
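
// Worked numbers for the derived constants above (illustrative only, not
// used by the code): kElectronsPerLuxSecond = 2000 / 0.520 * 0.100
// ~= 384.6 electrons per lux-second, and kRowReadoutTime =
// 33331760 ns / 1200 rows ~= 27776 ns per row, so reading out every row
// takes roughly the ~1/30 s minimum frame duration described above.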

const usb_frmsize_discrete_t kUsbAvailablePictureSize[] = {
    {4128, 3096},
    {3264, 2448},
    {2592, 1944},
    {2592, 1936},
    {2560, 1920},
    {2688, 1520},
    {2048, 1536},
    {1600, 1200},
    {1920, 1088},
    {1920, 1080},
    {1440, 1080},
    {1280, 960},
    {1280, 720},
    {1024, 768},
    {960, 720},
    {720, 480},
    {640, 480},
    {320, 240},
};

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two,
    // and then inverting the log2. A bias is added to make the relative
    // error symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
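
// Illustrative trace of the bit trick above (assumed input, not part of the
// pipeline): r = 4.0f has bit pattern 0x40800000; (0x40800000 >> 1) +
// 0x1FBB4000 = 0x3FFB4000, which reinterprets as ~1.963f -- about 1.9%
// below the exact sqrt(4.0f) = 2.0f, within the stated +-3.6% bound.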

void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & ~31;
    int h;
    for (h = 0; h < height; h++) {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}
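
// Stride math assumed by rgb24_memcpy above (illustrative): source rows are
// padded to a 32-pixel boundary, so for width = 720 the padded stride is
// (720 + 31) & ~31 = 736 pixels and each copied row skips 16 * 3 = 48
// padding bytes; for width = 1600 the stride is already aligned and the
// copy is effectively contiguous.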

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}

bool IsUsbAvailablePictureSize(const usb_frmsize_discrete_t AvailablePictureSize[], uint32_t width, uint32_t height)
{
    int i;
    int count = sizeof(kUsbAvailablePictureSize)/sizeof(kUsbAvailablePictureSize[0]);
    for (i = 0; i < count; i++) {
        if ((width == AvailablePictureSize[i].width) && (height == AvailablePictureSize[i].height)) {
            return true;
        }
    }
    return false;
}

void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}
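
// NV21 layout assumed by ReSizeNV21 above (illustrative): a W x H frame is
// W*H luma bytes followed by W*H/2 interleaved chroma bytes, so the chroma
// plane handed to the resizer starts at src + W*H. For example, a 640x480
// frame occupies 640*480*3/2 = 460800 bytes, with chroma at offset 307200.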

Sensor::Sensor():
    Thread(false),
    mGotVSync(false),
    mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
    mFrameDuration(kFrameDurationRange[0]),
    mGainFactor(kDefaultSensitivity),
    mNextBuffers(NULL),
    mFrameNumber(0),
    mCapturedBuffers(NULL),
    mListener(NULL),
    mTemp_buffer(NULL),
    mExitSensorThread(false),
    mIoctlSupport(0),
    msupportrotate(0),
    mTimeOutCount(0),
    mWait(false),
    mPre_width(0),
    mPre_height(0),
    mFlushFlag(false),
    mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera3::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
    case CAMERA3_STREAM_OUTPUT:
        usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
        break;
    case CAMERA3_STREAM_INPUT:
        usage = GRALLOC_USAGE_HW_CAMERA_READ;
        break;
    case CAMERA3_STREAM_BIDIRECTIONAL:
        usage = GRALLOC_USAGE_HW_CAMERA_READ |
                GRALLOC_USAGE_HW_CAMERA_WRITE;
        break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}
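
// Minimal usage sketch for getStreamUsage() above ("consumerUsage" is a
// hypothetical caller-side value, not part of this file):
//
//   uint32_t usage = sensor->getStreamUsage(CAMERA3_STREAM_OUTPUT);
//   usage |= consumerUsage; // e.g. GRALLOC_USAGE_HW_VIDEO_ENCODER
//
// For MMAP/USB sensors the switch result is replaced wholesale with
// texture/render plus SW read/write masks, since frames are filled by the
// CPU rather than by a hardware block.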

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    if (NULL == mTemp_buffer) {
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        DBG_LOGB("setOutputFormat :: pre_width = %d, pre_height = %d\n", mPre_width, mPre_height);
        mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("first time allocate mTemp_buffer failed !");
            return -1;
        }
    }

    // Reallocate the scratch buffer if either dimension changed.
    if ((mPre_width != vinfo->preview.format.fmt.pix.width) || (mPre_height != vinfo->preview.format.fmt.pix.height)) {
        if (mTemp_buffer) {
            delete [] mTemp_buffer;
            mTemp_buffer = NULL;
        }
        mPre_width = vinfo->preview.format.fmt.pix.width;
        mPre_height = vinfo->preview.format.fmt.pix.height;
        mTemp_buffer = new uint8_t[mPre_width * mPre_height * 3 / 2];
        if (mTemp_buffer == NULL) {
            ALOGE("allocate mTemp_buffer failed !");
            return -1;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
            || (vinfo->preview.format.fmt.pix.height != height)
            //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
            ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format to a v4l2 format and check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }
    ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
    return BAD_VALUE;
}
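
// Illustrative call (assumed MJPEG-capable UVC camera):
// halFormatToSensorFormat(HAL_PIXEL_FORMAT_YCbCr_422_I) returns
// V4L2_PIX_FMT_MJPEG, because MJPEG is preferred whenever the driver
// enumerates it; on a YUYV-only device the same call falls through to
// V4L2_PIX_FMT_YUYV.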

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;

    mTimeOutCount = 0;

    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }

    if (mTemp_buffer) {
        delete [] mTemp_buffer;
        mTemp_buffer = NULL;
    }

    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

void Sensor::sendExitSingalToSensor() {
    {
        Mutex::Autolock lock(mReadoutMutex);
        mExitSensorThread = true;
        mReadoutComplete.signal();
    }

    {
        Mutex::Autolock lock(mControlMutex);
        mVSync.signal();
    }

    {
        Mutex::Autolock lock(mReadoutMutex);
        mReadoutAvailable.signal();
    }
}

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
    case ANDROID_CONTROL_EFFECT_MODE_OFF:
        ctl.value = CAM_EFFECT_ENC_NORMAL;
        break;
    case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
        ctl.value = CAM_EFFECT_ENC_COLORINV;
        break;
    case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
        ctl.value = CAM_EFFECT_ENC_SEPIA;
        break;
    default:
        ALOGE("%s: Doesn't support effect mode %d",
                __FUNCTION__, effect);
        return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("not in[min,max], min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}
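
// Worked example for the normalization above (assumed driver values): with
// qc.minimum = 0, qc.maximum = 8, qc.step = 1 and qc.default_value = 4, the
// level count is 9 (within [3, 16]), middle = 4, and the reported EV range
// becomes [-4, +4] with a default of 0, i.e. compensation centered on zero.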

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d, min=%d, max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
    case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
        ctl.value = CAM_ANTIBANDING_OFF;
        break;
    case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
        ctl.value = CAM_ANTIBANDING_50HZ;
        break;
    case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
        ctl.value = CAM_ANTIBANDING_60HZ;
        break;
    case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
        ctl.value = CAM_ANTIBANDING_AUTO;
        break;
    default:
        ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                __FUNCTION__, antiBanding);
        return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}
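
// Packing used above (illustrative): the focus-area center is shifted by
// +1000 (metadata coordinates span [-1000, 1000]) and packed with x in the
// high 16 bits and y in the low 16. For the rect (-100, -100, 100, 100) the
// center is (0, 0), so ctl.value = (1000 << 16) | 1000 = 0x03E803E8.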

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
    case ANDROID_CONTROL_AF_MODE_AUTO:
        ctl.value = CAM_FOCUS_MODE_AUTO;
        break;
    case ANDROID_CONTROL_AF_MODE_MACRO:
        ctl.value = CAM_FOCUS_MODE_MACRO;
        break;
    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        ctl.value = CAM_FOCUS_MODE_CONTI_VID;
        break;
    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
        ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
        break;
    default:
        ALOGE("%s: Emulator doesn't support AF mode %d",
                __FUNCTION__, afMode);
        return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
    case ANDROID_CONTROL_AWB_MODE_AUTO:
        ctl.value = CAM_WB_AUTO;
        break;
    case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
        ctl.value = CAM_WB_INCANDESCENCE;
        break;
    case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
        ctl.value = CAM_WB_FLUORESCENT;
        break;
    case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
        ctl.value = CAM_WB_DAYLIGHT;
        break;
    case ANDROID_CONTROL_AWB_MODE_SHADE:
        ctl.value = CAM_WB_SHADE;
        break;
    default:
        ALOGE("%s: Emulator doesn't support AWB mode %d",
                __FUNCTION__, awbMode);
        return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

void Sensor::setFlushFlag(bool flushFlag) {
    mFlushFlag = flushFlag;
}

status_t Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);
    CAMHAL_LOGVB("%s , E mControlMutex" , __FUNCTION__);
    if (mExitSensorThread) {
        return -1;
    }

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    CAMHAL_LOGVB("%s , X mControlMutex , mGotVSync = %d " , __FUNCTION__ , mGotVSync);
    return mGotVSync;
}

status_t Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mExitSensorThread) {
        return -1;
    }

    if (mCapturedBuffers == NULL) {
        int res;
        CAMHAL_LOGVB("%s , E mReadoutMutex , reltime = %d" , __FUNCTION__, reltime);
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            if (mFlushFlag) {
                ALOGE("%s , return immediately , mWait = %d", __FUNCTION__, mWait);
                if (mWait) {
                    mWait = false;
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                    mReadoutComplete.signal();
                } else {
                    *captureTime = mCaptureTime;
                    mCapturedBuffers = NULL;
                }
                return -2;
            } else {
                ALOGE("Error waiting for sensor readout signal: %d", res);
                return false;
            }
        }
    }
    if (mWait) {
        mWait = false;
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
        mReadoutComplete.signal();
    } else {
        *captureTime = mCaptureTime;
        mCapturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X" , __FUNCTION__);
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        CAMHAL_LOGVB("%s , E mControlMutex" , __FUNCTION__);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        CAMHAL_LOGVB("%s , E mReadoutMutex" , __FUNCTION__);
        if (mCapturedBuffers != NULL) {
            ALOGE("Waiting for readout thread to catch up!");
            mWait = true;
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }
    CAMHAL_LOGVB("%s , X mReadoutMutex" , __FUNCTION__);

    if (mExitSensorThread) {
        return false;
    }
    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
#if 0
            if (get_device_status(vinfo)) {
                listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
            }
#endif
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }

    if (mExitSensorThread) {
        return false;
    }

    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    CAMHAL_LOGVB("%s , X" , __FUNCTION__);
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    ALOGVV("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
#if PLATFORM_SDK_VERSION <= 22
        case HAL_PIXEL_FORMAT_RAW_SENSOR:
            captureRaw(b.img, gain, b.stride);
            break;
#endif
        case HAL_PIXEL_FORMAT_RGB_888:
            captureRGB(b.img, gain, b.stride);
            break;
        case HAL_PIXEL_FORMAT_RGBA_8888:
            captureRGBA(b.img, gain, b.stride);
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // Add an auxiliary buffer of the right size.
            // Assumes only one BLOB (JPEG) buffer is in
            // mNextCapturedBuffers.
            StreamBuffer bAux;
            int orientation;
            orientation = getPictureRotate();
            ALOGD("bAux orientation=%d", orientation);
            uint32_t pixelfmt;
            if ((b.width == vinfo->preview.format.fmt.pix.width &&
                    b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                pixelfmt = getOutputFormat();
                if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                    pixelfmt = HAL_PIXEL_FORMAT_YV12;
                } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                    pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                    pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                } else {
                    pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                }
            } else {
                isjpeg = true;
                pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
            }

            if (!msupportrotate) {
                bAux.streamId = 0;
                bAux.width = b.width;
                bAux.height = b.height;
                bAux.format = pixelfmt;
                bAux.stride = b.width;
                bAux.buffer = NULL;
            } else {
                if ((orientation == 90) || (orientation == 270)) {
                    bAux.streamId = 0;
                    bAux.width = b.height;
                    bAux.height = b.width;
                    bAux.format = pixelfmt;
                    bAux.stride = b.height;
                    bAux.buffer = NULL;
                } else {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                }
            }
            // TODO: Reuse these
            bAux.img = new uint8_t[b.width * b.height * 3];
            mNextCapturedBuffers->push_back(bAux);
            break;
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
            captureNV21(b, gain);
            break;
        case HAL_PIXEL_FORMAT_YV12:
            captureYV12(b, gain);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_422_I:
            captureYUYV(b.img, gain, b.stride);
            break;
        default:
            ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                    b.format);
            break;
        }
    }
    // A JPEG (RGB888) capture is stored in its own buffer struct, and its
    // frame is put back separately.
    if ((!isjpeg) && (mKernelBuffer)) {
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}
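
// BLOB path sketch (illustrative): a JPEG request appends an auxiliary
// buffer bAux sized b.width * b.height * 3 bytes, captured as YUV or RGB888
// depending on whether the picture size/rotation matches the preview; the
// encoded JPEG is presumably written into the original BLOB buffer by the
// downstream compressor, which is why bAux is pushed onto
// mNextCapturedBuffers here instead of being filled directly.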

int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                    frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format = HAL_PIXEL_FORMAT_YCbCr_420_888\n",
                    frmsize.discrete.width, frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format = HAL_PIXEL_FORMAT_YCbCr_420_888\n",
                        frmsize.discrete.width, frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                    if (!IsUsbAvailablePictureSize(kUsbAvailablePictureSize, frmsize.discrete.width, frmsize.discrete.height))
                        continue;
                }

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                // TODO: insert in descending order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel fmt for jpeg");

    return count;
}
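
// Layout produced above (illustrative): picSizes[] holds 4-int tuples of
// {format, width, height, direction}, kept sorted by area in descending
// order within each format block by the stride-4 insertion step. E.g. if
// the driver enumerates 640x480 before 1280x720, the 1280x720 tuple is
// shifted toward the front and the 640x480 tuple ends up after it.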

int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0; int framerate = 0; int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // 'duration' decays to a pointer here, so ARRAY_SIZE() would measure the
    // pointer; clear all 'size' entries instead.
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        //V4L2_PIX_FMT_YVU420
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        /* we got all durations of each resolution for the previous format */
        if (count >= tmp_size)
            break;

        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size-3];
                fival.height = picSizes[size-2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                    } else {
                        // CONTINUOUS/STEPWISE: 'discrete' is not the active
                        // union member here; use the stepwise bound, as
                        // getMinFrameDuration() does.
                        temp_rate = fival.stepwise.max.denominator/fival.stepwise.max.numerator;
                    }
                    if (framerate < temp_rate)
                        framerate = temp_rate;
                    duration[count+0] = (int64_t)(picSizes[size-4]);
                    duration[count+1] = (int64_t)(picSizes[size-3]);
                    duration[count+2] = (int64_t)(picSizes[size-2]);
                    duration[count+3] = (int64_t)((1.0/framerate) * 1000000000);
                    j++;
                } else {
                    if (j > 0) {
                        if (count >= tmp_size)
                            break;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        if (framerate == 5) {
                            duration[count+3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count+3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count+3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count+3] = (int64_t)33333333L;
                        } else {
                            duration[count+3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L;   // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int)ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}
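
// E.g. a discrete 1/30 s interval (numerator 1, denominator 30) yields
// 1 * 1000000000 / 30 = 33333333 ns, which replaces the 66666666 ns
// (15 fps) default above if any enumerated format/size advertises it.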

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            // TODO: insert in descending order
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}

void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}

void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
#else
    uint8_t *src = NULL;
    int ret = 0, rotate = 0;
    uint32_t width = 0, height = 0;
    int dqTryNum = 3;

    rotate = getPictureRotate();
    width = vinfo->picture.format.fmt.pix.width;
    height = vinfo->picture.format.fmt.pix.height;

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_capturing(vinfo);
    } else {
        stop_capturing(vinfo);
    }

    ret = start_picture(vinfo, rotate);
    if (ret < 0) {
        ALOGD("start picture failed!");
    }
    while (1)
    {
        src = (uint8_t *)get_picture(vinfo);
        if ((NULL != src) && (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)) {
            while (dqTryNum > 0) {
                if (NULL != src) {
                    putback_picture_frame(vinfo);
                }
                usleep(10000);
                dqTryNum--;
                src = (uint8_t *)get_picture(vinfo);
            }
        }

        if (NULL != src) {
            if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
                if (tmp_buffer == NULL) {
                    ALOGE("new buffer failed!\n");
                    return;
                }
                if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                        width, tmp_buffer + width * height, (width + 1) / 2, width,
                        height, width, height, libyuv::FOURCC_MJPG) != 0) {
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    delete [] tmp_buffer; // don't leak the scratch buffer when retrying
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer, img, width, height);
                    delete [] tmp_buffer;
                    break;
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
                    yuyv422_to_rgb24(src, img, width, height);
                    break;
                } else {
                    putback_picture_frame(vinfo);
                    usleep(5000);
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
                if (vinfo->picture.buf.length == width * height * 3) {
                    memcpy(img, src, vinfo->picture.buf.length);
                } else {
                    rgb24_memcpy(img, src, width, height);
                }
                break;
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
                memcpy(img, src, vinfo->picture.buf.length);
                break;
            }
        }
    }
    ALOGD("get picture success !");

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}
2019
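// Converts packed YUYV (Y0 U Y1 V, 2 bytes/pixel) to NV21 (full-resolution Y
// plane followed by interleaved VU at half resolution). Chroma is produced
// by averaging each pair of vertically adjacent lines with rounding; an odd
// final row falls back to unaveraged chroma.
//
// Minimal usage sketch (hypothetical sizes, for illustration only):
//   uint8_t yuyv[640 * 480 * 2];      // packed input, 2 bytes/pixel
//   uint8_t nv21[640 * 480 * 3 / 2];  // planar output, 12 bits/pixel
//   YUYVToNV21(yuyv, nv21, 640, 480);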
void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
    // Copy the luma plane: every other byte of the packed YUYV stream
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    // Average each pair of source lines into one interleaved VU row
    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
        }
        src += width * 2 * 2;
    }

    // An odd trailing row has no partner to average with
    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); //v
            *dst++ = *(src + 1 + j); //u
        }
}

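// Converts packed YUYV to planar YV12 (Y plane, then Cr, then Cb). The width
// must be even, and the chroma planes follow the Android YV12 stride rule
// c_stride = ALIGN(width / 2, 16), so for widths whose half is not already
// 16-aligned the destination must be larger than width * height * 3 / 2.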
void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    // width must be an even number;
    // chroma stride is aligned to 16 bytes (c_stride = ALIGN(width / 2, 16)).
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width / 2, 16);
    c_size = c_stride * height / 2;
    cr_offset = y_size;            // V (Cr) plane comes first in YV12
    cb_offset = y_size + c_size;   // then the U (Cb) plane

    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) { // one line has 2*width bytes for yuyv.
            // average two vertically adjacent chroma samples, rounding up:
            // (u1 + u2 + 1) / 2
            *(dst + cr_offset + j / 4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j / 4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
}

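// Recovers a stalled pipeline by cycling the stream off and on. The capture
// loops below call this after roughly 1.5 s of failed dequeues (300 retries
// at 5 ms each). Note that only the status of the final streamOn() is
// returned to the caller.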
status_t Sensor::force_reset_sensor() {
    DBG_LOGA("force_reset_sensor");
    status_t ret;
    mTimeOutCount = 0;
    ret = streamOff();
    ret = setBuffersFormat(vinfo);
    ret = streamOn();
    DBG_LOGB("%s , ret = %d", __FUNCTION__, ret);
    return ret;
}

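// Fills the stream buffer with an NV21 frame. The #if 0 branch is the
// original scene-simulation path (it appears to predate the StreamBuffer
// signature, referencing img/stride directly) and is kept for reference.
// The active path first serves the request from a cached mKernelBuffer when
// one exists (memcpy on an exact size match, ReSizeNV21() otherwise), and
// only then dequeues V4L2 preview frames, converting NV21/YUYV/MJPEG input
// as needed.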
void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
#if 0
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies
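    // Derivation of the coefficients, as a sanity check: the JFIF transform
    // is Y = 0.299 R + 0.587 G + 0.114 B,
    // Cb = -0.1687 R - 0.3313 G + 0.5 B + 128, and
    // Cr = 0.5 R - 0.4187 G - 0.0813 B + 128. Scaling each weight by 64
    // gives {19, 37, 7}, {-10, -21, 32} and {32, -26, -5}. The inputs are
    // already in 64x fixed point, so each product carries a factor of
    // 64 * 64 = 4096; the 128 chroma offset therefore becomes
    // 128 * 4096 = 524288, and dividing by scaleOutSq returns to 8bpp.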

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;

    if (mKernelBuffer) {
        src = mKernelBuffer;
        // NV21, YUYV and MJPEG previews all cache an NV21 frame in
        // mKernelBuffer, so the handling is identical for the three formats.
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21 ||
            vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV ||
            vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3 / 2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
    while (1) {
        if (mFlushFlag) {
            break;
        }

        if (mExitSensorThread) {
            break;
        }

        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
            if (vinfo->preview.buf.length == b.width * b.height * 3 / 2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                nv21_memcpy_align32(b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            memset(mTemp_buffer, 0, width * height * 3 / 2);
            YUYVToNV21(src, mTemp_buffer, width, height);
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, mTemp_buffer, b.width * b.height * 3 / 2);
                mKernelBuffer = b.img;
            } else {
                if ((b.height % 2) != 0) {
                    DBG_LOGB("%d , b.height = %d", __LINE__, b.height);
                    b.height = b.height - 1;
                }
                ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
                mKernelBuffer = mTemp_buffer;
            }
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            memset(mTemp_buffer, 0, width * height * 3 / 2);
            if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, mTemp_buffer,
                    width, mTemp_buffer + width * height, (width + 1) / 2, width,
                    height, width, height, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                ALOGE("%s, %d: Decode MJPEG frame failed\n", __FUNCTION__, __LINE__);
                continue;
            }
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, mTemp_buffer, b.width * b.height * 3 / 2);
                mKernelBuffer = b.img;
            } else {
                if ((b.height % 2) != 0) {
                    DBG_LOGB("%d, b.height = %d", __LINE__, b.height);
                    b.height = b.height - 1;
                }
                ReSizeNV21(vinfo, mTemp_buffer, b.img, b.width, b.height);
                mKernelBuffer = mTemp_buffer;
            }
        }

        break;
    }
#endif

    ALOGVV("NV21 sensor image captured");
}

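// Fills the stream buffer with a YV12 frame, mirroring captureNV21(). A
// cached mKernelBuffer is scaled with libyuv::I420Scale(); the U and V plane
// pointers are swapped consistently on both source and destination, so the
// YV12 (V-before-U) plane ordering survives the I420-named call. The live
// path dequeues V4L2 frames and converts YVU420/YUYV/MJPEG input as needed.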
void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
#if 0
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
            ALOGI("Scale YV12 frame down\n");

            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            int ret = libyuv::I420Scale(src, width,
                    src + width * height, width / 2,
                    src + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            YUYVToYV12(src, tmp_buffer, width, height);

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
            delete [] tmp_buffer;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
            }

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");

            delete [] tmp_buffer;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
    while (1) {
        if (mFlushFlag) {
            break;
        }
        if (mExitSensorThread) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);

        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            if (vinfo->preview.buf.length == b.width * b.height * 3 / 2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                yv12_memcpy_align32(b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            YUYVToYV12(src, b.img, width, height);
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
                    b.img + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                DBG_LOGA("Decode MJPEG frame failed\n");
                continue;
            }
            mKernelBuffer = b.img;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        break;
    }
#endif
    //mKernelBuffer = src;
    ALOGVV("YV12 sensor image captured");
}

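// Fills the buffer with a packed YUYV frame. Only the direct copy from the
// V4L2 preview stream is implemented; scaling a cached kernel buffer to a
// different output size is still a TODO, so that path currently returns
// without writing the buffer.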
void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            //TODO: YUYV scale
            //memcpy(img, src, vinfo->preview.buf.length);
        } else
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);

        return;
    }

    while (1) {
        if (mFlushFlag) {
            break;
        }
        if (mExitSensorThread) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            }
            ALOGVV("get frame NULL, sleep 5ms");
            usleep(5000);
            mTimeOutCount++;
            if (mTimeOutCount > 300) {
                force_reset_sensor();
            }
            continue;
        }
        mTimeOutCount = 0;
        if (mSensorType == SENSOR_USB) {
            if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
                if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                    CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                    putback_frame(vinfo);
                    continue;
                }
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            memcpy(img, src, vinfo->preview.buf.length);
            mKernelBuffer = src;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        break;
    }
#endif
    //mKernelBuffer = src;
    ALOGVV("YUYV sensor image captured");
}

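// Dumps preview state (fps, dimensions, and the V4L2 fourcc) to fd, e.g.
// for the camera service's dumpsys output.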
void Sensor::dump(int fd) {
    String8 result;
    result = String8::format("%s, sensor preview information: \n", __FILE__);
    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height: %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);

    result.appendFormat("camera preview format: %.4s\n\n",
            (char *) &vinfo->preview.format.fmt.pix.pixelformat);

    write(fd, result.string(), result.size());
}

} // namespace android