/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>

#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))
namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
    Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
    * Sensor::kVoltsPerLuxSecond;

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
    Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
    Sensor::kReadNoiseStddevBeforeGain *
    Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
    Sensor::kReadNoiseStddevAfterGain *
    Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
    Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
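// With the constants above this is 33331760 ns / 1200 rows ~= 27776 ns per
// row, so a full 1200-row readout takes ~33.3 ms, matching the ~30 fps
// minimum frame duration.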

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

const uint32_t kUsbAvailableSize[10] = {176, 144, 320, 240, 352, 288, 640, 480, 1280, 720};

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
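
// Illustrative self-check, not part of the original HAL: it exercises
// sqrtf_approx() against libm's sqrtf() to confirm the +-3.6% bound quoted
// above. Guarded out so it never affects the build; enable locally to verify.
#if 0
static void sqrtf_approx_selfcheck() {
    for (float r = 0.25f; r <= 1024.0f; r *= 2.0f) {
        float approx = sqrtf_approx(r);
        float exact = sqrtf(r);
        // Relative error should stay within roughly +-3.6%.
        ALOGD("r=%9.2f approx=%9.4f exact=%9.4f err=%+.3f%%",
              r, approx, exact, (approx - exact) / exact * 100.0f);
    }
}
#endif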

void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
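    // Source rows are padded out to a 32-pixel-aligned stride (e.g. width
    // 1000 -> stride 1024); the destination rows are packed at width*3 bytes.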
    int stride = (width + 31) & (~31);
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
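// e.g. ALIGN(24, 16) == 32 and ALIGN(32, 16) == 32.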

bool IsUsbAvailableSize(const uint32_t kUsbAvailableSize[], uint32_t width, uint32_t height, int count)
{
    int i;
    for (i = 0; i < count; i += 2) {
        if ((width == kUsbAvailableSize[i]) && (height == kUsbAvailableSize[i+1])) {
            return true;
        }
    }
    return false;
}

void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
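    // NV21 is semi-planar: a full-resolution Y plane followed by an
    // interleaved VU plane, so both chroma pointers below sit at
    // base + width * height.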
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}

Sensor::Sensor():
    Thread(false),
    mGotVSync(false),
    mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
    mFrameDuration(kFrameDurationRange[0]),
    mGainFactor(kDefaultSensitivity),
    mNextBuffers(NULL),
    mFrameNumber(0),
    mCapturedBuffers(NULL),
    mListener(NULL),
    mIoctlSupport(0),
    msupportrotate(0),
    mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera2::Sensor",
              ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffers format failed\n");
            return res;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
            || (vinfo->preview.format.fmt.pix.height != height)
            //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
            ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    // Probe in order of preference: MJPEG, then NV21, then YUYV.
    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format to a v4l2 format and check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;
    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }
    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                  __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("level not in [MIN, MAX]; using defaults min=%d, max=%d, def=%d\n",
                 *minExp, *maxExp, *def);
        return true;
    }

    // Re-center the driver's range so the compensation values are symmetric
    // about zero.
    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    // Map the zero-centered compensation value back onto the driver's range.
    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d min=%d max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                  __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
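    // Pack the focus-area center into one control value: the x center
    // (offset by 1000 so it is non-negative) in the high 16 bits, the y
    // center in the low 16 bits.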
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                  __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                  __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

bool Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}

bool Sensor::waitForNewFrame(nsecs_t reltime,
                             nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mCapturedBuffers == NULL) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    } else {
        mReadoutComplete.signal();
    }

    *captureTime = mCaptureTime;
    mCapturedBuffers = NULL;
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        if (mCapturedBuffers != NULL) {
            ALOGV("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
               (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }
    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld us (%f s), fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
           (int)((endRealTime - startRealTime)/1000000),
           (int)(frameDuration / 1000000));
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    DBG_LOGB("size=%d\n", (int)mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
               " %d x %d, format %x, stride %d, buf %p, img %p",
               (int)i, b.streamId, b.width, b.height, b.format, b.stride,
               b.buffer, b.img);
        switch (b.format) {
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add an auxiliary buffer of the right size.
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                uint32_t pixelfmt;
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                        b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        DBG_LOGA("");
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    // Swap width and height when the capture is rotated by
                    // 90 or 270 degrees.
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                      b.format);
                break;
        }
    }
    // The RGB888 JPEG source is written into the separate bAux buffer, whose
    // frame is put back on its own path, so only put the preview frame back
    // here for the non-JPEG case.
    if (!isjpeg) {
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    int pixelfmt;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();
    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete frame sizes are supported

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                int n = ARRAY_SIZE(kUsbAvailableSize);
                if (!IsUsbAvailableSize(kUsbAvailableSize, frmsize.discrete.width, frmsize.discrete.height, n))
                    continue;
            }

            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                     frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            // Shift smaller entries down until the new size's slot is found.
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete frame sizes are supported

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                int n = ARRAY_SIZE(kUsbAvailableSize);
                if (!IsUsbAvailableSize(kUsbAvailableSize, frmsize.discrete.width, frmsize.discrete.height, n))
                    continue;
            }

            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=HAL_PIXEL_FORMAT_YCbCr_420_888\n",
                     frmsize.discrete.width, frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete frame sizes are supported

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format=HAL_PIXEL_FORMAT_YCbCr_422_I\n",
                         frmsize.discrete.width, frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete frame sizes are supported

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                // Insert in descending order of resolution.
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel format for jpeg");

    return count;
}

int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0, framerate = 0, temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // duration[] is a caller-supplied array of `size` entries; sizeof-based
    // tricks don't work on a pointer parameter.
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        //V4L2_PIX_FMT_YVU420
    };
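
    // Like picSizes[], duration[] is filled in 4-entry tuples:
    // {format, width, height, min frame duration in ns}.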
    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size-3];
                fival.height = picSizes[size-2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        duration[count+3] = (int64_t)66666666L; // TODO: use the frame interval reported by the driver
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS ||
                               fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        // stepwise.min is the shortest interval, i.e. the highest rate.
                        temp_rate = fival.stepwise.min.denominator/fival.stepwise.min.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)66666666L; // TODO: use the frame interval reported by the driver
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count + 4 > tmp_size)
                            break;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        if (framerate == 5) {
                            duration[count+3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count+3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count+3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count+3] = (int64_t)33333333L;
                        } else {
                            duration[count+3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L; // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int)ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS ||
                           fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    // The shortest supported interval is the stepwise/continuous minimum.
                    frameDuration =
                        fival.stepwise.min.numerator * 1000000000L / fival.stepwise.min.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete frame sizes are supported

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            // Keep the list in descending order: swap the new entry with its
            // predecessor if it is larger.
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}

void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;
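            // A uniform sample over [-1.25, 1.25) has stddev 2.5/sqrt(12)
            // ~= 0.72, standing in for the unit-stddev gaussian above.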

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
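    // e.g. at the default sensitivity (gain = 100) totalGain equals
    // kBaseGainFactor = 4000/2000 = 2.0, so scale64x = 64*2*255/4000 = 8 and
    // a saturated 2000-electron pixel maps to 2000*8/64 = 250 counts.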
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}

void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
#else
    uint8_t *src = NULL;
    int ret = 0, rotate = 0;
    uint32_t width = 0, height = 0;

    rotate = getPictureRotate();
    width = vinfo->picture.format.fmt.pix.width;
    height = vinfo->picture.format.fmt.pix.height;

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_capturing(vinfo);
    } else {
        stop_capturing(vinfo);
    }

    ret = start_picture(vinfo, rotate);
    if (ret < 0) {
        ALOGD("start picture failed!");
    }
    while (1)
    {
        src = (uint8_t *)get_picture(vinfo);
        if (NULL != src) {
            if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
                if (tmp_buffer == NULL) {
                    ALOGE("new buffer failed!\n");
                    return;
                }
                if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                        width, tmp_buffer + width * height, (width + 1) / 2, width,
                        height, width, height, libyuv::FOURCC_MJPG) != 0) {
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    // Free the temporary buffer before retrying so repeated
                    // decode failures don't leak it.
                    delete [] tmp_buffer;
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer, img, width, height);
                    delete [] tmp_buffer;
                    break;
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
                    yuyv422_to_rgb24(src, img, width, height);
                    break;
                } else {
                    putback_picture_frame(vinfo);
                    usleep(5000);
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
                if (vinfo->picture.buf.length == width * height * 3) {
                    memcpy(img, src, vinfo->picture.buf.length);
                } else {
                    rgb24_memcpy(img, src, width, height);
                }
                break;
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
                memcpy(img, src, vinfo->picture.buf.length);
                break;
            }
        }
    }
    ALOGD("get picture success!");

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}

void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
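    // YUYV packs pixels as Y0 U0 Y1 V0, two bytes per pixel with chroma
    // shared by each horizontal pixel pair; NV21 output is a packed Y plane
    // followed by interleaved V/U samples at quarter resolution.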
    // Copy every other byte to extract the packed Y plane.
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    // Average each vertical pair of chroma samples.
    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
        }
        src += width * 2 * 2;
    }

    // For an odd height there is no partner row to average with.
    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); //v
            *dst++ = *(src + 1 + j); //u
        }
}

void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    //width should be an even number.
    //uv ALIGN 32.
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width/2, 16);
    c_size = c_stride * height/2;
    cr_offset = y_size;
    cb_offset = y_size + c_size;

    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) { //one line has 2*width bytes for yuyv.
            //average the vertical pair with rounding: (u1 + u2 + 1) / 2
            *(dst + cr_offset + j/4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j/4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
}

void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies
1945
1946 uint32_t inc = kResolution[0] / stride;
1947 uint32_t outH = kResolution[1] / inc;
1948 for (unsigned int y = 0, outY = 0;
1949 y < kResolution[1]; y+=inc, outY++) {
1950 uint8_t *pxY = img + outY * stride;
1951 uint8_t *pxVU = img + (outH + outY / 2) * stride;
1952 mScene.setReadoutPixel(0,y);
1953 for (unsigned int outX = 0; outX < stride; outX++) {
1954 int32_t rCount, gCount, bCount;
1955 // TODO: Perfect demosaicing is a cheat
1956 const uint32_t *pixel = mScene.getPixelElectrons();
1957 rCount = pixel[Scene::R] * scale64x;
1958 rCount = rCount < saturationPoint ? rCount : saturationPoint;
1959 gCount = pixel[Scene::Gr] * scale64x;
1960 gCount = gCount < saturationPoint ? gCount : saturationPoint;
1961 bCount = pixel[Scene::B] * scale64x;
1962 bCount = bCount < saturationPoint ? bCount : saturationPoint;
1963
1964 *pxY++ = (rgbToY[0] * rCount +
1965 rgbToY[1] * gCount +
1966 rgbToY[2] * bCount) / scaleOutSq;
1967 if (outY % 2 == 0 && outX % 2 == 0) {
1968 *pxVU++ = (rgbToCr[0] * rCount +
1969 rgbToCr[1] * gCount +
1970 rgbToCr[2] * bCount +
1971 rgbToCr[3]) / scaleOutSq;
1972 *pxVU++ = (rgbToCb[0] * rCount +
1973 rgbToCb[1] * gCount +
1974 rgbToCb[2] * bCount +
1975 rgbToCb[3]) / scaleOutSq;
1976 }
1977 for (unsigned int j = 1; j < inc; j++)
1978 mScene.getPixelElectrons();
1979 }
1980 }
1981#else
    uint8_t *src;

    if (mKernelBuffer) {
        src = mKernelBuffer;
        uint32_t format = vinfo->preview.format.fmt.pix.pixelformat;
        if (format == V4L2_PIX_FMT_NV21 || format == V4L2_PIX_FMT_YUYV ||
                format == V4L2_PIX_FMT_MJPEG) {
            // The cached frame was already converted to NV21 below, so all
            // three source formats are handled identically here: straight
            // copy when the geometry matches, resize otherwise.
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3 / 2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else {
            ALOGE("Unknown sensor format: %d", format);
        }
        return;
    }
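    // No cached frame yet: poll the V4L2 preview stream until a frame
    // arrives, convert it to NV21 in b.img, and cache that result.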
    while (1) {
        if (get_device_status(vinfo)) {
            break;
        }
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            CAMHAL_LOGDA("get frame NULL, sleep 5ms");
            usleep(5000);
            continue;
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
            if (vinfo->preview.buf.length == b.width * b.height * 3 / 2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                nv21_memcpy_align32(b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            YUYVToNV21(src, b.img, width, height);
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img,
                    width, b.img + width * height, (width + 1) / 2, width,
                    height, width, height, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                DBG_LOGA("Decode MJPEG frame failed\n");
                continue;
            }
            mKernelBuffer = b.img;
        }

        break;
    }
#endif

    ALOGVV("NV21 sensor image captured");
}

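/*
 * Fill stream buffer b with a YV12 frame. A cached frame (mKernelBuffer)
 * is rescaled to the requested geometry with libyuv::I420Scale; otherwise
 * a fresh frame is fetched from the preview stream and converted (direct
 * copy for YVU420, YUYVToYV12(), or an MJPEG decode via ConvertToI420).
 */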
void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
#if 0
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
            ALOGI("Scale YV12 frame down\n");

            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            // libyuv::I420Scale treats the chroma planes positionally, so
            // handing it YV12 (V before U) is fine as long as source and
            // destination use the same plane order.
            int ret = libyuv::I420Scale(src, width,
                    src + width * height, width / 2,
                    src + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            YUYVToYV12(src, tmp_buffer, width, height);

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
            delete [] tmp_buffer;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            // ConvertToI420 fills dst_u before dst_v; pointing dst_u past
            // the V-plane region and dst_v right after Y therefore yields
            // YV12 (V first) rather than I420.
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width,
                    tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
            }

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");

            delete [] tmp_buffer;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
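    // No cached frame yet: fetch from the preview stream and convert the
    // result into b.img at the driver's native geometry.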
    while (1) {
        src = (uint8_t *)get_frame(vinfo);

        if (NULL == src) {
            CAMHAL_LOGDA("get frame NULL, sleep 5ms");
            usleep(5000);
            continue;
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            if (vinfo->preview.buf.length == b.width * b.height * 3 / 2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                yv12_memcpy_align32(b.img, src, b.width, b.height);
            }
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            YUYVToYV12(src, b.img, width, height);
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width,
                    b.img + width * height + width * height / 4, (width + 1) / 2,
                    b.img + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                DBG_LOGA("Decode MJPEG frame failed\n");
                continue;
            }
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        break;
    }
#endif
    mKernelBuffer = src;
    ALOGVV("YV12 sensor image captured");
}

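/*
 * Fill img with a packed YUYV frame. Only a same-size copy from a YUYV
 * preview stream is implemented; the cached-buffer path below still has a
 * TODO for scaling, so mismatched geometries are not handled here.
 */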
void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            //TODO YUYV scale
            //memcpy(img, src, vinfo->preview.buf.length);

        } else
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);

        return;
    }

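    // No cached frame yet: copy the next YUYV preview frame through as-is.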
    while (1) {
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            CAMHAL_LOGDA("get frame NULL, sleep 5ms");
            usleep(5000);
            continue;
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            memcpy(img, src, vinfo->preview.buf.length);
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        break;
    }
#endif
    mKernelBuffer = src;
    ALOGVV("YUYV sensor image captured");
}

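/*
 * Write preview state (fps, geometry, pixel format) to fd for dumpsys.
 * The V4L2 pixelformat is a FourCC, so printing its four bytes with %.4s
 * yields a readable tag such as "YUYV" or "NV21".
 */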
void Sensor::dump(int fd) {
    String8 result;
    result = String8::format("%s, sensor preview information: \n", __FILE__);
    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height: %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);

    result.appendFormat("camera preview format: %.4s\n\n",
            (char *) &vinfo->preview.format.fmt.pix.pixelformat);

    write(fd, result.string(), result.size());
}

} // namespace android