summaryrefslogtreecommitdiff
path: root/v3/fake-pipeline2/Sensor.cpp (plain)
blob: 5172d0dec01ffa58f3c58fc7ed8bf6ff451f8275
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18//#define LOG_NNDEBUG 0
19#define LOG_TAG "EmulatedCamera3_Sensor"
20
21#ifdef LOG_NNDEBUG
22#define ALOGVV(...) ALOGV(__VA_ARGS__)
23#else
24#define ALOGVV(...) ((void)0)
25#endif
26
27#include <utils/Log.h>
28#include <cutils/properties.h>
29
30#include "../EmulatedFakeCamera2.h"
31#include "Sensor.h"
32#include <cmath>
33#include <cstdlib>
34#include <hardware/camera3.h>
35#include "system/camera_metadata.h"
36#include "libyuv.h"
37#include "NV12_resize.h"
38#include "libyuv/scale.h"
39#include "ge2d_stream.h"
40#include "util.h"
41#include <sys/time.h>
42
43
44#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))
45
46namespace android {
47
48const unsigned int Sensor::kResolution[2] = {1600, 1200};
49
50const nsecs_t Sensor::kExposureTimeRange[2] =
51 {1000L, 30000000000L} ; // 1 us - 30 sec
52const nsecs_t Sensor::kFrameDurationRange[2] =
53 {33331760L, 30000000000L}; // ~1/30 s - 30 sec
54const nsecs_t Sensor::kMinVerticalBlank = 10000L;
55
56const uint8_t Sensor::kColorFilterArrangement =
57 ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
58
59// Output image data characteristics
60const uint32_t Sensor::kMaxRawValue = 4000;
61const uint32_t Sensor::kBlackLevel = 1000;
62
63// Sensor sensitivity
64const float Sensor::kSaturationVoltage = 0.520f;
65const uint32_t Sensor::kSaturationElectrons = 2000;
66const float Sensor::kVoltsPerLuxSecond = 0.100f;
67
68const float Sensor::kElectronsPerLuxSecond =
69 Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
70 * Sensor::kVoltsPerLuxSecond;
71
72const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
73 Sensor::kSaturationElectrons;
74
75const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
76const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
77const float Sensor::kReadNoiseVarBeforeGain =
78 Sensor::kReadNoiseStddevBeforeGain *
79 Sensor::kReadNoiseStddevBeforeGain;
80const float Sensor::kReadNoiseVarAfterGain =
81 Sensor::kReadNoiseStddevAfterGain *
82 Sensor::kReadNoiseStddevAfterGain;
83
84// While each row has to read out, reset, and then expose, the (reset +
85// expose) sequence can be overlapped by other row readouts, so the final
86// minimum frame duration is purely a function of row readout time, at least
87// if there's a reasonable number of rows.
88const nsecs_t Sensor::kRowReadoutTime =
89 Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
90
91const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
92const uint32_t Sensor::kDefaultSensitivity = 100;
93
94/** A few utility functions for math, normal distributions */
95
96// Take advantage of IEEE floating-point format to calculate an approximate
97// square root. Accurate to within +-3.6%
/**
 * Fast approximate square root, accurate to within +/-3.6%.
 *
 * Exploits the IEEE-754 single-precision layout: reinterpreting the float's
 * bits as an integer yields an approximate scaled/biased log2 of the value;
 * halving that and converting back inverts the log into a square root. The
 * additive modifier restores the exponent bias and centers the relative
 * error around the exact answer.
 *
 * The float<->int reinterpretation uses memcpy instead of the original
 * pointer casts: dereferencing a type-punned pointer violates strict
 * aliasing (undefined behavior); memcpy is the well-defined equivalent and
 * optimizes to the same code.
 *
 * @param r non-negative finite input; negative/NaN/Inf inputs produce
 *          meaningless results (unchanged from the original contract).
 */
float sqrtf_approx(float r) {
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i;
    memcpy(&r_i, &r, sizeof(r_i));
    r_i = (r_i >> 1) + modifier;

    float result;
    memcpy(&result, &r_i, sizeof(result));
    return result;
}
110
/**
 * Copy an RGB24 image whose source rows are padded to a 32-pixel-aligned
 * stride into a tightly packed destination.
 *
 * @param dst    destination, must hold width*3*height bytes (no padding)
 * @param src    source, rows are stride*3 bytes where stride = width
 *               rounded up to a multiple of 32 pixels
 * @param width  visible width in pixels
 * @param height number of rows
 *
 * (Removed the unused local 'w' from the original.)
 */
void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    // Source rows are padded so each one starts on a 32-pixel boundary.
    int stride = (width + 31) & ( ~31);
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3); // copy only the visible pixels
        dst += width * 3;            // destination rows are tightly packed
        src += stride * 3;           // skip the source row padding
    }
}
122
// Round x up to the next multiple of y. y must be a power of 2.
static int ALIGN(int x, int y) {
    const int mask = y - 1;
    return (x + mask) & ~mask;
}
127
// Construct the emulated sensor. Thread(false): no Java-callable thread
// support needed. Timing members start from the minimum frame duration, and
// mScene is sized to the full sensor resolution with the derived
// electrons-per-lux-second sensitivity.
Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        // Default exposure fills the whole minimum frame minus vertical blank.
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mIoctlSupport(0),
        msupportrotate(0),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{

}
144
// Stop the capture thread and release the V4L2 device state (see shutDown()).
Sensor::~Sensor() {
    shutDown();
}
148
149status_t Sensor::startUp(int idx) {
150 ALOGV("%s: E", __FUNCTION__);
151 DBG_LOGA("ddd");
152
153 int res;
154 mCapturedBuffers = NULL;
155 res = run("EmulatedFakeCamera2::Sensor",
156 ANDROID_PRIORITY_URGENT_DISPLAY);
157
158 if (res != OK) {
159 ALOGE("Unable to start up sensor capture thread: %d", res);
160 }
161
162 vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
163 vinfo->idx = idx;
164
165 res = camera_open(vinfo);
166 if (res < 0) {
167 ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
168 }
169
170 mSensorType = SENSOR_MMAP;
171 if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
172 mSensorType = SENSOR_USB;
173 }
174
175 if (strstr((const char *)vinfo->cap.card, "share_fd")) {
176 mSensorType = SENSOR_SHARE_FD;
177 }
178
179 if (strstr((const char *)vinfo->cap.card, "front"))
180 mSensorFace = SENSOR_FACE_FRONT;
181 else if (strstr((const char *)vinfo->cap.card, "back"))
182 mSensorFace = SENSOR_FACE_BACK;
183 else
184 mSensorFace = SENSOR_FACE_NONE;
185
186 return res;
187}
188
// Returns the frame-acquisition type (MMAP / USB / shared-fd) detected in
// startUp().
sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}
193status_t Sensor::IoctlStateProbe(void) {
194 struct v4l2_queryctrl qc;
195 int ret = 0;
196 mIoctlSupport = 0;
197 memset(&qc, 0, sizeof(struct v4l2_queryctrl));
198 qc.id = V4L2_ROTATE_ID;
199 ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
200 if((qc.flags == V4L2_CTRL_FLAG_DISABLED) ||( ret < 0)|| (qc.type != V4L2_CTRL_TYPE_INTEGER)){
201 mIoctlSupport &= ~IOCTL_MASK_ROTATE;
202 }else{
203 mIoctlSupport |= IOCTL_MASK_ROTATE;
204 }
205
206 if(mIoctlSupport & IOCTL_MASK_ROTATE){
207 msupportrotate = true;
208 DBG_LOGA("camera support capture rotate");
209 }
210 return mIoctlSupport;
211}
212
213uint32_t Sensor::getStreamUsage(int stream_type)
214{
215 uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
216
217 switch (stream_type) {
218 case CAMERA3_STREAM_OUTPUT:
219 usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
220 break;
221 case CAMERA3_STREAM_INPUT:
222 usage = GRALLOC_USAGE_HW_CAMERA_READ;
223 break;
224 case CAMERA3_STREAM_BIDIRECTIONAL:
225 usage = GRALLOC_USAGE_HW_CAMERA_READ |
226 GRALLOC_USAGE_HW_CAMERA_WRITE;
227 break;
228 }
229 if ((mSensorType == SENSOR_MMAP)
230 || (mSensorType == SENSOR_USB)) {
231 usage = (GRALLOC_USAGE_HW_TEXTURE
232 | GRALLOC_USAGE_HW_RENDER
233 | GRALLOC_USAGE_SW_READ_MASK
234 | GRALLOC_USAGE_SW_WRITE_MASK
235 );
236 }
237
238 return usage;
239}
240
// Record the requested geometry/format in either the still-picture ("jpeg")
// or preview slot of the driver state, and restart the FPS counters.
// Preview mode also (re)negotiates the driver buffers immediately via
// setBuffersFormat(); still mode only stores the desired format here.
//
// @param pixelformat a V4L2 fourcc (callers pass values from
//        halFormatToSensorFormat()/getOutputFormat())
// @return OK, or the setBuffersFormat() error for previews.
status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    // Restart the frame-rate measurement window (see threadLoop()).
    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    return OK;

}
270
// Kick the V4L2 device into streaming mode (queues buffers + VIDIOC_STREAMON
// inside start_capturing()).
status_t Sensor::streamOn() {

    return start_capturing(vinfo);
}
275
// True while the V4L2 device is actively streaming (tracked by the
// start/stop_capturing helpers in vinfo).
bool Sensor::isStreaming() {

    return vinfo->isStreaming;
}
280
281bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
282{
283 if ((vinfo->preview.format.fmt.pix.width != width)
284 ||(vinfo->preview.format.fmt.pix.height != height)
285 //||(vinfo->format.fmt.pix.pixelformat != pixelformat)
286 ) {
287
288 return true;
289
290 }
291
292 return false;
293}
294status_t Sensor::streamOff() {
295 if (mSensorType == SENSOR_USB) {
296 return releasebuf_and_stop_capturing(vinfo);
297 } else {
298 return stop_capturing(vinfo);
299 }
300}
301
302int Sensor::getOutputFormat()
303{
304 struct v4l2_fmtdesc fmt;
305 int ret;
306 memset(&fmt,0,sizeof(fmt));
307 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
308
309 fmt.index = 0;
310 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
311 if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
312 return V4L2_PIX_FMT_MJPEG;
313 fmt.index++;
314 }
315
316 fmt.index = 0;
317 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
318 if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
319 return V4L2_PIX_FMT_NV21;
320 fmt.index++;
321 }
322
323 fmt.index = 0;
324 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
325 if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
326 return V4L2_PIX_FMT_YUYV;
327 fmt.index++;
328 }
329
330 ALOGE("Unable to find a supported sensor format!");
331 return BAD_VALUE;
332}
333
334/* if sensor supports MJPEG, return it first, otherwise
335 * trasform HAL format to v4l2 format then check whether
336 * it is supported.
337 */
338int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
339{
340 struct v4l2_fmtdesc fmt;
341 int ret;
342 memset(&fmt,0,sizeof(fmt));
343 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
344
345 if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
346 pixelfmt = V4L2_PIX_FMT_YVU420;
347 } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
348 pixelfmt = V4L2_PIX_FMT_NV21;
349 } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
350 pixelfmt = V4L2_PIX_FMT_YUYV;
351 } else {
352 pixelfmt = V4L2_PIX_FMT_NV21;
353 }
354
355 fmt.index = 0;
356 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
357 if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
358 return V4L2_PIX_FMT_MJPEG;
359 fmt.index++;
360 }
361
362 fmt.index = 0;
363 while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0){
364 if (fmt.pixelformat == pixelfmt)
365 return pixelfmt;
366 fmt.index++;
367 }
368
369 ALOGE("Unable to find a supported sensor format!");
370 return BAD_VALUE;
371}
372
// Record the rotation (degrees) to apply to the next still capture; consumed
// by captureNewImage()'s BLOB path via getPictureRotate().
void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}
// Rotation (degrees) previously stored via setPictureRotate().
int Sensor::getPictureRotate()
{
    return mRotateValue;
}
381status_t Sensor::shutDown() {
382 ALOGV("%s: E", __FUNCTION__);
383
384 int res;
385 res = requestExitAndWait();
386 if (res != OK) {
387 ALOGE("Unable to shut down sensor capture thread: %d", res);
388 }
389
390 if (vinfo != NULL) {
391 if (mSensorType == SENSOR_USB) {
392 releasebuf_and_stop_capturing(vinfo);
393 } else {
394 stop_capturing(vinfo);
395 }
396 }
397
398 camera_close(vinfo);
399
400 if (vinfo){
401 free(vinfo);
402 vinfo = NULL;
403 }
404 ALOGD("%s: Exit", __FUNCTION__);
405 return res;
406}
407
// Accessor for the synthetic scene generator used by the capture*() paths.
Scene &Sensor::getScene() {
    return mScene;
}
411
// Query the driver's absolute-zoom range.
// On success fills *zoomMin/*zoomMax/*zoomStep from the control's
// minimum/maximum/step and returns 0; if the control is missing, disabled,
// or not an integer control, returns -1 with a harmless 0..0 range.
int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    // NOTE(review): qc.flags is a bitmask; '==' only matches when DISABLED is
    // the only flag set — confirm whether '& V4L2_CTRL_FLAG_DISABLED' was meant.
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || ( ret < 0)
        || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%dzoomMax:%dzoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret ;
}
437
438int Sensor::setZoom(int zoomValue)
439{
440 int ret = 0;
441 struct v4l2_control ctl;
442
443 memset( &ctl, 0, sizeof(ctl));
444 ctl.value = zoomValue;
445 ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
446 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
447 if (ret < 0) {
448 ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
449 }
450 return ret ;
451}
452
453status_t Sensor::setEffect(uint8_t effect)
454{
455 int ret = 0;
456 struct v4l2_control ctl;
457 ctl.id = V4L2_CID_COLORFX;
458
459 switch (effect) {
460 case ANDROID_CONTROL_EFFECT_MODE_OFF:
461 ctl.value= CAM_EFFECT_ENC_NORMAL;
462 break;
463 case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
464 ctl.value= CAM_EFFECT_ENC_COLORINV;
465 break;
466 case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
467 ctl.value= CAM_EFFECT_ENC_SEPIA;
468 break;
469 default:
470 ALOGE("%s: Doesn't support effect mode %d",
471 __FUNCTION__, effect);
472 return BAD_VALUE;
473 }
474
475 DBG_LOGB("set effect mode:%d", effect);
476 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
477 if (ret < 0) {
478 CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno),ret);
479 }
480 return ret ;
481}
482
483#define MAX_LEVEL_FOR_EXPOSURE 16
484#define MIN_LEVEL_FOR_EXPOSURE 3
485
// Query the driver's exposure-compensation range and recenter it on zero.
// If the control is unavailable, or the number of steps falls outside
// [MIN_LEVEL_FOR_EXPOSURE, MAX_LEVEL_FOR_EXPOSURE], a default +/-4 EV range
// with a 1/1 step is reported instead.
//
// NOTE(review): the three exits return different things — the ioctl error
// code, the literal 'true' (1), or 0 — callers apparently only consume the
// out-parameters; confirm before relying on the return value.
int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret=0;
    int level = 0;
    int middle = 0;

    memset( &qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if(ret < 0) {
        // Control not available: report the default +/-4 range.
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if(0 < qc.step)
        level = ( qc.maximum - qc.minimum + 1 )/qc.step;

    // Too many or too few steps: fall back to the default range as well.
    if((level > MAX_LEVEL_FOR_EXPOSURE)
      || (level < MIN_LEVEL_FOR_EXPOSURE)){
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("not in[min,max], min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    // Shift the driver range so the reported range is centered on zero.
    middle = (qc.minimum+qc.maximum)/2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2;//qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}
532
// Apply an exposure-compensation value to the driver.
// expCmp is an EV offset centered on zero; the driver control is centered on
// (max-min)/2, so the offset is re-based before V4L2_CID_EXPOSURE is written.
// A cached mEV suppresses redundant ioctls when the value is unchanged.
status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    // No-op if this EV is already applied.
    if(mEV == expCmp){
        return 0;
    }else{
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    // Query the control range so the offset can be recentered; on failure
    // qc stays zeroed and the offset is applied as-is (only logged).
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno),ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno),ret);
    }
    DBG_LOGB("setExposure value%d mEVmin%d mEVmax%d\n",ctl.value, qc.minimum, qc.maximum);
    return ret ;
}
564
// Enumerate the driver's power-line-frequency menu and translate the entries
// into Android AE antibanding modes.
// Fills antiBanding[] (OFF is always slot 0) up to maxCont entries and
// returns the number filled, or -1 if the control is unusable.
//
// NOTE(review): the type check requires V4L2_CTRL_TYPE_INTEGER yet the loop
// then issues VIDIOC_QUERYMENU (a menu-control ioctl), and the log message
// says "menu type" — confirm whether V4L2_CTRL_TYPE_MENU was intended.
int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
        DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
    } else if ( qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        // Walk the menu entries; unknown names are skipped.
        for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
                continue;
            } else {
                if (strcmp((char*)qm.name,"50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }

            }
        }
    }

    return mode_count;
}
613
614status_t Sensor::setAntiBanding(uint8_t antiBanding)
615{
616 int ret = 0;
617 struct v4l2_control ctl;
618 ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;
619
620 switch (antiBanding) {
621 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
622 ctl.value= CAM_ANTIBANDING_OFF;
623 break;
624 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
625 ctl.value= CAM_ANTIBANDING_50HZ;
626 break;
627 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
628 ctl.value= CAM_ANTIBANDING_60HZ;
629 break;
630 case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
631 ctl.value= CAM_ANTIBANDING_AUTO;
632 break;
633 default:
634 ALOGE("%s: Doesn't support ANTIBANDING mode %d",
635 __FUNCTION__, antiBanding);
636 return BAD_VALUE;
637 }
638
639 DBG_LOGB("anti banding mode:%d", antiBanding);
640 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
641 if ( ret < 0) {
642 CAMHAL_LOGDA("failed to set anti banding mode!\n");
643 return BAD_VALUE;
644 }
645 return ret;
646}
647
// Set the focus window: the center point of rect (x0,y0)-(x1,y1) is packed
// into one 32-bit control value — x-center (+1000) in the high 16 bits,
// y-center (+1000) in the low 16 bits. The +1000 offset presumably maps the
// [-1000,1000] Android metering coordinate space to non-negative values —
// TODO(review): confirm against the driver's V4L2_CID_FOCUS_ABSOLUTE contract.
status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}
659
660
// Enumerate the driver's auto-focus menu and translate entries into Android
// AF modes. Fills afMode[] (OFF is always slot 0) up to maxCount entries and
// returns the number filled, or -1 if the control is unusable.
int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    // NOTE(review): flags compared with '==' — a bitmask test ('&') may have
    // been intended, as elsewhere in this file.
    if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
        DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
    }else if( qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
    }else{
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        // Walk the menu entries; unknown names are skipped.
        for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
                continue;
            } else {
                if (strcmp((char*)qm.name,"auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }

            }
        }
    }

    return mode_count;
}
709
710status_t Sensor::setAutoFocuas(uint8_t afMode)
711{
712 struct v4l2_control ctl;
713 ctl.id = V4L2_CID_FOCUS_AUTO;
714
715 switch (afMode) {
716 case ANDROID_CONTROL_AF_MODE_AUTO:
717 ctl.value = CAM_FOCUS_MODE_AUTO;
718 break;
719 case ANDROID_CONTROL_AF_MODE_MACRO:
720 ctl.value = CAM_FOCUS_MODE_MACRO;
721 break;
722 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
723 ctl.value = CAM_FOCUS_MODE_CONTI_VID;
724 break;
725 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
726 ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
727 break;
728 default:
729 ALOGE("%s: Emulator doesn't support AF mode %d",
730 __FUNCTION__, afMode);
731 return BAD_VALUE;
732 }
733
734 if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
735 CAMHAL_LOGDA("failed to set camera focuas mode!\n");
736 return BAD_VALUE;
737 }
738
739 return OK;
740}
741
// Enumerate the driver's white-balance menu and translate entries into
// Android AWB modes. Fills awbMode[] (OFF is always slot 0) up to maxCount
// entries and returns the number filled, or -1 if the control is unusable.
int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl (vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    // NOTE(review): flags compared with '==' — a bitmask test ('&') may have
    // been intended, as elsewhere in this file.
    if( (ret<0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)){
        DBG_LOGB("camera handle %d can't support this ctrl",vinfo->fd);
    }else if( qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type",vinfo->fd);
    }else{
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        // Walk the menu entries; unknown names are skipped.
        for (index = qc.minimum; index <= qc.maximum; index+= qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if(ioctl (vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0){
                continue;
            } else {
                if (strcmp((char*)qm.name,"auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name,"shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }

            }
        }
    }

    return mode_count;
}
805
806status_t Sensor::setAWB(uint8_t awbMode)
807{
808 int ret = 0;
809 struct v4l2_control ctl;
810 ctl.id = V4L2_CID_DO_WHITE_BALANCE;
811
812 switch (awbMode) {
813 case ANDROID_CONTROL_AWB_MODE_AUTO:
814 ctl.value = CAM_WB_AUTO;
815 break;
816 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
817 ctl.value = CAM_WB_INCANDESCENCE;
818 break;
819 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
820 ctl.value = CAM_WB_FLUORESCENT;
821 break;
822 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
823 ctl.value = CAM_WB_DAYLIGHT;
824 break;
825 case ANDROID_CONTROL_AWB_MODE_SHADE:
826 ctl.value = CAM_WB_SHADE;
827 break;
828 default:
829 ALOGE("%s: Emulator doesn't support AWB mode %d",
830 __FUNCTION__, awbMode);
831 return BAD_VALUE;
832 }
833 ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
834 return ret;
835}
836
// Latch the exposure time (ns) for the next simulated capture; read by
// threadLoop() under mControlMutex.
void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}
842
// Latch the frame duration (ns) for the next simulated capture; read by
// threadLoop() under mControlMutex.
void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}
848
// Latch the sensitivity (ISO gain) for the next simulated capture; read by
// threadLoop() under mControlMutex.
void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}
854
// Hand the buffer set for the next frame to the capture thread; ownership
// semantics: threadLoop() takes the set and clears mNextBuffers so it is
// never reused.
void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}
859
// Record the framework frame number to report with the next capture's
// sensor events.
void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}
864
// Block until the capture thread signals the start of a frame (VSync), or
// 'reltime' ns pass. Returns whether a VSync was actually observed; on an
// unexpected wait error, logs and returns false.
bool Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    // On a timeout mGotVSync is still false, so this reports the truth.
    return mGotVSync;
}
877
878bool Sensor::waitForNewFrame(nsecs_t reltime,
879 nsecs_t *captureTime) {
880 Mutex::Autolock lock(mReadoutMutex);
881 uint8_t *ret;
882 if (mCapturedBuffers == NULL) {
883 int res;
884 res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
885 if (res == TIMED_OUT) {
886 return false;
887 } else if (res != OK || mCapturedBuffers == NULL) {
888 ALOGE("Error waiting for sensor readout signal: %d", res);
889 return false;
890 }
891 } else {
892 mReadoutComplete.signal();
893 }
894
895 *captureTime = mCaptureTime;
896 mCapturedBuffers = NULL;
897 return true;
898}
899
// Out-of-line (empty) destructor definition for the listener interface.
Sensor::SensorListener::~SensorListener() {
}
902
// Register the listener notified of EXPOSURE_START events from threadLoop().
// Pass NULL to detach. Not owned by the Sensor.
void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}
907
908status_t Sensor::readyToRun() {
909 int res;
910 ALOGV("Starting up sensor thread");
911 mStartupTime = systemTime();
912 mNextCaptureTime = 0;
913 mNextCapturedBuffers = NULL;
914
915 DBG_LOGA("");
916
917 return OK;
918}
919
// Capture-thread main loop. Each pass simulates one frame interval:
// snapshot controls + signal VSync, publish the previous frame's readout,
// capture into the new buffer set, then sleep out the remainder of the
// frame duration. Returns true so the Thread base class keeps looping.
bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        // Snapshot all control state under the lock so the rest of the loop
        // can run without holding mControlMutex.
        Mutex::Autolock lock(mControlMutex);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        // If the consumer hasn't taken the previous frame yet, wait for it
        // before overwriting mCapturedBuffers.
        if (mCapturedBuffers != NULL) {
            ALOGV("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        // Shared-fd sensors blit with GE2D; everything else goes through the
        // per-format software capture path.
        if ( mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount ++;
    }
    // Simple FPS meter: report and reset the window every 100 frames.
    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    // Sleep out whatever is left of the frame interval; nanosleep is retried
    // with the remaining time on interruption.
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    return true;
};
1044
1045int Sensor::captureNewImageWithGe2d() {
1046
1047 uint32_t gain = mGainFactor;
1048 mKernelPhysAddr = 0;
1049
1050
1051 while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
1052 usleep(5000);
1053 }
1054
1055 // Might be adding more buffers, so size isn't constant
1056 for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
1057 const StreamBuffer &b = (*mNextCapturedBuffers)[i];
1058 fillStream(vinfo, mKernelPhysAddr, b);
1059 }
1060 putback_frame(vinfo);
1061 mKernelPhysAddr = 0;
1062
1063 return 0;
1064
1065}
1066
// Software capture path: fill every destination buffer in
// mNextCapturedBuffers according to its pixel format. BLOB (JPEG) requests
// don't render here; they append an auxiliary RGB888 buffer that a later
// stage compresses, sized/swapped per the pending still rotation when the
// driver can rotate. Always returns 0.
int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    DBG_LOGB("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add auxillary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                isjpeg = true;
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d",orientation);
                if (!msupportrotate) {
                    // Driver can't rotate: keep the requested geometry.
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = HAL_PIXEL_FORMAT_RGB_888;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    // Driver rotates the capture itself, so a 90/270 turn
                    // swaps the aux buffer's width and height.
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = HAL_PIXEL_FORMAT_RGB_888;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = HAL_PIXEL_FORMAT_RGB_888;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                // Appending extends the loop: the new RGB888 buffer is
                // rendered by the RGB_888 case on a later iteration.
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    if (!isjpeg) { //jpeg buffer that is rgb888 has been save in the different buffer struct;
        // whose buffer putback separately.
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}
1151
int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    // Enumerate the driver's supported frame sizes and fill picSizes[] with
    // 4-word tuples [pixel format, width, height, direction]. Three passes
    // append IMPLEMENTATION_DEFINED, YCbCr_420_888, and BLOB (JPEG) segments;
    // each segment is kept sorted by descending width*height via an insertion
    // step. 'size' is the capacity of picSizes[] in uint32_t words. Returns
    // the number of uint32_t entries written.
    // NOTE(review): kAvailableFormats is never referenced — confirm callers.
    int res;
    int i, j, k, START;
    int count = 0;
    int pixelfmt;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w,support_h;

    // Optional build property caps the advertised size (default: no cap).
    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if(property_get("ro.camera.preview.MaxSize", property, NULL) > 0){
        CAMHAL_LOGDB("support Max Preview Size :%s",property);
        if(sscanf(property,"%dx%d",&support_w,&support_h)!=2){
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize,0,sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    // Pass 1: HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED entries.
    START = 0;
    for(i=0;;i++, count+=4){
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0){
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type

            // Widths must be 16-aligned for downstream processing.
            if (0 != (frmsize.discrete.width%16))
                continue;

            if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i)
                continue;

            // Insertion step: shift smaller entries up one slot so this
            // segment stays sorted by descending width*height; only the
            // width/height words move (format/direction are uniform here).
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];

                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;
        }

    }

    // Pass 2: HAL_PIXEL_FORMAT_YCbCr_420_888 entries (same enumeration).
    START = count;
    for(i=0;;i++, count+=4){
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0){
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type

            if (0 != (frmsize.discrete.width%16))
                continue;

            if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format =\
                HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                frmsize.discrete.height);
            if (0 == i)
                continue;

            // Same descending-order insertion step as pass 1.
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];

                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;
        }

    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for(i=0;;i++, count+=4){
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0){
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type

                if (0 != (frmsize.discrete.width%16))
                    continue;

                if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format =\
                    HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                    frmsize.discrete.height);
                if (0 == i)
                    continue;

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];

                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;
            }

        }
    }
#endif

    // Pass 3: BLOB (JPEG) entries. Probe candidate source formats in
    // preference order and keep the first one the driver supports.
    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j<(int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize,0,sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for(i=0;;i++, count+=4){
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0){
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type

                if (0 != (frmsize.discrete.width%16))
                    continue;

                //if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
                //    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i)
                    continue;

                //TODO insert in descend order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];

                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;
            }

        }

        // A non-zero index means this source format yielded sizes; stop.
        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no support pixel fmt for jpeg");

    return count;

}
1385
1386int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
1387{
1388 int ret=0; int framerate=0; int temp_rate=0;
1389 struct v4l2_frmivalenum fival;
1390 int i,j=0;
1391 int count = 0;
1392 int tmp_size = size;
1393 memset(duration, 0 ,sizeof(int64_t)*ARRAY_SIZE(duration));
1394 int pixelfmt_tbl[] = {
1395 V4L2_PIX_FMT_MJPEG,
1396 V4L2_PIX_FMT_YVU420,
1397 V4L2_PIX_FMT_NV21,
1398 V4L2_PIX_FMT_RGB24,
1399 V4L2_PIX_FMT_YUYV,
1400 // V4L2_PIX_FMT_YVU420
1401 };
1402
1403 for( i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++)
1404 {
1405 for( ; size > 0; size-=4)
1406 {
1407 memset(&fival, 0, sizeof(fival));
1408
1409 for (fival.index = 0;;fival.index++)
1410 {
1411 fival.pixel_format = pixelfmt_tbl[i];
1412 fival.width = picSizes[size-3];
1413 fival.height = picSizes[size-2];
1414 if((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
1415 if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE){
1416 temp_rate = fival.discrete.denominator/fival.discrete.numerator;
1417 if(framerate < temp_rate)
1418 framerate = temp_rate;
1419 duration[count+0] = (int64_t)(picSizes[size-4]);
1420 duration[count+1] = (int64_t)(picSizes[size-3]);
1421 duration[count+2] = (int64_t)(picSizes[size-2]);
1422 duration[count+3] = (int64_t)66666666L;//(int64_t)(framerate), here we can get frame interval from camera driver
1423 j++;
1424 } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS){
1425 temp_rate = fival.discrete.denominator/fival.discrete.numerator;
1426 if(framerate < temp_rate)
1427 framerate = temp_rate;
1428 duration[count+0] = (int64_t)picSizes[size-4];
1429 duration[count+1] = (int64_t)picSizes[size-3];
1430 duration[count+2] = (int64_t)picSizes[size-2];
1431 duration[count+3] = (int64_t)66666666L;//(int64_t)(framerate), here we can get frame interval from camera driver
1432 j++;
1433 } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE){
1434 temp_rate = fival.discrete.denominator/fival.discrete.numerator;
1435 if(framerate < temp_rate)
1436 framerate = temp_rate;
1437 duration[count+0] = (int64_t)picSizes[size-4];
1438 duration[count+1] = (int64_t)picSizes[size-3];
1439 duration[count+2] = (int64_t)picSizes[size-2];
1440 duration[count+3] = (int64_t)66666666L;//(int64_t)(framerate), here we can get frame interval from camera driver
1441 j++;
1442 }
1443 } else {
1444 if (j > 0) {
1445 if (count > tmp_size)
1446 break;
1447 duration[count+0] = (int64_t)(picSizes[size-4]);
1448 duration[count+1] = (int64_t)(picSizes[size-3]);
1449 duration[count+2] = (int64_t)(picSizes[size-2]);
1450 if (framerate == 5) {
1451 duration[count+3] = (int64_t)200000000L;
1452 } else if (framerate == 10) {
1453 duration[count+3] = (int64_t)100000000L;
1454 } else if (framerate == 15) {
1455 duration[count+3] = (int64_t)66666666L;
1456 } else if (framerate == 30) {
1457 duration[count+3] = (int64_t)33333333L;
1458 } else {
1459 duration[count+3] = (int64_t)66666666L;
1460 }
1461 count += 4;
1462 break;
1463 } else {
1464 break;
1465 }
1466 }
1467 }
1468 j=0;
1469 }
1470 size = tmp_size;
1471 }
1472
1473 return count;
1474
1475}
1476
int64_t Sensor::getMinFrameDuration()
{
    // Probe the driver for the shortest supported frame interval across a
    // fixed set of common pixel formats and resolutions. Returns the minimum
    // frame duration in nanoseconds; defaults to 1/15 s if nothing better is
    // reported.
    int64_t tmpDuration =  66666666L; // 1/15 s
    int64_t frameDuration =  66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i,j;

    uint32_t pixelfmt_tbl[]={
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[]={
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    // Keep the smallest discrete interval seen so far.
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    // NOTE(review): stepwise.max is the LONGEST interval in a
                    // continuous/stepwise range; using it as the *minimum*
                    // duration looks suspect — verify against the driver.
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        // If the last resolution probed yielded any intervals for this pixel
        // format (index advanced past 0), stop probing other formats.
        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}
1532
int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    // Fill picSizes[] with [width, height] pairs enumerated from the driver
    // for the preview (NV21) or still-picture (RGB24) pixel format, roughly
    // ordered by descending resolution. 'size' is the capacity of picSizes[]
    // in int32_t entries. Returns the number of entries written.
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w,support_h;
    int preview_fmt;

    // Optional build property caps the advertised size (default: no cap).
    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if(property_get("ro.camera.preview.MaxSize", property, NULL) > 0){
        CAMHAL_LOGDB("support Max Preview Size :%s",property);
        if(sscanf(property,"%dx%d",&support_w,&support_h)!=2){
            support_w = 10000;
            support_h = 10000;
        }
    }


    memset(&frmsize,0,sizeof(frmsize));
    // Base format is hard-coded to NV21; the branches below then pick the
    // actual enumeration format depending on preview vs. still capture.
    preview_fmt = V4L2_PIX_FMT_NV21;//getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for(i=0;;i++, count += 2){
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0){
            DBG_LOGB("index=%d, break\n", i);
            break;
        }


        if(frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE){ //only support this type

            // Widths must be 16-aligned for downstream processing.
            if (0 != (frmsize.discrete.width%16))
                continue;

            if((frmsize.discrete.width > support_w) && (frmsize.discrete.height >support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i)
                continue;

            //TODO insert in descend order
            // Single-step swap with the previous pair only — this does not
            // fully sort the list (hence the TODO above).
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

        }

    }

    return count;

}
1614
1615void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
1616 float totalGain = gain/100.0 * kBaseGainFactor;
1617 float noiseVarGain = totalGain * totalGain;
1618 float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
1619 + kReadNoiseVarAfterGain;
1620
1621 int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
1622 mScene.setReadoutPixel(0,0);
1623 for (unsigned int y = 0; y < kResolution[1]; y++ ) {
1624 int *bayerRow = bayerSelect + (y & 0x1) * 2;
1625 uint16_t *px = (uint16_t*)img + y * stride;
1626 for (unsigned int x = 0; x < kResolution[0]; x++) {
1627 uint32_t electronCount;
1628 electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];
1629
1630 // TODO: Better pixel saturation curve?
1631 electronCount = (electronCount < kSaturationElectrons) ?
1632 electronCount : kSaturationElectrons;
1633
1634 // TODO: Better A/D saturation curve?
1635 uint16_t rawCount = electronCount * totalGain;
1636 rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;
1637
1638 // Calculate noise value
1639 // TODO: Use more-correct Gaussian instead of uniform noise
1640 float photonNoiseVar = electronCount * noiseVarGain;
1641 float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
1642 // Scaled to roughly match gaussian/uniform noise stddev
1643 float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;
1644
1645 rawCount += kBlackLevel;
1646 rawCount += noiseStddev * noiseSample;
1647
1648 *px++ = rawCount;
1649 }
1650 // TODO: Handle this better
1651 //simulatedTime += kRowReadoutTime;
1652 }
1653 ALOGVV("Raw sensor image captured");
1654}
1655
1656void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
1657 float totalGain = gain/100.0 * kBaseGainFactor;
1658 // In fixed-point math, calculate total scaling from electrons to 8bpp
1659 int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1660 uint32_t inc = kResolution[0] / stride;
1661
1662 for (unsigned int y = 0, outY = 0; y < kResolution[1]; y+=inc, outY++ ) {
1663 uint8_t *px = img + outY * stride * 4;
1664 mScene.setReadoutPixel(0, y);
1665 for (unsigned int x = 0; x < kResolution[0]; x+=inc) {
1666 uint32_t rCount, gCount, bCount;
1667 // TODO: Perfect demosaicing is a cheat
1668 const uint32_t *pixel = mScene.getPixelElectrons();
1669 rCount = pixel[Scene::R] * scale64x;
1670 gCount = pixel[Scene::Gr] * scale64x;
1671 bCount = pixel[Scene::B] * scale64x;
1672
1673 *px++ = rCount < 255*64 ? rCount / 64 : 255;
1674 *px++ = gCount < 255*64 ? gCount / 64 : 255;
1675 *px++ = bCount < 255*64 ? bCount / 64 : 255;
1676 *px++ = 255;
1677 for (unsigned int j = 1; j < inc; j++)
1678 mScene.getPixelElectrons();
1679 }
1680 // TODO: Handle this better
1681 //simulatedTime += kRowReadoutTime;
1682 }
1683 ALOGVV("RGBA sensor image captured");
1684}
1685
void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
    // Take a still picture and write it into img as packed RGB24. Stops the
    // preview stream, starts a picture capture at the requested rotation,
    // busy-waits for a frame, converts it to RGB24, then stops the picture
    // stream. 'gain' and 'stride' are only used by the disabled synthetic
    // path below.
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
#else
    uint8_t *src = NULL;
    int ret = 0, rotate = 0;
    uint32_t width = 0, height = 0;

    rotate = getPictureRotate();
    width = vinfo->picture.format.fmt.pix.width;
    height = vinfo->picture.format.fmt.pix.height;

    // Preview must be stopped before switching the device to picture mode;
    // USB sensors additionally need their buffers released.
    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_capturing(vinfo);
    } else {
        stop_capturing(vinfo);
    }

    ret = start_picture(vinfo,rotate);
    if (ret < 0)
    {
        ALOGD("start picture failed!");
    }
    // Poll (5 ms steps) until the driver delivers a picture frame.
    // NOTE(review): there is no timeout — this loops forever if the driver
    // never produces a frame.
    while(1)
    {
        src = (uint8_t *)get_picture(vinfo);
        if (NULL != src) {
            break;
        }

        usleep(5000);
    }
    ALOGD("get picture success !");

    if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG){
        // MJPEG: decode to NV21 in a temp buffer, then convert to RGB24.
        uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
        if ( tmp_buffer == NULL) {
            ALOGE("new buffer failed!\n");
            return;
        }
        if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                    width, tmp_buffer + width * height, (width + 1) / 2, width,
                    height, width, height, libyuv::FOURCC_MJPG) != 0) {
            DBG_LOGA("Decode MJPEG frame failed\n");
        }
        nv21_to_rgb24(tmp_buffer,img,width,height);
        if (tmp_buffer != NULL)
            delete [] tmp_buffer;
    } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
        yuyv422_to_rgb24(src,img,width,height);
    }

    if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24){
        // RGB24 source: copy straight through; fall back to a row-wise copy
        // when the driver buffer length implies padded rows.
        if (vinfo->picture.buf.length == width*height*3) {
            memcpy(img, src, vinfo->picture.buf.length);
        } else {
            rgb24_memcpy( img, src, width, height);
        }
    }

    // Leave picture mode so the caller can restart preview.
    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}
1778
1779void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
1780{
1781 for (int i = 0; i < width * height * 2; i += 2) {
1782 *dst++ = *(src + i);
1783 }
1784
1785 for (int y = 0; y < height - 1; y +=2) {
1786 for (int j = 0; j < width * 2; j += 4) {
1787 *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
1788 *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
1789 }
1790 src += width * 2 * 2;
1791 }
1792
1793 if (height & 1)
1794 for (int j = 0; j < width * 2; j += 4) {
1795 *dst++ = *(src + 3 + j); //v
1796 *dst++ = *(src + 1 + j); //u
1797 }
1798}
1799
void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    // Convert packed YUYV into planar YV12: Y plane, then Cr plane, then Cb
    // plane, with chroma rows averaged in vertical pairs.
    //width should be an even number.
    //uv ALIGN 32.
    int i,j,stride,c_stride,c_size,y_size,cb_offset,cr_offset;
    unsigned char *dst_copy,*src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width*height;
    c_stride = ALIGN(width/2, 16);   // chroma rows padded to 16-byte multiples
    c_size = c_stride * height/2;
    cr_offset = y_size;              // Cr plane directly follows Y
    cb_offset = y_size+c_size;       // Cb plane follows Cr

    // Pass 1 (luma): every second source byte is a Y sample.
    for(i=0;i< y_size;i++){
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    // Pass 2 (chroma): average each vertical pair of samples. 'dst' itself is
    // advanced by one chroma stride per two source rows, so the fixed
    // cr_offset/cb_offset stay relative to the current chroma row.
    for(i=0;i<height;i+=2){
        for(j=1;j<width*2;j+=4){//one line has 2*width bytes for yuyv.
            //ceil(u1+u2)/2
            *(dst+cr_offset+j/4)= (*(src+j+2) + *(src+j+2+width*2) + 1)/2;
            *(dst+cb_offset+j/4)= (*(src+j) + *(src+j+width*2) + 1)/2;
        }
        dst += c_stride;
        src += width*4;
    }
}
1834
1835
void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
    // Fill b.img with an NV21 frame. Two paths:
    //  - mKernelBuffer set: a frame was already fetched during this capture;
    //    convert/scale it from the cached kernel buffer into b.
    //  - otherwise: poll get_frame() until a frame arrives, convert it into b,
    //    and cache the kernel pointer in mKernelBuffer for later consumers.
    // 'gain' is only used by the disabled synthetic-scene path below.
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10,-21, 32, 524288};
    const int rgbToCr[] = {32,-26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y+=inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0,y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                    rgbToY[1] * gCount +
                    rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                        rgbToCr[1] * gCount +
                        rgbToCr[2] * bCount +
                        rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                        rgbToCb[1] * gCount +
                        rgbToCb[2] * bCount +
                        rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;

    if (mKernelBuffer) {
        // Fast path: reuse the frame fetched earlier in this capture cycle.
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
            //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
            // Scale the NV21 kernel frame down to the buffer's dimensions.
            structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                                     (mmInt32)vinfo->preview.format.fmt.pix.height,
                                     (mmInt32)vinfo->preview.format.fmt.pix.width,
                                     IC_FORMAT_YCbCr420_lp,
                                     (mmByte *) src,
                                     (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                                     0};

            structConvImage output = {(mmInt32)b.width,
                                      (mmInt32)b.height,
                                      (mmInt32)b.width,
                                      IC_FORMAT_YCbCr420_lp,
                                      (mmByte *) b.img,
                                      (mmByte *) b.img + b.width * b.height,
                                      0};

            if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
                ALOGE("Sclale NV21 frame down failed!\n");
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            // YUYV: convert to NV21 in a temp buffer, then scale into b.
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if ( tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            YUYVToNV21(src, tmp_buffer, width, height);

            structConvImage input = {(mmInt32)width,
                                     (mmInt32)height,
                                     (mmInt32)width,
                                     IC_FORMAT_YCbCr420_lp,
                                     (mmByte *) tmp_buffer,
                                     (mmByte *) tmp_buffer + width * height,
                                     0};

            structConvImage output = {(mmInt32)b.width,
                                      (mmInt32)b.height,
                                      (mmInt32)b.width,
                                      IC_FORMAT_YCbCr420_lp,
                                      (mmByte *) b.img,
                                      (mmByte *) b.img + b.width * b.height,
                                      0};

            if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
                ALOGE("Sclale NV21 frame down failed!\n");

            delete [] tmp_buffer;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            // MJPEG: the cached kernel buffer already holds decoded NV21 (see
            // the slow path below, which caches b.img), so a straight copy
            // suffices; the decode/scale variants are kept disabled.
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;

#if 0
            uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];

            if ( tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }
#endif

#if 0
            if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused,
                b.img,
                b.width, b.img + b.width * b.height, (b.width + 1) / 2, b.width,
                b.height, b.width, b.height, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
            }
#else
            memcpy(b.img, src, b.width * b.height * 3/2);
#endif

#if 0
            structConvImage input = {(mmInt32)width,
                                     (mmInt32)height,
                                     (mmInt32)width,
                                     IC_FORMAT_YCbCr420_lp,
                                     (mmByte *) tmp_buffer,
                                     (mmByte *) tmp_buffer + width * height,
                                     0};

            structConvImage output = {(mmInt32)b.width,
                                      (mmInt32)b.height,
                                      (mmInt32)b.width,
                                      IC_FORMAT_YCbCr420_lp,
                                      (mmByte *) b.img,
                                      (mmByte *) b.img + b.width * b.height,
                                      0};

            if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
                ALOGE("Sclale NV21 frame down failed!\n");

            delete [] tmp_buffer;
#endif
        } else {
            ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return ;
    }
    // Slow path: poll the driver (5 ms steps) until a frame is available,
    // convert it into b, and cache the kernel buffer pointer.
    while(1){
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            CAMHAL_LOGDA("get frame NULL, sleep 5ms");
            usleep(5000);
            continue;
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
            memcpy(b.img, src, vinfo->preview.buf.length);
            mKernelBuffer = src;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            YUYVToNV21(src, b.img, width, height);
            mKernelBuffer = src;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            // On decode failure, return the frame to the driver and retry.
            if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img,
                    width, b.img + width * height, (width + 1) / 2, width,
                    height, width, height, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                DBG_LOGA("Decode MJPEG frame failed\n");
                continue;
            }
            // Cache the decoded NV21 (not the raw MJPEG) for the fast path.
            mKernelBuffer = b.img;
        }

        break;
    }
#endif

    ALOGVV("NV21 sensor image captured");
}
2031
2032void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
2033#if 0
2034 float totalGain = gain/100.0 * kBaseGainFactor;
2035 // Using fixed-point math with 6 bits of fractional precision.
2036 // In fixed-point math, calculate total scaling from electrons to 8bpp
2037 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2038 // In fixed-point math, saturation point of sensor after gain
2039 const int saturationPoint = 64 * 255;
2040 // Fixed-point coefficients for RGB-YUV transform
2041 // Based on JFIF RGB->YUV transform.
2042 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2043 const int rgbToY[] = {19, 37, 7};
2044 const int rgbToCb[] = {-10,-21, 32, 524288};
2045 const int rgbToCr[] = {32,-26, -5, 524288};
2046 // Scale back to 8bpp non-fixed-point
2047 const int scaleOut = 64;
2048 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2049
2050 uint32_t inc = kResolution[0] / stride;
2051 uint32_t outH = kResolution[1] / inc;
2052 for (unsigned int y = 0, outY = 0;
2053 y < kResolution[1]; y+=inc, outY++) {
2054 uint8_t *pxY = img + outY * stride;
2055 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2056 mScene.setReadoutPixel(0,y);
2057 for (unsigned int outX = 0; outX < stride; outX++) {
2058 int32_t rCount, gCount, bCount;
2059 // TODO: Perfect demosaicing is a cheat
2060 const uint32_t *pixel = mScene.getPixelElectrons();
2061 rCount = pixel[Scene::R] * scale64x;
2062 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2063 gCount = pixel[Scene::Gr] * scale64x;
2064 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2065 bCount = pixel[Scene::B] * scale64x;
2066 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2067
2068 *pxY++ = (rgbToY[0] * rCount +
2069 rgbToY[1] * gCount +
2070 rgbToY[2] * bCount) / scaleOutSq;
2071 if (outY % 2 == 0 && outX % 2 == 0) {
2072 *pxVU++ = (rgbToCr[0] * rCount +
2073 rgbToCr[1] * gCount +
2074 rgbToCr[2] * bCount +
2075 rgbToCr[3]) / scaleOutSq;
2076 *pxVU++ = (rgbToCb[0] * rCount +
2077 rgbToCb[1] * gCount +
2078 rgbToCb[2] * bCount +
2079 rgbToCb[3]) / scaleOutSq;
2080 }
2081 for (unsigned int j = 1; j < inc; j++)
2082 mScene.getPixelElectrons();
2083 }
2084 }
2085#else
2086 uint8_t *src;
2087 if (mKernelBuffer) {
2088 src = mKernelBuffer;
2089 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2090 //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
2091 ALOGI("Sclale YV12 frame down \n");
2092
2093 int width = vinfo->preview.format.fmt.pix.width;
2094 int height = vinfo->preview.format.fmt.pix.height;
2095 int ret = libyuv::I420Scale(src, width,
2096 src + width * height, width / 2,
2097 src + width * height + width * height / 4, width / 2,
2098 width, height,
2099 b.img, b.width,
2100 b.img + b.width * b.height, b.width / 2,
2101 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2102 b.width, b.height,
2103 libyuv::kFilterNone);
2104 if (ret < 0)
2105 ALOGE("Sclale YV12 frame down failed!\n");
2106 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2107 int width = vinfo->preview.format.fmt.pix.width;
2108 int height = vinfo->preview.format.fmt.pix.height;
2109 uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
2110
2111 if ( tmp_buffer == NULL) {
2112 ALOGE("new buffer failed!\n");
2113 return;
2114 }
2115
2116 YUYVToYV12(src, tmp_buffer, width, height);
2117
2118 int ret = libyuv::I420Scale(tmp_buffer, width,
2119 tmp_buffer + width * height, width / 2,
2120 tmp_buffer + width * height + width * height / 4, width / 2,
2121 width, height,
2122 b.img, b.width,
2123 b.img + b.width * b.height, b.width / 2,
2124 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2125 b.width, b.height,
2126 libyuv::kFilterNone);
2127 if (ret < 0)
2128 ALOGE("Sclale YV12 frame down failed!\n");
2129 delete [] tmp_buffer;
2130 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2131 int width = vinfo->preview.format.fmt.pix.width;
2132 int height = vinfo->preview.format.fmt.pix.height;
2133 uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
2134
2135 if ( tmp_buffer == NULL) {
2136 ALOGE("new buffer failed!\n");
2137 return;
2138 }
2139
2140 if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
2141 tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
2142 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2143 DBG_LOGA("Decode MJPEG frame failed\n");
2144 }
2145
2146 int ret = libyuv::I420Scale(tmp_buffer, width,
2147 tmp_buffer + width * height, width / 2,
2148 tmp_buffer + width * height + width * height / 4, width / 2,
2149 width, height,
2150 b.img, b.width,
2151 b.img + b.width * b.height, b.width / 2,
2152 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2153 b.width, b.height,
2154 libyuv::kFilterNone);
2155 if (ret < 0)
2156 ALOGE("Sclale YV12 frame down failed!\n");
2157
2158 delete [] tmp_buffer;
2159 } else {
2160 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2161 }
2162 return ;
2163 }
2164 while(1){
2165 src = (uint8_t *)get_frame(vinfo);
2166
2167 if (NULL == src) {
2168 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2169 usleep(5000);
2170 continue;
2171 }
2172 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2173 memcpy(b.img, src, vinfo->preview.buf.length);
2174 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2175 int width = vinfo->preview.format.fmt.pix.width;
2176 int height = vinfo->preview.format.fmt.pix.height;
2177 YUYVToYV12(src, b.img, width, height);
2178 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2179 int width = vinfo->preview.format.fmt.pix.width;
2180 int height = vinfo->preview.format.fmt.pix.height;
2181 if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
2182 b.img + width * height, (width + 1) / 2, 0, 0, width, height,
2183 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2184 putback_frame(vinfo);
2185 DBG_LOGA("Decode MJPEG frame failed\n");
2186 continue;
2187 }
2188 } else {
2189 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2190 }
2191
2192 break;
2193 }
2194#endif
2195 mKernelBuffer = src;
2196 ALOGVV("YV12 sensor image captured");
2197}
2198
2199void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
2200#if 0
2201 float totalGain = gain/100.0 * kBaseGainFactor;
2202 // Using fixed-point math with 6 bits of fractional precision.
2203 // In fixed-point math, calculate total scaling from electrons to 8bpp
2204 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2205 // In fixed-point math, saturation point of sensor after gain
2206 const int saturationPoint = 64 * 255;
2207 // Fixed-point coefficients for RGB-YUV transform
2208 // Based on JFIF RGB->YUV transform.
2209 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2210 const int rgbToY[] = {19, 37, 7};
2211 const int rgbToCb[] = {-10,-21, 32, 524288};
2212 const int rgbToCr[] = {32,-26, -5, 524288};
2213 // Scale back to 8bpp non-fixed-point
2214 const int scaleOut = 64;
2215 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2216
2217 uint32_t inc = kResolution[0] / stride;
2218 uint32_t outH = kResolution[1] / inc;
2219 for (unsigned int y = 0, outY = 0;
2220 y < kResolution[1]; y+=inc, outY++) {
2221 uint8_t *pxY = img + outY * stride;
2222 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2223 mScene.setReadoutPixel(0,y);
2224 for (unsigned int outX = 0; outX < stride; outX++) {
2225 int32_t rCount, gCount, bCount;
2226 // TODO: Perfect demosaicing is a cheat
2227 const uint32_t *pixel = mScene.getPixelElectrons();
2228 rCount = pixel[Scene::R] * scale64x;
2229 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2230 gCount = pixel[Scene::Gr] * scale64x;
2231 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2232 bCount = pixel[Scene::B] * scale64x;
2233 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2234
2235 *pxY++ = (rgbToY[0] * rCount +
2236 rgbToY[1] * gCount +
2237 rgbToY[2] * bCount) / scaleOutSq;
2238 if (outY % 2 == 0 && outX % 2 == 0) {
2239 *pxVU++ = (rgbToCr[0] * rCount +
2240 rgbToCr[1] * gCount +
2241 rgbToCr[2] * bCount +
2242 rgbToCr[3]) / scaleOutSq;
2243 *pxVU++ = (rgbToCb[0] * rCount +
2244 rgbToCb[1] * gCount +
2245 rgbToCb[2] * bCount +
2246 rgbToCb[3]) / scaleOutSq;
2247 }
2248 for (unsigned int j = 1; j < inc; j++)
2249 mScene.getPixelElectrons();
2250 }
2251 }
2252#else
2253 uint8_t *src;
2254 if (mKernelBuffer) {
2255 src = mKernelBuffer;
2256 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2257 //TODO YUYV scale
2258 //memcpy(img, src, vinfo->preview.buf.length);
2259
2260 } else
2261 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2262
2263 return ;
2264 }
2265
2266 while(1) {
2267 src = (uint8_t *)get_frame(vinfo);
2268 if (NULL == src) {
2269 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2270 usleep(5000);
2271 continue;
2272 }
2273 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2274 memcpy(img, src, vinfo->preview.buf.length);
2275 } else {
2276 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2277 }
2278
2279 break;
2280 }
2281#endif
2282 mKernelBuffer = src;
2283 ALOGVV("YUYV sensor image captured");
2284}
2285
2286void Sensor::dump(int fd) {
2287 String8 result;
2288 result = String8::format("%s, sensor preview information: \n", __FILE__);
2289 result.appendFormat("camera preview fps: %.2f\n", mCurFps);
2290 result.appendFormat("camera preview width: %d , height =%d\n",
2291 vinfo->preview.format.fmt.pix.width,vinfo->preview.format.fmt.pix.height);
2292
2293 result.appendFormat("camera preview format: %.4s\n\n",
2294 (char *) &vinfo->preview.format.fmt.pix.pixelformat);
2295
2296 write(fd, result.string(), result.size());
2297}
2298
2299} // namespace android
2300
2301