path: root/v3/fake-pipeline2/Sensor.cpp (plain)
blob: a32369feefbc5bc6d18751ae4e5417d2afba7f7f
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <cstring>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>


#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
    Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
    * Sensor::kVoltsPerLuxSecond;

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
    Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
    Sensor::kReadNoiseStddevBeforeGain *
    Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
    Sensor::kReadNoiseStddevAfterGain *
    Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
    Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two,
    // and then inverting the log2. A bias is added to make the relative
    // error symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    // Type-pun through memcpy instead of a pointer cast to avoid
    // strict-aliasing undefined behavior.
    int32_t r_i;
    memcpy(&r_i, &r, sizeof(r_i));
    r_i = (r_i >> 1) + modifier;

    float result;
    memcpy(&result, &r_i, sizeof(result));
    return result;
}
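
// Illustration only (not part of the HAL build): a standalone sketch for
// checking the approximation against libm's sqrtf(). The +-3.6% bound quoted
// above can be verified with a hypothetical harness like this:
#if 0
#include <cstdio>
#include <cmath>
int main() {
    for (float v = 0.5f; v < 1.0e6f; v *= 10.0f) {
        float approx = sqrtf_approx(v);
        float exact = sqrtf(v);
        printf("v=%g approx=%g exact=%g rel_err=%.2f%%\n",
                v, approx, exact, 100.0f * (approx - exact) / exact);
    }
    return 0;
}
#endif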

void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & (~31);
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}
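
// Worked example (illustrative numbers): for width = 633, the padded source
// stride is (633 + 31) & ~31 = 640 pixels, so each iteration copies
// 633 * 3 bytes of visible row and then skips (640 - 633) * 3 = 21 bytes of
// row padding in the source.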

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
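
// E.g. ALIGN(633, 16) == 640 and ALIGN(640, 16) == 640; the mask form only
// works because y is a power of 2.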

Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0] - kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mIoctlSupport(0),
        msupportrotate(0),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera2::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffers format failed\n");
            return res;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
            || (vinfo->preview.format.fmt.pix.height != height)
            //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
            ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format to a V4L2 format and check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}
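
// Example of the mapping above (matching what the code does): a request for
// HAL_PIXEL_FORMAT_YCrCb_420_SP is translated to V4L2_PIX_FMT_NV21, but if
// the sensor also enumerates MJPEG, MJPEG is returned first since the
// capture path can decode MJPEG into NV21.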

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;
    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }
    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("not in [min, max], min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; // qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}
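
// Worked example (hypothetical driver values): if the driver reports
// qc.minimum = 0, qc.maximum = 8, qc.step = 1 and qc.default_value = 4, then
// level = 9, middle = 4, and the function reports the re-centered range
// [minExp, maxExp] = [-4, 4] with def = 0, i.e. EV steps are expressed
// relative to the driver's midpoint.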

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d mEVmin=%d mEVmax=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}
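
// This is roughly the inverse of getExposure()'s re-centering: with the same
// hypothetical range [0, 8], an EV compensation of +2 is written back to the
// driver as ctl.value = 2 + (8 - 0) / 2 = 6.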

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}
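
// Packing sketch (assuming the driver decodes "(cx + 1000) << 16 | (cy + 1000)"):
// a focus rectangle of (-200, -100)..(200, 100) has center (0, 0), so
// ctl.value = (0 + 1000) << 16 | ((0 + 1000) & 0xffff) = 0x03E803E8.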


int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns / 1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns / 1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

bool Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}

bool Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mCapturedBuffers == NULL) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    } else {
        mReadoutComplete.signal();
    }

    *captureTime = mCaptureTime;
    mCapturedBuffers = NULL;
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time
        // has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        if (mCapturedBuffers != NULL) {
            ALOGV("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration / 1e6, gain);
        mScene.setExposureDuration((float)exposureDuration / 1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }
    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L
                + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount / (interval / 1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval,
                interval / 1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime) / 1000000),
            (int)(frameDuration / 1000000));
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    DBG_LOGB("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add auxiliary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                isjpeg = true;
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = HAL_PIXEL_FORMAT_RGB_888;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = HAL_PIXEL_FORMAT_RGB_888;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = HAL_PIXEL_FORMAT_RGB_888;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    if (!isjpeg) {
        // The JPEG path saves its RGB888 data into a separate auxiliary
        // buffer, and that frame is put back separately.
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count + 0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count + 1] = frmsize.discrete.width;
            picSizes[count + 2] = frmsize.discrete.height;
            picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                    frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i)
                continue;

            // Insert in descending order of area
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count + 0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count + 1] = frmsize.discrete.width;
            picSizes[count + 2] = frmsize.discrete.height;
            picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format ="
                    " HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                    frmsize.discrete.height);
            if (0 == i)
                continue;

            // Insert in descending order of area
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count + 0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count + 1] = frmsize.discrete.width;
                picSizes[count + 2] = frmsize.discrete.height;
                picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format ="
                        " HAL_PIXEL_FORMAT_YCbCr_422_I\n", frmsize.discrete.width,
                        frmsize.discrete.height);
                if (0 == i)
                    continue;

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt) / sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                picSizes[count + 0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count + 1] = frmsize.discrete.width;
                picSizes[count + 2] = frmsize.discrete.height;
                picSizes[count + 3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i)
                    continue;

                // Insert in descending order of area
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel format for jpeg");

    return count;
}
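
// The resulting picSizes[] is packed as (format, width, height, direction)
// quadruples, sorted by descending area within each format group, e.g.
// (illustrative sizes only):
//   { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1600, 1200, ..._OUTPUT,
//     HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1280,  960, ..._OUTPUT,
//     HAL_PIXEL_FORMAT_YCbCr_420_888,          1600, 1200, ..._OUTPUT, ... }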

int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0;
    int framerate = 0;
    int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // duration[] is a caller-provided array; ARRAY_SIZE() on a pointer
    // parameter would only measure the pointer itself, so clear `size`
    // entries instead.
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        // V4L2_PIX_FMT_YVU420
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size - 3];
                fival.height = picSizes[size - 2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    // NOTE: for CONTINUOUS/STEPWISE the code below still reads
                    // the union's .discrete view, as the original did.
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator / fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count + 0] = (int64_t)(picSizes[size - 4]);
                        duration[count + 1] = (int64_t)(picSizes[size - 3]);
                        duration[count + 2] = (int64_t)(picSizes[size - 2]);
                        duration[count + 3] = (int64_t)66666666L; // TODO: query the real frame interval from the camera driver
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                        temp_rate = fival.discrete.denominator / fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count + 0] = (int64_t)picSizes[size - 4];
                        duration[count + 1] = (int64_t)picSizes[size - 3];
                        duration[count + 2] = (int64_t)picSizes[size - 2];
                        duration[count + 3] = (int64_t)66666666L; // TODO: query the real frame interval from the camera driver
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        temp_rate = fival.discrete.denominator / fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count + 0] = (int64_t)picSizes[size - 4];
                        duration[count + 1] = (int64_t)picSizes[size - 3];
                        duration[count + 2] = (int64_t)picSizes[size - 2];
                        duration[count + 3] = (int64_t)66666666L; // TODO: query the real frame interval from the camera driver
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count > tmp_size)
                            break;
                        duration[count + 0] = (int64_t)(picSizes[size - 4]);
                        duration[count + 1] = (int64_t)(picSizes[size - 3]);
                        duration[count + 2] = (int64_t)(picSizes[size - 2]);
                        if (framerate == 5) {
                            duration[count + 3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count + 3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count + 3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count + 3] = (int64_t)33333333L;
                        } else {
                            duration[count + 3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}
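
// duration[] mirrors picSizes[]: each entry is a (format, width, height,
// min-frame-duration-ns) quadruple. E.g. a mode whose best enumerated rate
// is 15 fps ends up with 66666666 ns, while 30 fps yields 33333333 ns; the
// hard-coded 66666666L above is a stand-in until the real per-mode interval
// is queried from the driver.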

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L; // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int)ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}
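
// Example of the arithmetic above: a discrete interval of 1/30 s
// (numerator = 1, denominator = 30) gives 1 * 1000000000 / 30 = 33333333 ns,
// which replaces the 66666666 ns (15 fps) default as the minimum duration.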

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; // getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count + 1] = frmsize.discrete.height;

            if (0 == i)
                continue;

            // Keep the list sorted in descending order of area
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}

void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain / 100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}

void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain / 100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
#else
    uint8_t *src = NULL;
    int ret = 0, rotate = 0;
    uint32_t width = 0, height = 0;

    rotate = getPictureRotate();
    width = vinfo->picture.format.fmt.pix.width;
    height = vinfo->picture.format.fmt.pix.height;

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_capturing(vinfo);
    } else {
        stop_capturing(vinfo);
    }

    ret = start_picture(vinfo, rotate);
    if (ret < 0) {
        ALOGD("start picture failed!");
    }
    while (1) {
        src = (uint8_t *)get_picture(vinfo);
        if (NULL != src) {
            break;
        }

        usleep(5000);
    }
    ALOGD("get picture success!");

    if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
        uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
        if (tmp_buffer == NULL) {
            ALOGE("new buffer failed!\n");
            return;
        }
        if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                width, tmp_buffer + width * height, (width + 1) / 2, width,
                height, width, height, libyuv::FOURCC_MJPG) != 0) {
            DBG_LOGA("Decode MJPEG frame failed\n");
        }
        nv21_to_rgb24(tmp_buffer, img, width, height);
        if (tmp_buffer != NULL)
            delete [] tmp_buffer;
    } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
        yuyv422_to_rgb24(src, img, width, height);
    }

    if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
        if (vinfo->picture.buf.length == width * height * 3) {
            memcpy(img, src, vinfo->picture.buf.length);
        } else {
            rgb24_memcpy(img, src, width, height);
        }
    }

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}

void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
    // Copy the Y plane: every even byte of the packed YUYV stream.
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    // Build the interleaved VU plane, averaging chroma across row pairs.
    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; // v
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; // u
        }
        src += width * 2 * 2;
    }

    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); // v
            *dst++ = *(src + 1 + j); // u
        }
}
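
// Layout sketch for the conversion above (illustrative 2-pixel groups):
//   YUYV (packed, 2 bytes/pixel): Y0 U0 Y1 V0 | Y2 U1 Y3 V1 | ...
//   NV21 (planar): a full Y plane of width*height bytes, followed by an
//   interleaved VU plane of width*height/2 bytes: V U V U ...
// Each output VU pair averages the U/V samples of two adjacent source rows,
// which is why the chroma loop walks rows in pairs.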

void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    // width should be an even number.
    // Chroma rows are padded: c_stride = ALIGN(width / 2, 16).
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width / 2, 16);
    c_size = c_stride * height / 2;
    cr_offset = y_size;
    cb_offset = y_size + c_size;

    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) { // one line has 2*width bytes for yuyv.
            // ceil((u1 + u2) / 2)
            *(dst + cr_offset + j / 4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j / 4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
}
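
// Layout sketch: YV12 here is a full Y plane (width*height bytes), then the
// Cr (V) plane, then the Cb (U) plane, each chroma row padded to
// c_stride = ALIGN(width / 2, 16). E.g. for width = 360 (illustrative):
// c_stride = ALIGN(180, 16) = 192, so each chroma row carries 12 trailing
// padding bytes.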
1838
1839
1840void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
1841#if 0
1842 float totalGain = gain/100.0 * kBaseGainFactor;
1843 // Using fixed-point math with 6 bits of fractional precision.
1844 // In fixed-point math, calculate total scaling from electrons to 8bpp
1845 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1846 // In fixed-point math, saturation point of sensor after gain
1847 const int saturationPoint = 64 * 255;
1848 // Fixed-point coefficients for RGB-YUV transform
1849 // Based on JFIF RGB->YUV transform.
1850 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
1851 const int rgbToY[] = {19, 37, 7};
1852 const int rgbToCb[] = {-10,-21, 32, 524288};
1853 const int rgbToCr[] = {32,-26, -5, 524288};
1854 // Scale back to 8bpp non-fixed-point
1855 const int scaleOut = 64;
1856 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
1857
1858 uint32_t inc = kResolution[0] / stride;
1859 uint32_t outH = kResolution[1] / inc;
1860 for (unsigned int y = 0, outY = 0;
1861 y < kResolution[1]; y+=inc, outY++) {
1862 uint8_t *pxY = img + outY * stride;
1863 uint8_t *pxVU = img + (outH + outY / 2) * stride;
1864 mScene.setReadoutPixel(0,y);
1865 for (unsigned int outX = 0; outX < stride; outX++) {
1866 int32_t rCount, gCount, bCount;
1867 // TODO: Perfect demosaicing is a cheat
1868 const uint32_t *pixel = mScene.getPixelElectrons();
1869 rCount = pixel[Scene::R] * scale64x;
1870 rCount = rCount < saturationPoint ? rCount : saturationPoint;
1871 gCount = pixel[Scene::Gr] * scale64x;
1872 gCount = gCount < saturationPoint ? gCount : saturationPoint;
1873 bCount = pixel[Scene::B] * scale64x;
1874 bCount = bCount < saturationPoint ? bCount : saturationPoint;
1875
1876 *pxY++ = (rgbToY[0] * rCount +
1877 rgbToY[1] * gCount +
1878 rgbToY[2] * bCount) / scaleOutSq;
1879 if (outY % 2 == 0 && outX % 2 == 0) {
1880 *pxVU++ = (rgbToCr[0] * rCount +
1881 rgbToCr[1] * gCount +
1882 rgbToCr[2] * bCount +
1883 rgbToCr[3]) / scaleOutSq;
1884 *pxVU++ = (rgbToCb[0] * rCount +
1885 rgbToCb[1] * gCount +
1886 rgbToCb[2] * bCount +
1887 rgbToCb[3]) / scaleOutSq;
1888 }
1889 for (unsigned int j = 1; j < inc; j++)
1890 mScene.getPixelElectrons();
1891 }
1892 }
1893#else
1894 uint8_t *src;
1895
1896 if (mKernelBuffer) {
1897 src = mKernelBuffer;
1898 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
1899 //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
1900 structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
1901 (mmInt32)vinfo->preview.format.fmt.pix.height,
1902 (mmInt32)vinfo->preview.format.fmt.pix.width,
1903 IC_FORMAT_YCbCr420_lp,
1904 (mmByte *) src,
1905 (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
1906 0};
1907
1908 structConvImage output = {(mmInt32)b.width,
1909 (mmInt32)b.height,
1910 (mmInt32)b.width,
1911 IC_FORMAT_YCbCr420_lp,
1912 (mmByte *) b.img,
1913 (mmByte *) b.img + b.width * b.height,
1914 0};
1915
1916 if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
1917 ALOGE("Sclale NV21 frame down failed!\n");
1918 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
1919 int width = vinfo->preview.format.fmt.pix.width;
1920 int height = vinfo->preview.format.fmt.pix.height;
1921 uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];
1922
1923 if (tmp_buffer == NULL) {
1924 ALOGE("new buffer failed!\n");
1925 return;
1926 }
1927
1928 YUYVToNV21(src, tmp_buffer, width, height);
1929
1930 structConvImage input = {(mmInt32)width,
1931 (mmInt32)height,
1932 (mmInt32)width,
1933 IC_FORMAT_YCbCr420_lp,
1934 (mmByte *) tmp_buffer,
1935 (mmByte *) tmp_buffer + width * height,
1936 0};
1937
1938 structConvImage output = {(mmInt32)b.width,
1939 (mmInt32)b.height,
1940 (mmInt32)b.width,
1941 IC_FORMAT_YCbCr420_lp,
1942 (mmByte *) b.img,
1943 (mmByte *) b.img + b.width * b.height,
1944 0};
1945
1946 if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
1947 ALOGE("Sclale NV21 frame down failed!\n");
1948
1949 delete [] tmp_buffer;
1950 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
1951 int width = vinfo->preview.format.fmt.pix.width;
1952 int height = vinfo->preview.format.fmt.pix.height;
1953
1954#if 0
1955 uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];
1956
1957 if (tmp_buffer == NULL) {
1958 ALOGE("new buffer failed!\n");
1959 return;
1960 }
1961#endif
1962
1963#if 0
1964 if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused,
1965 b.img,
1966 b.width, b.img + b.width * b.height, (b.width + 1) / 2, b.width,
1967 b.height, b.width, b.height, libyuv::FOURCC_MJPG) != 0) {
1968 DBG_LOGA("Decode MJPEG frame failed\n");
1969 }
1970#else
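 // mKernelBuffer was set to b.img after the MJPEG decode in the capture
 // loop below, so src already holds a decoded NV21 frame; a plain copy
 // suffices here, assuming successive buffers share the same dimensions.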
1971 memcpy(b.img, src, b.width * b.height * 3/2);
1972#endif
1973
1974#if 0
1975 structConvImage input = {(mmInt32)width,
1976 (mmInt32)height,
1977 (mmInt32)width,
1978 IC_FORMAT_YCbCr420_lp,
1979 (mmByte *) tmp_buffer,
1980 (mmByte *) tmp_buffer + width * height,
1981 0};
1982
1983 structConvImage output = {(mmInt32)b.width,
1984 (mmInt32)b.height,
1985 (mmInt32)b.width,
1986 IC_FORMAT_YCbCr420_lp,
1987 (mmByte *) b.img,
1988 (mmByte *) b.img + b.width * b.height,
1989 0};
1990
1991 if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
1992 ALOGE("Sclale NV21 frame down failed!\n");
1993
1994 delete [] tmp_buffer;
1995#endif
1996 } else {
1997 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
1998 }
1999 return ;
2000 }
2001 while (1) {
2002 src = (uint8_t *)get_frame(vinfo);
2003 if (NULL == src) {
2004 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2005 usleep(5000);
2006 continue;
2007 }
2008 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2009 memcpy(b.img, src, vinfo->preview.buf.length);
2010 mKernelBuffer = src;
2011 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2012 int width = vinfo->preview.format.fmt.pix.width;
2013 int height = vinfo->preview.format.fmt.pix.height;
2014 YUYVToNV21(src, b.img, width, height);
2015 mKernelBuffer = src;
2016 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2017 int width = vinfo->preview.format.fmt.pix.width;
2018 int height = vinfo->preview.format.fmt.pix.height;
2019 if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img,
2020 width, b.img + width * height, (width + 1) / 2, width,
2021 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2022 putback_frame(vinfo);
2023 DBG_LOGA("Decode MJPEG frame failed\n");
2024 continue;
2025 }
2026 mKernelBuffer = b.img;
2027 }
2028
2029 break;
2030 }
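 // Note the cache asymmetry: for NV21 and YUYV, mKernelBuffer keeps the
 // raw V4L2 buffer, while for MJPEG it keeps b.img, the already-decoded
 // frame, so the fast path above never pays for a second JPEG decode.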
2031#endif
2032
2033 ALOGVV("NV21 sensor image captured");
2034}
2035
2036void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
2037#if 0
2038 float totalGain = gain/100.0 * kBaseGainFactor;
2039 // Using fixed-point math with 6 bits of fractional precision.
2040 // In fixed-point math, calculate total scaling from electrons to 8bpp
2041 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2042 // In fixed-point math, saturation point of sensor after gain
2043 const int saturationPoint = 64 * 255;
2044 // Fixed-point coefficients for RGB-YUV transform
2045 // Based on JFIF RGB->YUV transform.
2046 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2047 const int rgbToY[] = {19, 37, 7};
2048 const int rgbToCb[] = {-10,-21, 32, 524288};
2049 const int rgbToCr[] = {32,-26, -5, 524288};
2050 // Scale back to 8bpp non-fixed-point
2051 const int scaleOut = 64;
2052 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2053
2054 uint32_t inc = kResolution[0] / stride;
2055 uint32_t outH = kResolution[1] / inc;
2056 for (unsigned int y = 0, outY = 0;
2057 y < kResolution[1]; y+=inc, outY++) {
2058 uint8_t *pxY = img + outY * stride;
2059 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2060 mScene.setReadoutPixel(0,y);
2061 for (unsigned int outX = 0; outX < stride; outX++) {
2062 int32_t rCount, gCount, bCount;
2063 // TODO: Perfect demosaicing is a cheat
2064 const uint32_t *pixel = mScene.getPixelElectrons();
2065 rCount = pixel[Scene::R] * scale64x;
2066 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2067 gCount = pixel[Scene::Gr] * scale64x;
2068 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2069 bCount = pixel[Scene::B] * scale64x;
2070 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2071
2072 *pxY++ = (rgbToY[0] * rCount +
2073 rgbToY[1] * gCount +
2074 rgbToY[2] * bCount) / scaleOutSq;
2075 if (outY % 2 == 0 && outX % 2 == 0) {
2076 *pxVU++ = (rgbToCr[0] * rCount +
2077 rgbToCr[1] * gCount +
2078 rgbToCr[2] * bCount +
2079 rgbToCr[3]) / scaleOutSq;
2080 *pxVU++ = (rgbToCb[0] * rCount +
2081 rgbToCb[1] * gCount +
2082 rgbToCb[2] * bCount +
2083 rgbToCb[3]) / scaleOutSq;
2084 }
2085 for (unsigned int j = 1; j < inc; j++)
2086 mScene.getPixelElectrons();
2087 }
2088 }
2089#else
2090 uint8_t *src;
2091 if (mKernelBuffer) {
2092 src = mKernelBuffer;
2093 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2094 //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
2095 ALOGI("Sclale YV12 frame down \n");
2096
2097 int width = vinfo->preview.format.fmt.pix.width;
2098 int height = vinfo->preview.format.fmt.pix.height;
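 // libyuv::I420Scale scales its three planes independently, so feeding
 // YV12's V-before-U layout through the nominally I420 (U-before-V)
 // arguments is harmless as long as source and destination use the same
 // plane ordering, as they do here.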
2099 int ret = libyuv::I420Scale(src, width,
2100 src + width * height, width / 2,
2101 src + width * height + width * height / 4, width / 2,
2102 width, height,
2103 b.img, b.width,
2104 b.img + b.width * b.height, b.width / 2,
2105 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2106 b.width, b.height,
2107 libyuv::kFilterNone);
2108 if (ret < 0)
2109 ALOGE("Sclale YV12 frame down failed!\n");
2110 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2111 int width = vinfo->preview.format.fmt.pix.width;
2112 int height = vinfo->preview.format.fmt.pix.height;
2113 uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];
2114
2115 if (tmp_buffer == NULL) {
2116 ALOGE("new buffer failed!\n");
2117 return;
2118 }
2119
2120 YUYVToYV12(src, tmp_buffer, width, height);
2121
2122 int ret = libyuv::I420Scale(tmp_buffer, width,
2123 tmp_buffer + width * height, width / 2,
2124 tmp_buffer + width * height + width * height / 4, width / 2,
2125 width, height,
2126 b.img, b.width,
2127 b.img + b.width * b.height, b.width / 2,
2128 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2129 b.width, b.height,
2130 libyuv::kFilterNone);
2131 if (ret < 0)
2132 ALOGE("Sclale YV12 frame down failed!\n");
2133 delete [] tmp_buffer;
2134 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2135 int width = vinfo->preview.format.fmt.pix.width;
2136 int height = vinfo->preview.format.fmt.pix.height;
2137 uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];
2138
2139 if (tmp_buffer == NULL) {
2140 ALOGE("new buffer failed!\n");
2141 return;
2142 }
2143
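 // The U and V destination pointers handed to ConvertToI420 are swapped
 // relative to I420 layout (dst_u sits after dst_v in memory), so the
 // MJPEG frame decodes straight into YV12 plane order: Y, then V, then U.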
2144 if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
2145 tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
2146 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2147 DBG_LOGA("Decode MJPEG frame failed\n");
2148 }
2149
2150 int ret = libyuv::I420Scale(tmp_buffer, width,
2151 tmp_buffer + width * height, width / 2,
2152 tmp_buffer + width * height + width * height / 4, width / 2,
2153 width, height,
2154 b.img, b.width,
2155 b.img + b.width * b.height, b.width / 2,
2156 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2157 b.width, b.height,
2158 libyuv::kFilterNone);
2159 if (ret < 0)
2160 ALOGE("Sclale YV12 frame down failed!\n");
2161
2162 delete [] tmp_buffer;
2163 } else {
2164 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2165 }
2166 return ;
2167 }
2168 while (1) {
2169 src = (uint8_t *)get_frame(vinfo);
2170
2171 if (NULL == src) {
2172 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2173 usleep(5000);
2174 continue;
2175 }
2176 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2177 memcpy(b.img, src, vinfo->preview.buf.length);
2178 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2179 int width = vinfo->preview.format.fmt.pix.width;
2180 int height = vinfo->preview.format.fmt.pix.height;
2181 YUYVToYV12(src, b.img, width, height);
2182 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2183 int width = vinfo->preview.format.fmt.pix.width;
2184 int height = vinfo->preview.format.fmt.pix.height;
2185 if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
2186 b.img + width * height, (width + 1) / 2, 0, 0, width, height,
2187 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2188 putback_frame(vinfo);
2189 DBG_LOGA("Decode MJPEG frame failed\n");
2190 continue;
2191 }
2192 } else {
2193 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2194 }
2195
2196 break;
2197 }
2198#endif
2199 mKernelBuffer = src;
2200 ALOGVV("YV12 sensor image captured");
2201}
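
// The three YV12 plane offsets above recur at every call site; a small
// helper in this spirit (a hypothetical sketch, not a function this file
// defines) would make the libyuv::I420Scale calls easier to read:
static inline void getYV12Planes(uint8_t *base, int w, int h,
                                 uint8_t **y, uint8_t **v, uint8_t **u) {
    *y = base;                     // full-resolution luma plane
    *v = base + w * h;             // V comes first in YV12
    *u = base + w * h + w * h / 4; // U follows V; each is quarter size
}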
2202
2203void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
2204#if 0
2205 float totalGain = gain/100.0 * kBaseGainFactor;
2206 // Using fixed-point math with 6 bits of fractional precision.
2207 // In fixed-point math, calculate total scaling from electrons to 8bpp
2208 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2209 // In fixed-point math, saturation point of sensor after gain
2210 const int saturationPoint = 64 * 255;
2211 // Fixed-point coefficients for RGB-YUV transform
2212 // Based on JFIF RGB->YUV transform.
2213 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2214 const int rgbToY[] = {19, 37, 7};
2215 const int rgbToCb[] = {-10,-21, 32, 524288};
2216 const int rgbToCr[] = {32,-26, -5, 524288};
2217 // Scale back to 8bpp non-fixed-point
2218 const int scaleOut = 64;
2219 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2220
2221 uint32_t inc = kResolution[0] / stride;
2222 uint32_t outH = kResolution[1] / inc;
2223 for (unsigned int y = 0, outY = 0;
2224 y < kResolution[1]; y+=inc, outY++) {
2225 uint8_t *pxY = img + outY * stride;
2226 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2227 mScene.setReadoutPixel(0,y);
2228 for (unsigned int outX = 0; outX < stride; outX++) {
2229 int32_t rCount, gCount, bCount;
2230 // TODO: Perfect demosaicing is a cheat
2231 const uint32_t *pixel = mScene.getPixelElectrons();
2232 rCount = pixel[Scene::R] * scale64x;
2233 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2234 gCount = pixel[Scene::Gr] * scale64x;
2235 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2236 bCount = pixel[Scene::B] * scale64x;
2237 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2238
2239 *pxY++ = (rgbToY[0] * rCount +
2240 rgbToY[1] * gCount +
2241 rgbToY[2] * bCount) / scaleOutSq;
2242 if (outY % 2 == 0 && outX % 2 == 0) {
2243 *pxVU++ = (rgbToCr[0] * rCount +
2244 rgbToCr[1] * gCount +
2245 rgbToCr[2] * bCount +
2246 rgbToCr[3]) / scaleOutSq;
2247 *pxVU++ = (rgbToCb[0] * rCount +
2248 rgbToCb[1] * gCount +
2249 rgbToCb[2] * bCount +
2250 rgbToCb[3]) / scaleOutSq;
2251 }
2252 for (unsigned int j = 1; j < inc; j++)
2253 mScene.getPixelElectrons();
2254 }
2255 }
2256#else
2257 uint8_t *src;
2258 if (mKernelBuffer) {
2259 src = mKernelBuffer;
2260 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2261 //TODO YUYV scale (see the hedged downscale sketch after this function)
2262 //memcpy(img, src, vinfo->preview.buf.length);
2263
2264 } else
2265 ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2266
2267 return;
2268 }
2269
2270 while (1) {
2271 src = (uint8_t *)get_frame(vinfo);
2272 if (NULL == src) {
2273 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2274 usleep(5000);
2275 continue;
2276 }
2277 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2278 memcpy(img, src, vinfo->preview.buf.length);
2279 } else {
2280 ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2281 }
2282
2283 break;
2284 }
2285#endif
2286 mKernelBuffer = src;
2287 ALOGVV("YUYV sensor image captured");
2288}
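
// A hedged sketch for the "TODO YUYV scale" above: nearest-neighbour
// downscale of packed YUYV, stepping in 2-pixel macropixels ([Y0][U][Y1][V])
// so chroma siting stays valid. The helper name and the assumption of even
// dimensions are illustrative, not part of this HAL.
static void scaleYUYVNearest(const uint8_t *src, int srcW, int srcH,
                             uint8_t *dst, int dstW, int dstH) {
    for (int y = 0; y < dstH; y++) {
        const uint8_t *srow = src + (y * srcH / dstH) * srcW * 2;
        uint8_t *drow = dst + y * dstW * 2;
        for (int x = 0; x < dstW; x += 2) {
            int sx = (x * srcW / dstW) & ~1;   // align to macropixel start
            const uint8_t *mp = srow + sx * 2; // 4 bytes cover 2 pixels
            drow[x * 2 + 0] = mp[0];           // Y0
            drow[x * 2 + 1] = mp[1];           // U
            drow[x * 2 + 2] = mp[2];           // Y1
            drow[x * 2 + 3] = mp[3];           // V
        }
    }
}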
2289
2290void Sensor::dump(int fd) {
2291 String8 result;
2292 result = String8::format("%s, sensor preview information:\n", __FILE__);
2293 result.appendFormat("camera preview fps: %.2f\n", mCurFps);
2294 result.appendFormat("camera preview width: %d, height: %d\n",
2295 vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);
2296
2297 result.appendFormat("camera preview format: %.4s\n\n",
2298 (char *) &vinfo->preview.format.fmt.pix.pixelformat);
2299
2300 write(fd, result.string(), result.size());
2301}
2302
2303} // namespace android
2304
2305