/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>

#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
    Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
    * Sensor::kVoltsPerLuxSecond;

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
    Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
    Sensor::kReadNoiseStddevBeforeGain *
    Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
    Sensor::kReadNoiseStddevAfterGain *
    Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
    Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
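// For reference: with the minimum frame duration above (~33.33 ms) and 1200
// rows, each row's readout slot is 33331760 / 1200 = 27776 ns (~27.8 us).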

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

const uint32_t kUsbAvailableSize[10] = {176, 144, 320, 240, 352, 288, 640, 480, 1280, 720};

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
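
// A quick sanity check for sqrtf_approx: the result for 4.0f should land
// within the stated +-3.6% band around 2.0. Note the int/float pointer casts
// are classic type punning; strictly conforming C++ would go through memcpy.

// rgb24_memcpy copies each width*3-byte RGB24 row into a tightly packed
// destination from a source whose rows are padded to a 32-pixel-aligned
// stride (a reading of the loop below; the padded-source layout is an
// assumption about the caller).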
void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & (~31);
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
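
// Example: ALIGN(24, 16) == 32 and ALIGN(33, 16) == 48; it rounds x up to
// the next multiple of y.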

bool IsUsbAvailableSize(const uint32_t kUsbAvailableSize[], uint32_t width, uint32_t height, int count)
{
    int i;
    bool ret = false;
    for (i = 0; i < count; i += 2) {
        if ((width == kUsbAvailableSize[i]) && (height == kUsbAvailableSize[i+1])) {
            ret = true;
            break;
        }
    }
    return ret;
}
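
// The table is scanned as flattened (width, height) pairs, so, e.g.,
// IsUsbAvailableSize(kUsbAvailableSize, 640, 480, 10) returns true, while a
// 720x480 query matches no pair and returns false.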

void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}

Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mIoctlSupport(0),
        msupportrotate(0),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera3::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
            || (vinfo->preview.format.fmt.pix.height != height)
            //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
            ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * translate the HAL format to a V4L2 format, then check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }
    ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
    return BAD_VALUE;
}
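
// A minimal usage sketch, assuming a caller that wants the V4L2 format for
// an NV21 preview stream (width/height are the caller's stream dimensions):
//
//     int v4l2Fmt = halFormatToSensorFormat(HAL_PIXEL_FORMAT_YCrCb_420_SP);
//     if (v4l2Fmt != BAD_VALUE)
//         setOutputFormat(width, height, v4l2Fmt, false);
//
// On an MJPEG-capable USB camera this returns V4L2_PIX_FMT_MJPEG regardless
// of the requested HAL format.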

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;
    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }
    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("not in [min,max], min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}
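
// The driver's absolute exposure range is re-centered around its midpoint so
// the HAL sees a symmetric EV-compensation range. For example, a driver
// reporting minimum=0, maximum=8 yields middle=4 and a reported range of
// [-4, +4], with default 0 when default_value is 4.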

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d min=%d max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("the ctrl of camera handle %d is not an integer type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}
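
// The focus-area center is packed into one control value: the x midpoint
// (offset by 1000 to keep it non-negative) in the high 16 bits and the y
// midpoint in the low 16 bits. For a rectangle (-100, -200)..(300, 400) the
// midpoints are both 100, so ctl.value = (1100 << 16) | 1100.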

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

bool Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}

bool Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mCapturedBuffers == NULL) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    } else {
        mReadoutComplete.signal();
    }

    *captureTime = mCaptureTime;
    mCapturedBuffers = NULL;
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        if (mCapturedBuffers != NULL) {
            ALOGV("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
#if 0
            if (get_device_status(vinfo)) {
                listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
            }
#endif
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }
    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld us (%f s), fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    CAMHAL_LOGDB("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add an auxiliary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                uint32_t pixelfmt;
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                        b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        DBG_LOGA("");
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    // A JPEG capture rendered as RGB888 lives in its own buffer struct, and
    // its V4L2 frame is put back separately.
    if (!isjpeg) {
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}
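
// getStreamConfigurations fills picSizes[] with flattened 4-tuples of
// (pixel format, width, height, direction), inserting each new resolution so
// that every format's list stays sorted by descending width*height; e.g. a
// 1280x720 mode is placed ahead of an existing 640x480 entry for the same
// format.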
int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;
#if 0
            if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                int count = sizeof(kUsbAvailableSize)/sizeof(kUsbAvailableSize[0]);
                if (!IsUsbAvailableSize(kUsbAvailableSize, frmsize.discrete.width, frmsize.discrete.height, count))
                    continue;
            }
#endif
            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                    frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;
#if 0
            if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                int count = sizeof(kUsbAvailableSize)/sizeof(kUsbAvailableSize[0]);
                if (!IsUsbAvailableSize(kUsbAvailableSize, frmsize.discrete.width, frmsize.discrete.height, count))
                    continue;
            }
#endif
            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format="
                    "HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                    frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format="
                        "HAL_PIXEL_FORMAT_YCbCr_422_I\n", frmsize.discrete.width,
                        frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                //TODO: insert in descending order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel format for JPEG source");

    return count;
}
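
// getStreamConfigurationDurations mirrors the picSizes[] layout: for each
// (format, width, height) triple it emits a 4-tuple whose last element is a
// minimum frame duration in nanoseconds (e.g. 33333333 ns for 30 fps).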
int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0, framerate = 0, temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        //V4L2_PIX_FMT_YVU420
    };

    for (i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        /* we got all durations of each resolution for the previous format */
        if (count >= tmp_size)
            break;

        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size-3];
                fival.height = picSizes[size-2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    // Any reported interval type (discrete, continuous, or
                    // stepwise) marks this resolution as supported.
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE ||
                            fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS ||
                            fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        duration[count+3] = (int64_t)66666666L; // TODO: use the frame interval reported by the camera driver
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count >= tmp_size)
                            break;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        if (framerate == 5) {
                            duration[count+3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count+3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count+3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count+3] = (int64_t)33333333L;
                        } else {
                            duration[count+3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L; // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int)ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { //only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            //TODO: insert in descending order
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}
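
// captureRaw simulates a Bayer readout: scene electrons are clipped to the
// saturation level, scaled to raw counts by the sensor gain (gain is in ISO
// units, 100 = 1x), offset by the black level, and perturbed with a uniform
// noise sample scaled to approximate the Gaussian read + shot noise stddev.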
void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}

void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
#else
    uint8_t *src = NULL;
    int ret = 0, rotate = 0;
    uint32_t width = 0, height = 0;

    rotate = getPictureRotate();
    width = vinfo->picture.format.fmt.pix.width;
    height = vinfo->picture.format.fmt.pix.height;

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_capturing(vinfo);
    } else {
        stop_capturing(vinfo);
    }

    ret = start_picture(vinfo, rotate);
    if (ret < 0)
    {
        ALOGD("start picture failed!");
    }
    while (1)
    {
        src = (uint8_t *)get_picture(vinfo);
        if (NULL != src) {
            if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
                if (tmp_buffer == NULL) {
                    ALOGE("new buffer failed!\n");
                    return;
                }
                if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                        width, tmp_buffer + width * height, (width + 1) / 2, width,
                        height, width, height, libyuv::FOURCC_MJPG) != 0) {
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    delete [] tmp_buffer; // don't leak the scratch buffer on retry
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer, img, width, height);
                    delete [] tmp_buffer;
                    break;
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
                    yuyv422_to_rgb24(src, img, width, height);
                    break;
                } else {
                    putback_picture_frame(vinfo);
                    usleep(5000);
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
                if (vinfo->picture.buf.length == width * height * 3) {
                    memcpy(img, src, vinfo->picture.buf.length);
                } else {
                    rgb24_memcpy(img, src, width, height);
                }
                break;
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
                memcpy(img, src, vinfo->picture.buf.length);
                break;
            }
        }
    }
    ALOGD("get picture success!");

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}
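
// YUYVToNV21 converts packed 4:2:2 YUYV to semi-planar NV21: the luma plane
// is built by taking every other byte, and each interleaved V/U pair is the
// average of the chroma from two vertically adjacent source rows (4:2:2 ->
// 4:2:0 downsampling); a final odd row reuses its own chroma unaveraged.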
void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u
        }
        src += width * 2 * 2;
    }

    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); //v
            *dst++ = *(src + 1 + j); //u
        }
}

void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    //width should be an even number.
    //chroma stride is aligned via ALIGN(width/2, 16).
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width/2, 16);
    c_size = c_stride * height/2;
    cr_offset = y_size;
    cb_offset = y_size + c_size;

    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) { //one line has 2*width bytes for yuyv.
            //average the chroma of two adjacent rows: ceil((u1+u2)/2)
            *(dst + cr_offset + j/4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j/4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
}


void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;

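    // Fast path: mKernelBuffer holds the last frame this function converted
    // to NV21 (set at the end of the dequeue loop below), so the request can
    // be served from the cache, rescaling with ReSizeNV21() only when the
    // stream size differs from the sensor size.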
    if (mKernelBuffer) {
        src = mKernelBuffer;
        // The cached buffer always holds NV21 data: the capture loop below
        // converts YUYV and MJPEG frames to NV21 before caching them, so all
        // three source formats can be served the same way.
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21 ||
            vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV ||
            vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3 / 2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
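    // No cached frame yet: dequeue preview buffers until one converts
    // cleanly. Short frames (bytesused != length) are handed back and
    // retried; a NULL frame sleeps 5 ms and retries unless
    // get_device_status() indicates the stream should stop.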
    while (1) {
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            } else {
                CAMHAL_LOGDA("get frame NULL, sleep 5ms");
                usleep(5000);
                continue;
            }
        }

        if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
            if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                putback_frame(vinfo);
                continue;
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
            if (vinfo->preview.buf.length == b.width * b.height * 3 / 2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                nv21_memcpy_align32(b.img, src, b.width, b.height);
            }
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            YUYVToNV21(src, b.img, width, height);
            mKernelBuffer = b.img;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img,
                    width, b.img + width * height, (width + 1) / 2, width,
                    height, width, height, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                DBG_LOGA("Decode MJPEG frame failed\n");
                continue;
            }
            mKernelBuffer = b.img;
        }

        break;
    }
#endif

    ALOGVV("NV21 sensor image captured");
}

void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
            ALOGI("Scale YV12 frame down \n");

            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
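            // I420Scale() treats the three planes positionally, and the V
            // (Cr) plane is passed in the "U" slot for both source and
            // destination, so YV12's V-before-U layout survives the scale.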
            int ret = libyuv::I420Scale(src, width,
                    src + width * height, width / 2,
                    src + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            // nothrow keeps the NULL check below meaningful; a plain new[]
            // would throw std::bad_alloc instead of returning NULL.
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            YUYVToYV12(src, tmp_buffer, width, height);

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
            delete [] tmp_buffer;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

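            // The chroma destinations are deliberately swapped: the "U"
            // pointer lands after the V plane and the "V" pointer right
            // after Y, so the decoded I420 data is laid out as YV12
            // (Y, then Cr, then Cb) in tmp_buffer.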
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
            }

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");

            delete [] tmp_buffer;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
    while (1) {
        src = (uint8_t *)get_frame(vinfo);

        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            } else {
                CAMHAL_LOGDA("get frame NULL, sleep 5ms");
                usleep(5000);
                continue;
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
            if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                putback_frame(vinfo);
                continue;
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            if (vinfo->preview.buf.length == b.width * b.height * 3 / 2) {
                memcpy(b.img, src, vinfo->preview.buf.length);
            } else {
                yv12_memcpy_align32(b.img, src, b.width, b.height);
            }
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            YUYVToYV12(src, b.img, width, height);
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            // Same chroma swap as above: decode straight into b.img as YV12.
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
                    b.img + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                putback_frame(vinfo);
                DBG_LOGA("Decode MJPEG frame failed\n");
                continue;
            }
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        break;
    }
#endif
    mKernelBuffer = src;
    ALOGVV("YV12 sensor image captured");
}

void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // Using fixed-point math with 6 bits of fractional precision.
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    // In fixed-point math, saturation point of sensor after gain
    const int saturationPoint = 64 * 255;
    // Fixed-point coefficients for RGB-YUV transform
    // Based on JFIF RGB->YUV transform.
    // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
    const int rgbToY[] = {19, 37, 7};
    const int rgbToCb[] = {-10, -21, 32, 524288};
    const int rgbToCr[] = {32, -26, -5, 524288};
    // Scale back to 8bpp non-fixed-point
    const int scaleOut = 64;
    const int scaleOutSq = scaleOut * scaleOut; // after multiplies

    uint32_t inc = kResolution[0] / stride;
    uint32_t outH = kResolution[1] / inc;
    for (unsigned int y = 0, outY = 0;
         y < kResolution[1]; y += inc, outY++) {
        uint8_t *pxY = img + outY * stride;
        uint8_t *pxVU = img + (outH + outY / 2) * stride;
        mScene.setReadoutPixel(0, y);
        for (unsigned int outX = 0; outX < stride; outX++) {
            int32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            rCount = rCount < saturationPoint ? rCount : saturationPoint;
            gCount = pixel[Scene::Gr] * scale64x;
            gCount = gCount < saturationPoint ? gCount : saturationPoint;
            bCount = pixel[Scene::B] * scale64x;
            bCount = bCount < saturationPoint ? bCount : saturationPoint;

            *pxY++ = (rgbToY[0] * rCount +
                      rgbToY[1] * gCount +
                      rgbToY[2] * bCount) / scaleOutSq;
            if (outY % 2 == 0 && outX % 2 == 0) {
                *pxVU++ = (rgbToCr[0] * rCount +
                           rgbToCr[1] * gCount +
                           rgbToCr[2] * bCount +
                           rgbToCr[3]) / scaleOutSq;
                *pxVU++ = (rgbToCb[0] * rCount +
                           rgbToCb[1] * gCount +
                           rgbToCb[2] * bCount +
                           rgbToCb[3]) / scaleOutSq;
            }
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
    }
#else
    uint8_t *src;
    if (mKernelBuffer) {
        src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            //TODO: scale the cached YUYV frame to the requested size
            //memcpy(img, src, vinfo->preview.buf.length);

        } else
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);

        return;
    }

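    // Dequeue until a complete YUYV frame arrives, then copy it through
    // unchanged; the output format matches the sensor format, so no
    // conversion is needed here.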
    while (1) {
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                break;
            } else {
                CAMHAL_LOGDA("get frame NULL, sleep 5ms");
                usleep(5000);
                continue;
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
            if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
                CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
                putback_frame(vinfo);
                continue;
            }
        }
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            memcpy(img, src, vinfo->preview.buf.length);
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        break;
    }
#endif
    mKernelBuffer = src;
    ALOGVV("YUYV sensor image captured");
}

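// Dump preview state for debugging (e.g. via dumpsys). The V4L2 pixelformat
// field is a fourcc, so %.4s prints its four bytes as a readable tag such as
// "YUYV" or "NV21".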
void Sensor::dump(int fd) {
    String8 result;
    result = String8::format("%s, sensor preview information:\n", __FILE__);
    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height: %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);

    result.appendFormat("camera preview format: %.4s\n\n",
            (char *) &vinfo->preview.format.fmt.pix.pixelformat);

    write(fd, result.string(), result.size());
}

} // namespace android