path: root/v3/fake-pipeline2/Sensor.cpp
blob: bd4ab5b9bae38781ad470f2baaf6bde82bb88f0b
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>

#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
    Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
    * Sensor::kVoltsPerLuxSecond;
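// With the constants above this evaluates to (2000 / 0.520) * 0.100,
// roughly 385 electrons per lux-second.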

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
    Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100;  // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
    Sensor::kReadNoiseStddevBeforeGain *
    Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
    Sensor::kReadNoiseStddevAfterGain *
    Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
    Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
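// For the constants above: 33331760 ns / 1200 rows is roughly 27776 ns per row.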

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

const uint32_t kUsbAvailableSize [10] = {176, 144, 320, 240, 352, 288, 640, 480, 1280, 720};
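// Listed as (width, height) pairs: 176x144, 320x240, 352x288, 640x480 and 1280x720.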

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
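// Illustrative check (not part of the original source): sqrtf_approx(4.0f)
// returns a value within about +/-3.6% of 2.0f. Note the pointer casts rely on
// type punning; a strictly conforming version would copy the bits via memcpy.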

void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & (~31);
    int h;
    for (h = 0; h < height; h++) {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}
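// Illustrative example: for width = 100 the source stride rounds up to
// (100 + 31) & ~31 = 128 pixels, so each padded source row is 128 * 3 bytes
// while only the first 100 * 3 bytes are copied to the packed destination.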

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
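// Example: ALIGN(100, 16) == 112 and ALIGN(112, 16) == 112.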

bool IsUsbAvailableSize(const uint32_t kUsbAvailableSize[], uint32_t width, uint32_t height, int count)
{
    for (int i = 0; i < count; i += 2) {
        if ((width == kUsbAvailableSize[i]) && (height == kUsbAvailableSize[i+1]))
            return true;
    }
    return false;
}

void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}

Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mIoctlSupport(0),
        msupportrotate(0),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera3::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
            || (vinfo->preview.format.fmt.pix.height != height)
            //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
            ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format to a v4l2 format and check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }
    ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
    return BAD_VALUE;
}

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;
    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }
    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("level not in [min,max], min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

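    // The driver range [minimum, maximum] is re-centered on its midpoint so the
    // HAL reports a symmetric EV compensation range; e.g. a driver range of
    // [0, 8] gives middle = 4 and a reported range of [-4, 4], with the default
    // shifted accordingly.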
    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d min=%d max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
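    // Pack the focus-window center into one 32-bit control value: the x center
    // (offset by 1000 so it is non-negative) goes in the high 16 bits and the
    // y center in the low 16 bits. The inputs are assumed to be in the
    // [-1000, 1000] camera coordinate space used by Android metering regions.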
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

bool Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}

bool Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mCapturedBuffers == NULL) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    } else {
        mReadoutComplete.signal();
    }

    *captureTime = mCaptureTime;
    mCapturedBuffers = NULL;
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        if (mCapturedBuffers != NULL) {
            ALOGV("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
#if 0
            if (get_device_status(vinfo)) {
                listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
            }
#endif
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }
    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld us (%f s), fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    CAMHAL_LOGDB("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add an auxiliary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                uint32_t pixelfmt;
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                        b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        DBG_LOGA("");
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    if (!isjpeg) { // The JPEG path copies into its own RGB888 buffer (bAux above),
                   // which is put back separately.
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

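    // picSizes[] is filled with 4-entry tuples: {HAL pixel format, width,
    // height, direction}. The k-loops below keep each format's tuples sorted by
    // decreasing width * height; only the width/height slots are shifted, since
    // format and direction are constant within a pass.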
    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;
#if 0
            if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                int count = sizeof(kUsbAvailableSize)/sizeof(kUsbAvailableSize[0]);
                if (!IsUsbAvailableSize(kUsbAvailableSize, frmsize.discrete.width, frmsize.discrete.height, count))
                    continue;
            }
#endif
            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                    frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;
#if 0
            if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                int count = sizeof(kUsbAvailableSize)/sizeof(kUsbAvailableSize[0]);
                if (!IsUsbAvailableSize(kUsbAvailableSize, frmsize.discrete.width, frmsize.discrete.height, count))
                    continue;
            }
#endif
            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=HAL_PIXEL_FORMAT_YCbCr_420_888\n",
                    frmsize.discrete.width, frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format=HAL_PIXEL_FORMAT_YCbCr_422_I\n",
                        frmsize.discrete.width, frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                // TODO: insert in descending order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel format for JPEG");

    return count;
}

int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0; int framerate = 0; int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // duration decays to a pointer here, so ARRAY_SIZE(duration) would only
    // measure the pointer; clear the caller-provided length instead.
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        // V4L2_PIX_FMT_YVU420
    };
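    // duration[] mirrors the picSizes[] layout as 4-entry tuples of
    // {format, width, height, minimum frame duration in ns}; the reported rates
    // are folded into a few fixed durations, e.g. 66666666 ns = 1/15 s and
    // 33333333 ns = 1/30 s.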

    for (i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size-3];
                fival.height = picSizes[size-2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        duration[count+3] = (int64_t)66666666L; // (int64_t)(framerate), here we can get frame interval from camera driver
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                        // Note: this reads the discrete member of the union even for
                        // continuous/stepwise intervals, as the original code did.
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)66666666L; // (int64_t)(framerate), here we can get frame interval from camera driver
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)66666666L; // (int64_t)(framerate), here we can get frame interval from camera driver
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count > tmp_size)
                            break;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        if (framerate == 5) {
                            duration[count+3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count+3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count+3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count+3] = (int64_t)33333333L;
                        } else {
                            duration[count+3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L; // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };
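    // A discrete frame interval of numerator/denominator seconds converts to
    // numerator * 1000000000 / denominator nanoseconds, e.g. 1/30 s -> 33333333 ns.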

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int)ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            // TODO: insert in descending order
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}

void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}

void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
#else
    uint8_t *src = NULL;
    int ret = 0, rotate = 0;
    uint32_t width = 0, height = 0;

    rotate = getPictureRotate();
    width = vinfo->picture.format.fmt.pix.width;
    height = vinfo->picture.format.fmt.pix.height;

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_capturing(vinfo);
    } else {
        stop_capturing(vinfo);
    }

    ret = start_picture(vinfo, rotate);
    if (ret < 0) {
        ALOGD("start picture failed!");
    }
    while (1) {
        src = (uint8_t *)get_picture(vinfo);
        if (NULL != src) {
            if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
                if (tmp_buffer == NULL) {
                    ALOGE("new buffer failed!\n");
                    return;
                }
                if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                        width, tmp_buffer + width * height, (width + 1) / 2, width,
                        height, width, height, libyuv::FOURCC_MJPG) != 0) {
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    delete [] tmp_buffer; // don't leak the scratch buffer before retrying
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer, img, width, height);
                    delete [] tmp_buffer;
                    break;
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
                    yuyv422_to_rgb24(src, img, width, height);
                    break;
                } else {
                    putback_picture_frame(vinfo);
                    usleep(5000);
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
                if (vinfo->picture.buf.length == width * height * 3) {
                    memcpy(img, src, vinfo->picture.buf.length);
                } else {
                    rgb24_memcpy(img, src, width, height);
                }
                break;
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
                memcpy(img, src, vinfo->picture.buf.length);
                break;
            }
        }
    }
    ALOGD("get picture success!");

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}

void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
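    // YUYV packs two pixels into four bytes (Y0 U Y1 V); NV21 is a full
    // resolution Y plane followed by an interleaved VU plane subsampled 2x2.
    // The first pass copies every Y sample; the second averages each pair of
    // vertically adjacent U/V samples (rounding up) into the VU plane.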
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; // v
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; // u
        }
        src += width * 2 * 2;
    }

    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); // v
            *dst++ = *(src + 1 + j); // u
        }
}

void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
{
    // width should be an even number.
    // uv ALIGN 32.
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width/2, 16);
    c_size = c_stride * height/2;
    cr_offset = y_size;
    cb_offset = y_size + c_size;

    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) { // one line has 2*width bytes for yuyv.
            // ceil((u1 + u2) / 2)
            *(dst + cr_offset + j/4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j/4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
}
1937
1938
1939void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
1940#if 0
1941 float totalGain = gain/100.0 * kBaseGainFactor;
1942 // Using fixed-point math with 6 bits of fractional precision.
1943 // In fixed-point math, calculate total scaling from electrons to 8bpp
1944 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1945 // In fixed-point math, saturation point of sensor after gain
1946 const int saturationPoint = 64 * 255;
1947 // Fixed-point coefficients for RGB-YUV transform
1948 // Based on JFIF RGB->YUV transform.
1949 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
1950 const int rgbToY[] = {19, 37, 7};
1951 const int rgbToCb[] = {-10,-21, 32, 524288};
1952 const int rgbToCr[] = {32,-26, -5, 524288};
1953 // Scale back to 8bpp non-fixed-point
1954 const int scaleOut = 64;
1955 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
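    // With scale64x applied to each channel, every output below is divided by
    // scaleOutSq = 64 * 64 = 4096. The luma weights 19/64, 37/64 and 7/64
    // approximate the JFIF 0.299, 0.587 and 0.114, and the Cb/Cr offset
    // 524288 / 4096 = 128 is the usual chroma midpoint.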
1956
1957 uint32_t inc = kResolution[0] / stride;
1958 uint32_t outH = kResolution[1] / inc;
1959 for (unsigned int y = 0, outY = 0;
1960 y < kResolution[1]; y+=inc, outY++) {
1961 uint8_t *pxY = img + outY * stride;
1962 uint8_t *pxVU = img + (outH + outY / 2) * stride;
1963 mScene.setReadoutPixel(0,y);
1964 for (unsigned int outX = 0; outX < stride; outX++) {
1965 int32_t rCount, gCount, bCount;
1966 // TODO: Perfect demosaicing is a cheat
1967 const uint32_t *pixel = mScene.getPixelElectrons();
1968 rCount = pixel[Scene::R] * scale64x;
1969 rCount = rCount < saturationPoint ? rCount : saturationPoint;
1970 gCount = pixel[Scene::Gr] * scale64x;
1971 gCount = gCount < saturationPoint ? gCount : saturationPoint;
1972 bCount = pixel[Scene::B] * scale64x;
1973 bCount = bCount < saturationPoint ? bCount : saturationPoint;
1974
1975 *pxY++ = (rgbToY[0] * rCount +
1976 rgbToY[1] * gCount +
1977 rgbToY[2] * bCount) / scaleOutSq;
1978 if (outY % 2 == 0 && outX % 2 == 0) {
1979 *pxVU++ = (rgbToCr[0] * rCount +
1980 rgbToCr[1] * gCount +
1981 rgbToCr[2] * bCount +
1982 rgbToCr[3]) / scaleOutSq;
1983 *pxVU++ = (rgbToCb[0] * rCount +
1984 rgbToCb[1] * gCount +
1985 rgbToCb[2] * bCount +
1986 rgbToCb[3]) / scaleOutSq;
1987 }
1988 for (unsigned int j = 1; j < inc; j++)
1989 mScene.getPixelElectrons();
1990 }
1991 }
1992#else
1993 uint8_t *src;
1994
1995 if (mKernelBuffer) {
1996 src = mKernelBuffer;
        // After the first capture, mKernelBuffer always holds NV21 data (every
        // branch of the capture loop below stores converted NV21 into b.img),
        // so NV21, YUYV and MJPEG sources all take the same path here.
        uint32_t format = vinfo->preview.format.fmt.pix.pixelformat;
        if (format == V4L2_PIX_FMT_NV21 || format == V4L2_PIX_FMT_YUYV ||
            format == V4L2_PIX_FMT_MJPEG) {
            uint32_t width = vinfo->preview.format.fmt.pix.width;
            uint32_t height = vinfo->preview.format.fmt.pix.height;
            if ((width == b.width) && (height == b.height)) {
                memcpy(b.img, src, b.width * b.height * 3 / 2);
            } else {
                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
            }
        } else {
            ALOGE("Unknown sensor format: %d", format);
        }
        return;
    }
    while (1) {
        src = (uint8_t *)get_frame(vinfo);
        if (NULL == src) {
            if (get_device_status(vinfo)) {
                // A true status here is taken to mean the device is gone
                // (e.g. USB camera unplugged), so stop waiting for frames.
                break;
            } else {
                CAMHAL_LOGDA("get frame NULL, sleep 5ms");
                usleep(5000);
                continue;
            }
        }
2039
2040 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2041 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2042 DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2043 putback_frame(vinfo);
2044 continue;
2045 }
2046 }
2047 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2048 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2049 memcpy(b.img, src, vinfo->preview.buf.length);
2050 } else {
2051 nv21_memcpy_align32 (b.img, src, b.width, b.height);
2052 }
2053 mKernelBuffer = b.img;
2054 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2055 int width = vinfo->preview.format.fmt.pix.width;
2056 int height = vinfo->preview.format.fmt.pix.height;
2057 YUYVToNV21(src, b.img, width, height);
2058 mKernelBuffer = b.img;
2059 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2060 int width = vinfo->preview.format.fmt.pix.width;
2061 int height = vinfo->preview.format.fmt.pix.height;
2062 if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img,
2063 width, b.img + width * height, (width + 1) / 2, width,
2064 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2065 putback_frame(vinfo);
2066 DBG_LOGA("Decode MJPEG frame failed\n");
2067 continue;
2068 }
2069 mKernelBuffer = b.img;
2070 }
2071
2072 break;
2073 }
2074#endif
2075
2076 ALOGVV("NV21 sensor image captured");
2077}
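// NV21 plane bookkeeping assumed by the memcpy paths above, written out as a
// hypothetical helper (not part of this HAL) under the file's #if 0 convention:
// the Y plane is followed directly by one interleaved V/U plane at half height.
#if 0
struct NV21Planes {
    uint8_t *y;  // width * height luma bytes
    uint8_t *vu; // width * height / 2 interleaved V/U bytes
};

static NV21Planes nv21Planes(uint8_t *buf, int width, int height)
{
    NV21Planes p;
    p.y  = buf;
    p.vu = buf + width * height; // total buffer size: width * height * 3 / 2
    return p;
}
#endif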
2078
2079void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
2080#if 0
2081 float totalGain = gain/100.0 * kBaseGainFactor;
2082 // Using fixed-point math with 6 bits of fractional precision.
2083 // In fixed-point math, calculate total scaling from electrons to 8bpp
2084 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2085 // In fixed-point math, saturation point of sensor after gain
2086 const int saturationPoint = 64 * 255;
2087 // Fixed-point coefficients for RGB-YUV transform
2088 // Based on JFIF RGB->YUV transform.
2089 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2090 const int rgbToY[] = {19, 37, 7};
2091 const int rgbToCb[] = {-10,-21, 32, 524288};
2092 const int rgbToCr[] = {32,-26, -5, 524288};
2093 // Scale back to 8bpp non-fixed-point
2094 const int scaleOut = 64;
2095 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2096
2097 uint32_t inc = kResolution[0] / stride;
2098 uint32_t outH = kResolution[1] / inc;
2099 for (unsigned int y = 0, outY = 0;
2100 y < kResolution[1]; y+=inc, outY++) {
2101 uint8_t *pxY = img + outY * stride;
2102 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2103 mScene.setReadoutPixel(0,y);
2104 for (unsigned int outX = 0; outX < stride; outX++) {
2105 int32_t rCount, gCount, bCount;
2106 // TODO: Perfect demosaicing is a cheat
2107 const uint32_t *pixel = mScene.getPixelElectrons();
2108 rCount = pixel[Scene::R] * scale64x;
2109 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2110 gCount = pixel[Scene::Gr] * scale64x;
2111 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2112 bCount = pixel[Scene::B] * scale64x;
2113 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2114
2115 *pxY++ = (rgbToY[0] * rCount +
2116 rgbToY[1] * gCount +
2117 rgbToY[2] * bCount) / scaleOutSq;
2118 if (outY % 2 == 0 && outX % 2 == 0) {
2119 *pxVU++ = (rgbToCr[0] * rCount +
2120 rgbToCr[1] * gCount +
2121 rgbToCr[2] * bCount +
2122 rgbToCr[3]) / scaleOutSq;
2123 *pxVU++ = (rgbToCb[0] * rCount +
2124 rgbToCb[1] * gCount +
2125 rgbToCb[2] * bCount +
2126 rgbToCb[3]) / scaleOutSq;
2127 }
2128 for (unsigned int j = 1; j < inc; j++)
2129 mScene.getPixelElectrons();
2130 }
2131 }
2132#else
2133 uint8_t *src;
2134 if (mKernelBuffer) {
2135 src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
            ALOGI("Scale YV12 frame down \n");

            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            // The buffers are YV12 (Cr before Cb), but I420Scale only scales
            // the planes it is handed; since source and destination use the
            // same plane order, the result is still valid YV12.
            int ret = libyuv::I420Scale(src, width,
                    src + width * height, width / 2,
                    src + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            // Note: YUYVToYV12 aligns its chroma stride to 16 bytes, so the
            // width / 2 stride arguments below only match the actual layout
            // when width / 2 is itself a multiple of 16.
            YUYVToYV12(src, tmp_buffer, width, height);

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
            delete [] tmp_buffer;
        } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
            int width = vinfo->preview.format.fmt.pix.width;
            int height = vinfo->preview.format.fmt.pix.height;
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }

            // The U and V destination pointers are deliberately swapped, so
            // the decoded I420 data lands in YV12 (Cr-first) plane order.
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
            }

            int ret = libyuv::I420Scale(tmp_buffer, width,
                    tmp_buffer + width * height, width / 2,
                    tmp_buffer + width * height + width * height / 4, width / 2,
                    width, height,
                    b.img, b.width,
                    b.img + b.width * b.height, b.width / 2,
                    b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
                    b.width, b.height,
                    libyuv::kFilterNone);
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");

            delete [] tmp_buffer;
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }
        return;
    }
    while (1) {
2212 src = (uint8_t *)get_frame(vinfo);
2213
2214 if (NULL == src) {
2215 if (get_device_status(vinfo)) {
2216 break;
2217 } else {
2218 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2219 usleep(5000);
2220 continue;
2221 }
2222 }
2223 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2224 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2225 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2226 putback_frame(vinfo);
2227 continue;
2228 }
2229 }
2230 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2231 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2232 memcpy(b.img, src, vinfo->preview.buf.length);
2233 } else {
2234 yv12_memcpy_align32 (b.img, src, b.width, b.height);
2235 }
2236 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2237 int width = vinfo->preview.format.fmt.pix.width;
2238 int height = vinfo->preview.format.fmt.pix.height;
2239 YUYVToYV12(src, b.img, width, height);
2240 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2241 int width = vinfo->preview.format.fmt.pix.width;
2242 int height = vinfo->preview.format.fmt.pix.height;
2243 if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
2244 b.img + width * height, (width + 1) / 2, 0, 0, width, height,
2245 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2246 putback_frame(vinfo);
2247 DBG_LOGA("Decode MJPEG frame failed\n");
2248 continue;
2249 }
2250 } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2252 }
2253
2254 break;
2255 }
2256#endif
2257 mKernelBuffer = src;
2258 ALOGVV("YV12 sensor image captured");
2259}
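// The three I420Scale calls above repeat the same plane arithmetic. A
// hypothetical wrapper (not part of this HAL, shown under the file's #if 0
// convention) makes the pattern explicit. It assumes tightly packed planes
// with half-width chroma strides, and works for I420 and YV12 alike because
// the second and third planes are scaled independently in whatever order
// they are passed.
#if 0
static int scaleYUV420(uint8_t *src, int sw, int sh,
                       uint8_t *dst, int dw, int dh)
{
    return libyuv::I420Scale(src, sw,
            src + sw * sh, sw / 2,
            src + sw * sh + sw * sh / 4, sw / 2,
            sw, sh,
            dst, dw,
            dst + dw * dh, dw / 2,
            dst + dw * dh + dw * dh / 4, dw / 2,
            dw, dh,
            libyuv::kFilterNone);
}
#endif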
2260
2261void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
2262#if 0
2263 float totalGain = gain/100.0 * kBaseGainFactor;
2264 // Using fixed-point math with 6 bits of fractional precision.
2265 // In fixed-point math, calculate total scaling from electrons to 8bpp
2266 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2267 // In fixed-point math, saturation point of sensor after gain
2268 const int saturationPoint = 64 * 255;
2269 // Fixed-point coefficients for RGB-YUV transform
2270 // Based on JFIF RGB->YUV transform.
2271 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2272 const int rgbToY[] = {19, 37, 7};
2273 const int rgbToCb[] = {-10,-21, 32, 524288};
2274 const int rgbToCr[] = {32,-26, -5, 524288};
2275 // Scale back to 8bpp non-fixed-point
2276 const int scaleOut = 64;
2277 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2278
2279 uint32_t inc = kResolution[0] / stride;
2280 uint32_t outH = kResolution[1] / inc;
2281 for (unsigned int y = 0, outY = 0;
2282 y < kResolution[1]; y+=inc, outY++) {
2283 uint8_t *pxY = img + outY * stride;
2284 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2285 mScene.setReadoutPixel(0,y);
2286 for (unsigned int outX = 0; outX < stride; outX++) {
2287 int32_t rCount, gCount, bCount;
2288 // TODO: Perfect demosaicing is a cheat
2289 const uint32_t *pixel = mScene.getPixelElectrons();
2290 rCount = pixel[Scene::R] * scale64x;
2291 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2292 gCount = pixel[Scene::Gr] * scale64x;
2293 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2294 bCount = pixel[Scene::B] * scale64x;
2295 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2296
2297 *pxY++ = (rgbToY[0] * rCount +
2298 rgbToY[1] * gCount +
2299 rgbToY[2] * bCount) / scaleOutSq;
2300 if (outY % 2 == 0 && outX % 2 == 0) {
2301 *pxVU++ = (rgbToCr[0] * rCount +
2302 rgbToCr[1] * gCount +
2303 rgbToCr[2] * bCount +
2304 rgbToCr[3]) / scaleOutSq;
2305 *pxVU++ = (rgbToCb[0] * rCount +
2306 rgbToCb[1] * gCount +
2307 rgbToCb[2] * bCount +
2308 rgbToCb[3]) / scaleOutSq;
2309 }
2310 for (unsigned int j = 1; j < inc; j++)
2311 mScene.getPixelElectrons();
2312 }
2313 }
2314#else
2315 uint8_t *src;
2316 if (mKernelBuffer) {
2317 src = mKernelBuffer;
        if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
            //TODO YUYV scale; one possible approach is sketched after this function.
            //memcpy(img, src, vinfo->preview.buf.length);
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        return;
    }
2327
    while (1) {
2329 src = (uint8_t *)get_frame(vinfo);
2330 if (NULL == src) {
2331 if (get_device_status(vinfo)) {
2332 break;
2333 } else {
2334 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2335 usleep(5000);
2336 continue;
2337 }
2338 }
2339 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2340 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2341 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2342 putback_frame(vinfo);
2343 continue;
2344 }
2345 }
2346 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2347 memcpy(img, src, vinfo->preview.buf.length);
2348 } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2350 }
2351
2352 break;
2353 }
2354#endif
2355 mKernelBuffer = src;
2356 ALOGVV("YUYV sensor image captured");
2357}
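// One way to fill in the "YUYV scale" TODO above, using only long-standing
// libyuv entry points (YUY2ToI420, I420Scale, I420ToYUY2) that ship with the
// library this HAL already links: convert to I420, scale, convert back. This
// is a sketch under the file's #if 0 convention, not the path this HAL takes;
// the single temporary allocation assumes tightly packed I420 planes.
#if 0
static int scaleYUYV(const uint8_t *src, int sw, int sh,
                     uint8_t *dst, int dw, int dh)
{
    uint8_t *tmp = new (std::nothrow) uint8_t[sw * sh * 3 / 2 + dw * dh * 3 / 2];
    if (tmp == NULL)
        return -1;
    uint8_t *in  = tmp;                    // I420 copy of the source frame
    uint8_t *out = tmp + sw * sh * 3 / 2;  // I420 scratch at the target size

    libyuv::YUY2ToI420(src, sw * 2,
            in, sw,
            in + sw * sh, sw / 2,
            in + sw * sh + sw * sh / 4, sw / 2,
            sw, sh);
    libyuv::I420Scale(in, sw,
            in + sw * sh, sw / 2,
            in + sw * sh + sw * sh / 4, sw / 2,
            sw, sh,
            out, dw,
            out + dw * dh, dw / 2,
            out + dw * dh + dw * dh / 4, dw / 2,
            dw, dh,
            libyuv::kFilterNone);
    libyuv::I420ToYUY2(out, dw,
            out + dw * dh, dw / 2,
            out + dw * dh + dw * dh / 4, dw / 2,
            dst, dw * 2,
            dw, dh);
    delete [] tmp;
    return 0;
}
#endif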
2358
2359void Sensor::dump(int fd) {
2360 String8 result;
    result = String8::format("%s, sensor preview information:\n", __FILE__);
    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height: %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);

    // The V4L2 pixel format is a fourcc, so printing its four bytes with %.4s
    // yields a readable tag such as "NV21" or "YUYV".
    result.appendFormat("camera preview format: %.4s\n\n",
            (char *) &vinfo->preview.format.fmt.pix.pixelformat);
2368
2369 write(fd, result.string(), result.size());
2370}
2371
2372} // namespace android
2373
2374