path: root/v3/fake-pipeline2/Sensor.cpp (plain)
blob: 7dc38fceda0fab43bacfc80544c815b01832d220
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>

#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
    Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
    * Sensor::kVoltsPerLuxSecond;

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
    Sensor::kSaturationElectrons;

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
    Sensor::kReadNoiseStddevBeforeGain *
    Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
    Sensor::kReadNoiseStddevAfterGain *
    Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
    Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
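// With the constants above this is 33331760 ns / 1200 rows, roughly 27.8 us
// per row.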

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

const uint32_t kUsbAvailableSize[10] = {176, 144, 320, 240, 352, 288, 640, 480, 1280, 720};
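// The entries above are (width, height) pairs: 176x144, 320x240, 352x288,
// 640x480 and 1280x720; IsUsbAvailableSize() scans them two at a time.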

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}
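// Note: the pointer casts above type-pun the float's bit pattern as an
// int32_t; this is the same bit-manipulation family as the classic fast
// inverse square root trick.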
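// Copy a tightly packed width x height RGB24 image out of a source buffer
// whose rows are padded to a 32-pixel-aligned stride.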
void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & (~31);
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
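// For example, ALIGN(30, 16) == 32 and ALIGN(32, 16) == 32.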

bool IsUsbAvailableSize(const uint32_t kUsbAvailableSize[], uint32_t width, uint32_t height, int count)
{
    int i;
    for (i = 0; i < count; i += 2) {
        if ((width == kUsbAvailableSize[i]) && (height == kUsbAvailableSize[i+1])) {
            return true;
        }
    }
    return false;
}

void ReSizeNV21(struct VideoInfo *vinfo, uint8_t *src, uint8_t *img, uint32_t width, uint32_t height)
{
    structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width,
                             (mmInt32)vinfo->preview.format.fmt.pix.height,
                             (mmInt32)vinfo->preview.format.fmt.pix.width,
                             IC_FORMAT_YCbCr420_lp,
                             (mmByte *) src,
                             (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height,
                             0};

    structConvImage output = {(mmInt32)width,
                              (mmInt32)height,
                              (mmInt32)width,
                              IC_FORMAT_YCbCr420_lp,
                              (mmByte *) img,
                              (mmByte *) img + width * height,
                              0};

    if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
        ALOGE("Scale NV21 frame down failed!\n");
}

Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mExitSensorThread(false),
        mIoctlSupport(0),
        msupportrotate(0),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    //shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera3::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera support capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
        || (vinfo->preview.format.fmt.pix.height != height)
        //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
        ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format to a v4l2 format and check whether
 * it is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }
    ALOGE("%s, Unable to find a supported sensor format!", __FUNCTION__);
    return BAD_VALUE;
}

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;
    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }
    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

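// Wake up every thread that may be blocked waiting on the sensor so that
// threadLoop() can observe mExitSensorThread and exit cleanly.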
void Sensor::sendExitSingalToSensor() {
    {
        Mutex::Autolock lock(mReadoutMutex);
        mExitSensorThread = true;
        mReadoutComplete.signal();
    }

    {
        Mutex::Autolock lock(mControlMutex);
        mVSync.signal();
    }

    {
        Mutex::Autolock lock(mReadoutMutex);
        mReadoutAvailable.signal();
    }
}

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("level not in [min, max]; using min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d min=%d max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

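// Pack the focus-area center into a single V4L2 control value:
// ((cx + 1000) << 16) | ((cy + 1000) & 0xffff), where (cx, cy) is the
// midpoint of the rectangle. The +1000 offset presumably remaps Android's
// [-1000, 1000] metering-area coordinates to a non-negative range.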
status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

status_t Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);
    if (mExitSensorThread) {
        return -1;
    }

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}

status_t Sensor::waitForNewFrame(nsecs_t reltime,
        nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mExitSensorThread) {
        return -1;
    }

    if (mCapturedBuffers == NULL) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    } else {
        mReadoutComplete.signal();
    }

    *captureTime = mCaptureTime;
    mCapturedBuffers = NULL;
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        if (mCapturedBuffers != NULL) {
            ALOGV("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }

    if (mExitSensorThread) {
        return false;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
#if 0
            if (get_device_status(vinfo)) {
                listener->onSensorEvent(frameNumber, SensorListener::ERROR_CAMERA_DEVICE, mNextCaptureTime);
            }
#endif
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }

    if (mExitSensorThread) {
        return false;
    }

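    // Update the measured frame rate once every 100 frames.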
    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld, interval=%f, fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    uint32_t gain = mGainFactor;
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

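// Fill every destination buffer for this frame according to its pixel
// format. For BLOB (JPEG) outputs, an auxiliary intermediate buffer is
// appended to mNextCapturedBuffers and consumed later in the JPEG path.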
int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    CAMHAL_LOGDB("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add auxiliary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                uint32_t pixelfmt;
                if ((b.width == vinfo->preview.format.fmt.pix.width &&
                        b.height == vinfo->preview.format.fmt.pix.height) && (orientation == 0)) {

                    pixelfmt = getOutputFormat();
                    if (pixelfmt == V4L2_PIX_FMT_YVU420) {
                        pixelfmt = HAL_PIXEL_FORMAT_YV12;
                    } else if (pixelfmt == V4L2_PIX_FMT_NV21) {
                        DBG_LOGA("");
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    } else if (pixelfmt == V4L2_PIX_FMT_YUYV) {
                        pixelfmt = HAL_PIXEL_FORMAT_YCbCr_422_I;
                    } else {
                        pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
                    }
                } else {
                    isjpeg = true;
                    pixelfmt = HAL_PIXEL_FORMAT_RGB_888;
                }

                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = pixelfmt;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = pixelfmt;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = pixelfmt;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    if (!isjpeg) { // A JPEG source buffer (RGB888) lives in its own buffer
                   // struct and is put back separately.
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

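// Each stream configuration occupies four consecutive picSizes[] entries:
// {HAL pixel format, width, height,
//  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT}, kept sorted by
// descending resolution within each format block. Returns the number of
// entries written.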
int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
    int res;
    int i, j, k, START;
    int count = 0;
    int pixelfmt;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete sizes are supported

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;
#if 0
            if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                int count = sizeof(kUsbAvailableSize)/sizeof(kUsbAvailableSize[0]);
                if (!IsUsbAvailableSize(kUsbAvailableSize, frmsize.discrete.width, frmsize.discrete.height, count))
                    continue;
            }
#endif
            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                    frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete sizes are supported

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;
#if 0
            if ((frmsize.pixel_format == V4L2_PIX_FMT_MJPEG) || (frmsize.pixel_format == V4L2_PIX_FMT_YUYV)) {
                int count = sizeof(kUsbAvailableSize)/sizeof(kUsbAvailableSize[0]);
                if (!IsUsbAvailableSize(kUsbAvailableSize, frmsize.discrete.width, frmsize.discrete.height, count))
                    continue;
            }
#endif
            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format =\
                    HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                    frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete sizes are supported

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format =\
                        HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                        frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete sizes are supported

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                // TODO: insert in descending order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no support pixel fmt for jpeg");

    return count;
}

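// For each resolution recorded in picSizes[], emit a matching quadruple
// {format, width, height, min frame duration in ns} into duration[]; the
// duration is derived from the highest frame rate the driver reports via
// VIDIOC_ENUM_FRAMEINTERVALS, falling back to 1/15 s.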
int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
    int ret = 0; int framerate = 0; int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // Note: clear all `size` entries; ARRAY_SIZE() cannot be used on a
    // pointer parameter.
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        //V4L2_PIX_FMT_YVU420
    };

    for (i = 0; i < (int) ARRAY_SIZE(pixelfmt_tbl); i++)
    {
        /* we got all durations for each resolution of the previous format */
        if (count >= tmp_size)
            break;

        for ( ; size > 0; size -= 4)
        {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++)
            {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size-3];
                fival.height = picSizes[size-2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        duration[count+3] = (int64_t)66666666L; // TODO: derive from the frame interval the driver reports
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)66666666L; // TODO: derive from the frame interval the driver reports
                        j++;
                    } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        temp_rate = fival.discrete.denominator/fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        duration[count+3] = (int64_t)66666666L; // TODO: derive from the frame interval the driver reports
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count >= tmp_size)
                            break;
                        duration[count+0] = (int64_t)(picSizes[size-4]);
                        duration[count+1] = (int64_t)(picSizes[size-3]);
                        duration[count+2] = (int64_t)(picSizes[size-2]);
                        if (framerate == 5) {
                            duration[count+3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count+3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count+3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count+3] = (int64_t)33333333L;
                        } else {
                            duration[count+3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L; // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int) ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%dx%d", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only discrete sizes are supported

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            // TODO: insert in descending order
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}

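// Simulate a Bayer RAW readout. With the constants above,
// kBaseGainFactor = kMaxRawValue / kSaturationElectrons = 2.0, so totalGain
// below is (gain / 100) * 2.0 digital counts per electron.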
void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++ ) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}

void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
#if 0
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) {
        mScene.setReadoutPixel(0, y);
        uint8_t *px = img + outY * stride * 3;
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
#else
    uint8_t *src = NULL;
    int ret = 0, rotate = 0;
    uint32_t width = 0, height = 0;

    rotate = getPictureRotate();
    width = vinfo->picture.format.fmt.pix.width;
    height = vinfo->picture.format.fmt.pix.height;

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_capturing(vinfo);
    } else {
        stop_capturing(vinfo);
    }

    ret = start_picture(vinfo, rotate);
    if (ret < 0)
    {
        ALOGD("start picture failed!");
    }
    while (1)
    {
        src = (uint8_t *)get_picture(vinfo);
        if (NULL != src) {
            if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
                uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2];
                if (tmp_buffer == NULL) {
                    ALOGE("new buffer failed!\n");
                    return;
                }
                if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                        width, tmp_buffer + width * height, (width + 1) / 2, width,
                        height, width, height, libyuv::FOURCC_MJPG) != 0) {
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    delete [] tmp_buffer; // don't leak the scratch buffer when retrying
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer, img, width, height);
                    delete [] tmp_buffer;
                    break;
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
                if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) {
                    yuyv422_to_rgb24(src, img, width, height);
                    break;
                } else {
                    putback_picture_frame(vinfo);
                    usleep(5000);
                }
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) {
                if (vinfo->picture.buf.length == width * height * 3) {
                    memcpy(img, src, vinfo->picture.buf.length);
                } else {
                    rgb24_memcpy(img, src, width, height);
                }
                break;
            } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
                memcpy(img, src, vinfo->picture.buf.length);
                break;
            }
        }
    }
    ALOGD("get picture success!");

    if (mSensorType == SENSOR_USB) {
        releasebuf_and_stop_picture(vinfo);
    } else {
        stop_picture(vinfo);
    }

#endif
}

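// Convert packed YUYV (Y0 U0 Y1 V0) to NV21: copy every other byte to build
// the Y plane, then produce one interleaved VU pair per 2x2 block by
// averaging the chroma of two adjacent rows.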
void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height)
{
    for (int i = 0; i < width * height * 2; i += 2) {
        *dst++ = *(src + i);
    }

    for (int y = 0; y < height - 1; y += 2) {
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; // v
            *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; // u
        }
        src += width * 2 * 2;
    }

    if (height & 1)
        for (int j = 0; j < width * 2; j += 4) {
            *dst++ = *(src + 3 + j); // v
            *dst++ = *(src + 1 + j); // u
        }
}
1943
1944void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height)
1945{
1946 //width should be an even number.
1947 //uv ALIGN 32.
1948 int i,j,stride,c_stride,c_size,y_size,cb_offset,cr_offset;
1949 unsigned char *dst_copy,*src_copy;
1950
1951 dst_copy = dst;
1952 src_copy = src;
1953
1954 y_size = width*height;
1955 c_stride = ALIGN(width/2, 16);
1956 c_size = c_stride * height/2;
1957 cr_offset = y_size;
1958 cb_offset = y_size+c_size;
1959
1960    for (i = 0; i < y_size; i++) { // Y samples are every other byte
1961 *dst++ = *src;
1962 src += 2;
1963 }
1964
1965 dst = dst_copy;
1966 src = src_copy;
1967
1968    for (i = 0; i < height; i += 2) {
1969        for (j = 1; j < width * 2; j += 4) { // one YUYV line holds 2*width bytes
1970            // rounding average of vertically adjacent chroma: (c1 + c2 + 1) / 2
1971            *(dst + cr_offset + j/4) = (*(src + j + 2) + *(src + j + 2 + width*2) + 1) / 2;
1972            *(dst + cb_offset + j/4) = (*(src + j) + *(src + j + width*2) + 1) / 2;
1973 }
1974 dst += c_stride;
1975 src += width*4;
1976 }
1977}
1978
1979
1980void Sensor::captureNV21(StreamBuffer b, uint32_t gain) {
1981#if 0
1982 float totalGain = gain/100.0 * kBaseGainFactor;
1983 // Using fixed-point math with 6 bits of fractional precision.
1984 // In fixed-point math, calculate total scaling from electrons to 8bpp
1985 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
1986 // In fixed-point math, saturation point of sensor after gain
1987 const int saturationPoint = 64 * 255;
1988 // Fixed-point coefficients for RGB-YUV transform
1989 // Based on JFIF RGB->YUV transform.
1990 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
1991 const int rgbToY[] = {19, 37, 7};
1992 const int rgbToCb[] = {-10,-21, 32, 524288};
1993 const int rgbToCr[] = {32,-26, -5, 524288};
1994 // Scale back to 8bpp non-fixed-point
1995 const int scaleOut = 64;
1996 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
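        // Sanity check on the coefficients above: the JFIF weights
        // {0.299, 0.587, 0.114} scaled by 64 give {19.1, 37.6, 7.3},
        // hence rgbToY = {19, 37, 7}; the Cb/Cr bias of 128 is scaled
        // by 64 twice (coefficients and counts): 128 * 64 * 64 = 524288.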
1997
1998 uint32_t inc = kResolution[0] / stride;
1999 uint32_t outH = kResolution[1] / inc;
2000 for (unsigned int y = 0, outY = 0;
2001 y < kResolution[1]; y+=inc, outY++) {
2002 uint8_t *pxY = img + outY * stride;
2003 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2004 mScene.setReadoutPixel(0,y);
2005 for (unsigned int outX = 0; outX < stride; outX++) {
2006 int32_t rCount, gCount, bCount;
2007 // TODO: Perfect demosaicing is a cheat
2008 const uint32_t *pixel = mScene.getPixelElectrons();
2009 rCount = pixel[Scene::R] * scale64x;
2010 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2011 gCount = pixel[Scene::Gr] * scale64x;
2012 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2013 bCount = pixel[Scene::B] * scale64x;
2014 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2015
2016 *pxY++ = (rgbToY[0] * rCount +
2017 rgbToY[1] * gCount +
2018 rgbToY[2] * bCount) / scaleOutSq;
2019 if (outY % 2 == 0 && outX % 2 == 0) {
2020 *pxVU++ = (rgbToCr[0] * rCount +
2021 rgbToCr[1] * gCount +
2022 rgbToCr[2] * bCount +
2023 rgbToCr[3]) / scaleOutSq;
2024 *pxVU++ = (rgbToCb[0] * rCount +
2025 rgbToCb[1] * gCount +
2026 rgbToCb[2] * bCount +
2027 rgbToCb[3]) / scaleOutSq;
2028 }
2029 for (unsigned int j = 1; j < inc; j++)
2030 mScene.getPixelElectrons();
2031 }
2032 }
2033#else
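    // Real-device path: if a previously converted frame is cached in
    // mKernelBuffer, copy or resize it into the stream buffer; otherwise
    // pull a frame from V4L2 below and convert it to NV21 in b.img.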
2034 uint8_t *src;
2035
2036 if (mKernelBuffer) {
2037 src = mKernelBuffer;
2038        if ((vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) ||
                (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) ||
                (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG)) {
            // mKernelBuffer already holds NV21 data produced by the capture
            // loop below, so all three source formats take the same path here.
2039            uint32_t width = vinfo->preview.format.fmt.pix.width;
2040            uint32_t height = vinfo->preview.format.fmt.pix.height;
2041            if ((width == b.width) && (height == b.height)) {
2042                memcpy(b.img, src, b.width * b.height * 3/2);
2043            } else {
2044                ReSizeNV21(vinfo, src, b.img, b.width, b.height);
2045            }
2064        } else {
2065            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2066        }
2067        return;
2068 }
2069    while (1) {
2070 if (mExitSensorThread) {
2071 break;
2072 }
2073
2074 src = (uint8_t *)get_frame(vinfo);
2075 if (NULL == src) {
2076 if (get_device_status(vinfo)) {
2077 break;
2078 }
2079 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2080 usleep(5000);
2081 continue;
2082 }
2083
2084 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2085 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2086 DBG_LOGB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2087 putback_frame(vinfo);
2088 continue;
2089 }
2090 }
2091 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) {
2092 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2093 memcpy(b.img, src, vinfo->preview.buf.length);
2094 } else {
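                    // buffer length differs from the packed NV21 size, so the
                    // driver rows presumably carry 32-byte-aligned padding that
                    // this helper strips while copying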
2095 nv21_memcpy_align32 (b.img, src, b.width, b.height);
2096 }
2097 mKernelBuffer = b.img;
2098 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2099 int width = vinfo->preview.format.fmt.pix.width;
2100 int height = vinfo->preview.format.fmt.pix.height;
2101 YUYVToNV21(src, b.img, width, height);
2102 mKernelBuffer = b.img;
2103 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2104 int width = vinfo->preview.format.fmt.pix.width;
2105 int height = vinfo->preview.format.fmt.pix.height;
2106 if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img,
2107 width, b.img + width * height, (width + 1) / 2, width,
2108 height, width, height, libyuv::FOURCC_MJPG) != 0) {
2109 putback_frame(vinfo);
2110 DBG_LOGA("Decode MJPEG frame failed\n");
2111 continue;
2112 }
2113 mKernelBuffer = b.img;
2114 }
2115
2116 break;
2117 }
2118#endif
2119
2120 ALOGVV("NV21 sensor image captured");
2121}
2122
2123void Sensor::captureYV12(StreamBuffer b, uint32_t gain) {
2124#if 0
2125 float totalGain = gain/100.0 * kBaseGainFactor;
2126 // Using fixed-point math with 6 bits of fractional precision.
2127 // In fixed-point math, calculate total scaling from electrons to 8bpp
2128 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2129 // In fixed-point math, saturation point of sensor after gain
2130 const int saturationPoint = 64 * 255;
2131 // Fixed-point coefficients for RGB-YUV transform
2132 // Based on JFIF RGB->YUV transform.
2133 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2134 const int rgbToY[] = {19, 37, 7};
2135 const int rgbToCb[] = {-10,-21, 32, 524288};
2136 const int rgbToCr[] = {32,-26, -5, 524288};
2137 // Scale back to 8bpp non-fixed-point
2138 const int scaleOut = 64;
2139 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2140
2141 uint32_t inc = kResolution[0] / stride;
2142 uint32_t outH = kResolution[1] / inc;
2143 for (unsigned int y = 0, outY = 0;
2144 y < kResolution[1]; y+=inc, outY++) {
2145 uint8_t *pxY = img + outY * stride;
2146 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2147 mScene.setReadoutPixel(0,y);
2148 for (unsigned int outX = 0; outX < stride; outX++) {
2149 int32_t rCount, gCount, bCount;
2150 // TODO: Perfect demosaicing is a cheat
2151 const uint32_t *pixel = mScene.getPixelElectrons();
2152 rCount = pixel[Scene::R] * scale64x;
2153 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2154 gCount = pixel[Scene::Gr] * scale64x;
2155 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2156 bCount = pixel[Scene::B] * scale64x;
2157 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2158
2159 *pxY++ = (rgbToY[0] * rCount +
2160 rgbToY[1] * gCount +
2161 rgbToY[2] * bCount) / scaleOutSq;
2162 if (outY % 2 == 0 && outX % 2 == 0) {
2163 *pxVU++ = (rgbToCr[0] * rCount +
2164 rgbToCr[1] * gCount +
2165 rgbToCr[2] * bCount +
2166 rgbToCr[3]) / scaleOutSq;
2167 *pxVU++ = (rgbToCb[0] * rCount +
2168 rgbToCb[1] * gCount +
2169 rgbToCb[2] * bCount +
2170 rgbToCb[3]) / scaleOutSq;
2171 }
2172 for (unsigned int j = 1; j < inc; j++)
2173 mScene.getPixelElectrons();
2174 }
2175 }
2176#else
2177 uint8_t *src;
2178 if (mKernelBuffer) {
2179 src = mKernelBuffer;
2180 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2181 //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/);
2182            ALOGI("Scale YV12 frame down\n");
2183
2184 int width = vinfo->preview.format.fmt.pix.width;
2185 int height = vinfo->preview.format.fmt.pix.height;
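                // Note: planes are passed in YV12 order (Y, Cr, Cb) where
                // I420Scale's parameters are named Y/U/V; since source and
                // destination use the same swapped order, each plane still
                // lands in the right place.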
2186 int ret = libyuv::I420Scale(src, width,
2187 src + width * height, width / 2,
2188 src + width * height + width * height / 4, width / 2,
2189 width, height,
2190 b.img, b.width,
2191 b.img + b.width * b.height, b.width / 2,
2192 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2193 b.width, b.height,
2194 libyuv::kFilterNone);
2195 if (ret < 0)
2196                ALOGE("Scale YV12 frame down failed!\n");
2197 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2198 int width = vinfo->preview.format.fmt.pix.width;
2199 int height = vinfo->preview.format.fmt.pix.height;
                // plain new throws std::bad_alloc rather than returning NULL,
                // so use the nothrow form to make this check meaningful
2200            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];
2201
2202            if (tmp_buffer == NULL) {
2203                ALOGE("new buffer failed!\n");
2204                return;
2205            }
2206
2207 YUYVToYV12(src, tmp_buffer, width, height);
2208
2209 int ret = libyuv::I420Scale(tmp_buffer, width,
2210 tmp_buffer + width * height, width / 2,
2211 tmp_buffer + width * height + width * height / 4, width / 2,
2212 width, height,
2213 b.img, b.width,
2214 b.img + b.width * b.height, b.width / 2,
2215 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2216 b.width, b.height,
2217 libyuv::kFilterNone);
2218 if (ret < 0)
2219                ALOGE("Scale YV12 frame down failed!\n");
2220 delete [] tmp_buffer;
2221 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2222 int width = vinfo->preview.format.fmt.pix.width;
2223 int height = vinfo->preview.format.fmt.pix.height;
                // nothrow form, as above, so the NULL check is meaningful
2224            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2];
2225
2226            if (tmp_buffer == NULL) {
2227                ALOGE("new buffer failed!\n");
2228                return;
2229            }
2230
2231            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
2232                tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
2233                width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2234                DBG_LOGA("Decode MJPEG frame failed\n");
                    // don't scale an undecoded buffer; drop this frame instead
                    delete [] tmp_buffer;
                    return;
2235            }
2236
2237 int ret = libyuv::I420Scale(tmp_buffer, width,
2238 tmp_buffer + width * height, width / 2,
2239 tmp_buffer + width * height + width * height / 4, width / 2,
2240 width, height,
2241 b.img, b.width,
2242 b.img + b.width * b.height, b.width / 2,
2243 b.img + b.width * b.height + b.width * b.height / 4, b.width / 2,
2244 b.width, b.height,
2245 libyuv::kFilterNone);
2246 if (ret < 0)
2247                ALOGE("Scale YV12 frame down failed!\n");
2248
2249 delete [] tmp_buffer;
2250        } else {
2251            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2252        }
2253        return;
2254 }
2255    while (1) {
2256 if (mExitSensorThread) {
2257 break;
2258 }
2259 src = (uint8_t *)get_frame(vinfo);
2260
2261 if (NULL == src) {
2262 if (get_device_status(vinfo)) {
2263 break;
2264 }
2265 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2266 usleep(5000);
2267 continue;
2268 }
2269 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2270 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2271 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2272 putback_frame(vinfo);
2273 continue;
2274 }
2275 }
2276 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) {
2277 if (vinfo->preview.buf.length == b.width * b.height * 3/2) {
2278 memcpy(b.img, src, vinfo->preview.buf.length);
2279 } else {
2280 yv12_memcpy_align32 (b.img, src, b.width, b.height);
2281 }
2282 mKernelBuffer = b.img;
2283 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2284 int width = vinfo->preview.format.fmt.pix.width;
2285 int height = vinfo->preview.format.fmt.pix.height;
2286 YUYVToYV12(src, b.img, width, height);
2287 mKernelBuffer = b.img;
2288 } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) {
2289 int width = vinfo->preview.format.fmt.pix.width;
2290 int height = vinfo->preview.format.fmt.pix.height;
2291 if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2,
2292 b.img + width * height, (width + 1) / 2, 0, 0, width, height,
2293 width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
2294 putback_frame(vinfo);
2295 DBG_LOGA("Decode MJPEG frame failed\n");
2296 continue;
2297 }
2298 mKernelBuffer = b.img;
2299 } else {
2300            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2301 }
2302
2303 break;
2304 }
2305#endif
2306 //mKernelBuffer = src;
2307 ALOGVV("YV12 sensor image captured");
2308}
2309
2310void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) {
2311#if 0
2312 float totalGain = gain/100.0 * kBaseGainFactor;
2313 // Using fixed-point math with 6 bits of fractional precision.
2314 // In fixed-point math, calculate total scaling from electrons to 8bpp
2315 const int scale64x = 64 * totalGain * 255 / kMaxRawValue;
2316 // In fixed-point math, saturation point of sensor after gain
2317 const int saturationPoint = 64 * 255;
2318 // Fixed-point coefficients for RGB-YUV transform
2319 // Based on JFIF RGB->YUV transform.
2320 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
2321 const int rgbToY[] = {19, 37, 7};
2322 const int rgbToCb[] = {-10,-21, 32, 524288};
2323 const int rgbToCr[] = {32,-26, -5, 524288};
2324 // Scale back to 8bpp non-fixed-point
2325 const int scaleOut = 64;
2326 const int scaleOutSq = scaleOut * scaleOut; // after multiplies
2327
2328 uint32_t inc = kResolution[0] / stride;
2329 uint32_t outH = kResolution[1] / inc;
2330 for (unsigned int y = 0, outY = 0;
2331 y < kResolution[1]; y+=inc, outY++) {
2332 uint8_t *pxY = img + outY * stride;
2333 uint8_t *pxVU = img + (outH + outY / 2) * stride;
2334 mScene.setReadoutPixel(0,y);
2335 for (unsigned int outX = 0; outX < stride; outX++) {
2336 int32_t rCount, gCount, bCount;
2337 // TODO: Perfect demosaicing is a cheat
2338 const uint32_t *pixel = mScene.getPixelElectrons();
2339 rCount = pixel[Scene::R] * scale64x;
2340 rCount = rCount < saturationPoint ? rCount : saturationPoint;
2341 gCount = pixel[Scene::Gr] * scale64x;
2342 gCount = gCount < saturationPoint ? gCount : saturationPoint;
2343 bCount = pixel[Scene::B] * scale64x;
2344 bCount = bCount < saturationPoint ? bCount : saturationPoint;
2345
2346 *pxY++ = (rgbToY[0] * rCount +
2347 rgbToY[1] * gCount +
2348 rgbToY[2] * bCount) / scaleOutSq;
2349 if (outY % 2 == 0 && outX % 2 == 0) {
2350 *pxVU++ = (rgbToCr[0] * rCount +
2351 rgbToCr[1] * gCount +
2352 rgbToCr[2] * bCount +
2353 rgbToCr[3]) / scaleOutSq;
2354 *pxVU++ = (rgbToCb[0] * rCount +
2355 rgbToCb[1] * gCount +
2356 rgbToCb[2] * bCount +
2357 rgbToCb[3]) / scaleOutSq;
2358 }
2359 for (unsigned int j = 1; j < inc; j++)
2360 mScene.getPixelElectrons();
2361 }
2362 }
2363#else
2364 uint8_t *src;
2365 if (mKernelBuffer) {
2366 src = mKernelBuffer;
2367 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2368 //TODO YUYV scale
2369 //memcpy(img, src, vinfo->preview.buf.length);
2370
2371        } else
2372            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2373
2374        return;
2375 }
2376
2377    while (1) {
2378 if (mExitSensorThread) {
2379 break;
2380 }
2381 src = (uint8_t *)get_frame(vinfo);
2382 if (NULL == src) {
2383 if (get_device_status(vinfo)) {
2384 break;
2385 }
2386 CAMHAL_LOGDA("get frame NULL, sleep 5ms");
2387 usleep(5000);
2388 continue;
2389 }
2390 if (vinfo->preview.format.fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG) {
2391 if (vinfo->preview.buf.length != vinfo->preview.buf.bytesused) {
2392 CAMHAL_LOGDB("length=%d, bytesused=%d \n", vinfo->preview.buf.length, vinfo->preview.buf.bytesused);
2393 putback_frame(vinfo);
2394 continue;
2395 }
2396 }
2397 if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) {
2398 memcpy(img, src, vinfo->preview.buf.length);
2399 mKernelBuffer = src;
2400 } else {
2401            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
2402 }
2403
2404 break;
2405 }
2406#endif
2407 //mKernelBuffer = src;
2408 ALOGVV("YUYV sensor image captured");
2409}
2410
2411void Sensor::dump(int fd) {
2412 String8 result;
2413    result = String8::format("%s, sensor preview information:\n", __FILE__);
2414    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
2415    result.appendFormat("camera preview width: %d, height: %d\n",
2416        vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);
2417
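        // V4L2 pixel formats are FOURCC codes, so the four bytes of the
        // integer print directly as ASCII (e.g. "NV21" or "YUYV").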
2418 result.appendFormat("camera preview format: %.4s\n\n",
2419 (char *) &vinfo->preview.format.fmt.pix.pixelformat);
2420
2421 write(fd, result.string(), result.size());
2422}
2423
2424} // namespace android
2425
2426