/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera3_Sensor"

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <utils/Log.h>
#include <cutils/properties.h>

#include "../EmulatedFakeCamera2.h"
#include "Sensor.h"
#include <cmath>
#include <cstdlib>
#include <hardware/camera3.h>
#include "system/camera_metadata.h"
#include "libyuv.h"
#include "NV12_resize.h"
#include "libyuv/scale.h"
#include "ge2d_stream.h"
#include "util.h"
#include <sys/time.h>

#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))

namespace android {

const unsigned int Sensor::kResolution[2] = {1600, 1200};

const nsecs_t Sensor::kExposureTimeRange[2] =
    {1000L, 30000000000L}; // 1 us - 30 sec
const nsecs_t Sensor::kFrameDurationRange[2] =
    {33331760L, 30000000000L}; // ~1/30 s - 30 sec
const nsecs_t Sensor::kMinVerticalBlank = 10000L;

const uint8_t Sensor::kColorFilterArrangement =
    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;

// Output image data characteristics
const uint32_t Sensor::kMaxRawValue = 4000;
const uint32_t Sensor::kBlackLevel = 1000;

// Sensor sensitivity
const float Sensor::kSaturationVoltage = 0.520f;
const uint32_t Sensor::kSaturationElectrons = 2000;
const float Sensor::kVoltsPerLuxSecond = 0.100f;

const float Sensor::kElectronsPerLuxSecond =
    Sensor::kSaturationElectrons / Sensor::kSaturationVoltage
    * Sensor::kVoltsPerLuxSecond;
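// With the constants above: 2000 / 0.520 * 0.100 = ~385 electrons captured
// per lux-second of exposure.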

const float Sensor::kBaseGainFactor = (float)Sensor::kMaxRawValue /
    Sensor::kSaturationElectrons;
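// i.e. 4000 / 2000 = 2.0 raw counts per electron at unity gain.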

const float Sensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
const float Sensor::kReadNoiseStddevAfterGain = 2.100; // in digital counts
const float Sensor::kReadNoiseVarBeforeGain =
    Sensor::kReadNoiseStddevBeforeGain *
    Sensor::kReadNoiseStddevBeforeGain;
const float Sensor::kReadNoiseVarAfterGain =
    Sensor::kReadNoiseStddevAfterGain *
    Sensor::kReadNoiseStddevAfterGain;

// While each row has to read out, reset, and then expose, the (reset +
// expose) sequence can be overlapped by other row readouts, so the final
// minimum frame duration is purely a function of row readout time, at least
// if there's a reasonable number of rows.
const nsecs_t Sensor::kRowReadoutTime =
    Sensor::kFrameDurationRange[0] / Sensor::kResolution[1];
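// With the values above: 33331760 ns / 1200 rows = ~27.8 us per row.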

const int32_t Sensor::kSensitivityRange[2] = {100, 1600};
const uint32_t Sensor::kDefaultSensitivity = 100;

/** A few utility functions for math, normal distributions */

// Take advantage of IEEE floating-point format to calculate an approximate
// square root. Accurate to within +-3.6%
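// For example, sqrtf_approx(4.0f) returns roughly 2.0; the bound is loose
// but good enough for the per-pixel noise estimate in captureRaw() below.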
float sqrtf_approx(float r) {
    // Modifier is based on IEEE floating-point representation; the
    // manipulations boil down to finding approximate log2, dividing by two, and
    // then inverting the log2. A bias is added to make the relative error
    // symmetric about the real answer.
    const int32_t modifier = 0x1FBB4000;

    int32_t r_i = *(int32_t*)(&r);
    r_i = (r_i >> 1) + modifier;

    return *(float*)(&r_i);
}

// Copy a tightly packed RGB24 image out of a source buffer whose rows are
// padded to a 32-pixel-aligned stride.
void rgb24_memcpy(unsigned char *dst, unsigned char *src, int width, int height)
{
    int stride = (width + 31) & (~31);
    int h;
    for (h = 0; h < height; h++)
    {
        memcpy(dst, src, width * 3);
        dst += width * 3;
        src += stride * 3;
    }
}

static int ALIGN(int x, int y) {
    // y must be a power of 2.
    return (x + y - 1) & ~(y - 1);
}
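// e.g. ALIGN(1917, 32) == 1920, and ALIGN(1920, 32) == 1920.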

Sensor::Sensor():
        Thread(false),
        mGotVSync(false),
        mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
        mFrameDuration(kFrameDurationRange[0]),
        mGainFactor(kDefaultSensitivity),
        mNextBuffers(NULL),
        mFrameNumber(0),
        mCapturedBuffers(NULL),
        mListener(NULL),
        mIoctlSupport(0),
        msupportrotate(0),
        mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
{
}

Sensor::~Sensor() {
    shutDown();
}

status_t Sensor::startUp(int idx) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");

    int res;
    mCapturedBuffers = NULL;
    res = run("EmulatedFakeCamera2::Sensor",
            ANDROID_PRIORITY_URGENT_DISPLAY);

    if (res != OK) {
        ALOGE("Unable to start up sensor capture thread: %d", res);
    }

    vinfo = (struct VideoInfo *) calloc(1, sizeof(*vinfo));
    vinfo->idx = idx;

    res = camera_open(vinfo);
    if (res < 0) {
        ALOGE("Unable to open sensor %d, errno=%d\n", vinfo->idx, res);
    }

    mSensorType = SENSOR_MMAP;
    if (strstr((const char *)vinfo->cap.driver, "uvcvideo")) {
        mSensorType = SENSOR_USB;
    }

    if (strstr((const char *)vinfo->cap.card, "share_fd")) {
        mSensorType = SENSOR_SHARE_FD;
    }

    if (strstr((const char *)vinfo->cap.card, "front"))
        mSensorFace = SENSOR_FACE_FRONT;
    else if (strstr((const char *)vinfo->cap.card, "back"))
        mSensorFace = SENSOR_FACE_BACK;
    else
        mSensorFace = SENSOR_FACE_NONE;

    return res;
}

sensor_type_e Sensor::getSensorType(void)
{
    return mSensorType;
}

status_t Sensor::IoctlStateProbe(void) {
    struct v4l2_queryctrl qc;
    int ret = 0;
    mIoctlSupport = 0;
    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_ROTATE_ID;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0) || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        mIoctlSupport &= ~IOCTL_MASK_ROTATE;
    } else {
        mIoctlSupport |= IOCTL_MASK_ROTATE;
    }

    if (mIoctlSupport & IOCTL_MASK_ROTATE) {
        msupportrotate = true;
        DBG_LOGA("camera supports capture rotate");
    }
    return mIoctlSupport;
}

uint32_t Sensor::getStreamUsage(int stream_type)
{
    uint32_t usage = GRALLOC_USAGE_HW_CAMERA_WRITE;

    switch (stream_type) {
        case CAMERA3_STREAM_OUTPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
        case CAMERA3_STREAM_INPUT:
            usage = GRALLOC_USAGE_HW_CAMERA_READ;
            break;
        case CAMERA3_STREAM_BIDIRECTIONAL:
            usage = GRALLOC_USAGE_HW_CAMERA_READ |
                    GRALLOC_USAGE_HW_CAMERA_WRITE;
            break;
    }
    if ((mSensorType == SENSOR_MMAP)
            || (mSensorType == SENSOR_USB)) {
        usage = (GRALLOC_USAGE_HW_TEXTURE
                | GRALLOC_USAGE_HW_RENDER
                | GRALLOC_USAGE_SW_READ_MASK
                | GRALLOC_USAGE_SW_WRITE_MASK
                );
    }

    return usage;
}

status_t Sensor::setOutputFormat(int width, int height, int pixelformat, bool isjpeg)
{
    int res;

    mFramecount = 0;
    mCurFps = 0;
    gettimeofday(&mTimeStart, NULL);

    if (isjpeg) {
        vinfo->picture.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->picture.format.fmt.pix.width = width;
        vinfo->picture.format.fmt.pix.height = height;
        vinfo->picture.format.fmt.pix.pixelformat = pixelformat;
    } else {
        vinfo->preview.format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        vinfo->preview.format.fmt.pix.width = width;
        vinfo->preview.format.fmt.pix.height = height;
        vinfo->preview.format.fmt.pix.pixelformat = pixelformat;

        res = setBuffersFormat(vinfo);
        if (res < 0) {
            ALOGE("set buffer failed\n");
            return res;
        }
    }

    return OK;
}

status_t Sensor::streamOn() {
    return start_capturing(vinfo);
}

bool Sensor::isStreaming() {
    return vinfo->isStreaming;
}

bool Sensor::isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat)
{
    if ((vinfo->preview.format.fmt.pix.width != width)
            || (vinfo->preview.format.fmt.pix.height != height)
            //|| (vinfo->format.fmt.pix.pixelformat != pixelformat)
            ) {
        return true;
    }

    return false;
}

status_t Sensor::streamOff() {
    if (mSensorType == SENSOR_USB) {
        return releasebuf_and_stop_capturing(vinfo);
    } else {
        return stop_capturing(vinfo);
    }
}

int Sensor::getOutputFormat()
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_NV21)
            return V4L2_PIX_FMT_NV21;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_YUYV)
            return V4L2_PIX_FMT_YUYV;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

/* If the sensor supports MJPEG, return it first; otherwise
 * transform the HAL format to a v4l2 format and check whether
 * that is supported.
 */
int Sensor::halFormatToSensorFormat(uint32_t pixelfmt)
{
    struct v4l2_fmtdesc fmt;
    int ret;
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (pixelfmt == HAL_PIXEL_FORMAT_YV12) {
        pixelfmt = V4L2_PIX_FMT_YVU420;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
        pixelfmt = V4L2_PIX_FMT_NV21;
    } else if (pixelfmt == HAL_PIXEL_FORMAT_YCbCr_422_I) {
        pixelfmt = V4L2_PIX_FMT_YUYV;
    } else {
        pixelfmt = V4L2_PIX_FMT_NV21;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == V4L2_PIX_FMT_MJPEG)
            return V4L2_PIX_FMT_MJPEG;
        fmt.index++;
    }

    fmt.index = 0;
    while ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FMT, &fmt)) == 0) {
        if (fmt.pixelformat == pixelfmt)
            return pixelfmt;
        fmt.index++;
    }

    ALOGE("Unable to find a supported sensor format!");
    return BAD_VALUE;
}

void Sensor::setPictureRotate(int rotate)
{
    mRotateValue = rotate;
}

int Sensor::getPictureRotate()
{
    return mRotateValue;
}

status_t Sensor::shutDown() {
    ALOGV("%s: E", __FUNCTION__);

    int res;
    res = requestExitAndWait();
    if (res != OK) {
        ALOGE("Unable to shut down sensor capture thread: %d", res);
    }

    if (vinfo != NULL) {
        if (mSensorType == SENSOR_USB) {
            releasebuf_and_stop_capturing(vinfo);
        } else {
            stop_capturing(vinfo);
        }
    }

    camera_close(vinfo);

    if (vinfo) {
        free(vinfo);
        vinfo = NULL;
    }
    ALOGD("%s: Exit", __FUNCTION__);
    return res;
}

Scene &Sensor::getScene() {
    return mScene;
}

int Sensor::getZoom(int *zoomMin, int *zoomMax, int *zoomStep)
{
    int ret = 0;
    struct v4l2_queryctrl qc;

    memset(&qc, 0, sizeof(qc));
    qc.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);

    if ((qc.flags == V4L2_CTRL_FLAG_DISABLED) || (ret < 0)
            || (qc.type != V4L2_CTRL_TYPE_INTEGER)) {
        ret = -1;
        *zoomMin = 0;
        *zoomMax = 0;
        *zoomStep = 1;
        CAMHAL_LOGDB("%s: Can't get zoom level!\n", __FUNCTION__);
    } else {
        *zoomMin = qc.minimum;
        *zoomMax = qc.maximum;
        *zoomStep = qc.step;
        DBG_LOGB("zoomMin:%d zoomMax:%d zoomStep:%d\n", *zoomMin, *zoomMax, *zoomStep);
    }

    return ret;
}

int Sensor::setZoom(int zoomValue)
{
    int ret = 0;
    struct v4l2_control ctl;

    memset(&ctl, 0, sizeof(ctl));
    ctl.value = zoomValue;
    ctl.id = V4L2_CID_ZOOM_ABSOLUTE;
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        ALOGE("%s: Set zoom level failed!\n", __FUNCTION__);
    }
    return ret;
}

status_t Sensor::setEffect(uint8_t effect)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_COLORFX;

    switch (effect) {
        case ANDROID_CONTROL_EFFECT_MODE_OFF:
            ctl.value = CAM_EFFECT_ENC_NORMAL;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
            ctl.value = CAM_EFFECT_ENC_COLORINV;
            break;
        case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
            ctl.value = CAM_EFFECT_ENC_SEPIA;
            break;
        default:
            ALOGE("%s: Doesn't support effect mode %d",
                    __FUNCTION__, effect);
            return BAD_VALUE;
    }

    DBG_LOGB("set effect mode:%d", effect);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("Set effect fail: %s. ret=%d", strerror(errno), ret);
    }
    return ret;
}

#define MAX_LEVEL_FOR_EXPOSURE 16
#define MIN_LEVEL_FOR_EXPOSURE 3

int Sensor::getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step)
{
    struct v4l2_queryctrl qc;
    int ret = 0;
    int level = 0;
    int middle = 0;

    memset(&qc, 0, sizeof(qc));

    DBG_LOGA("getExposure\n");
    qc.id = V4L2_CID_EXPOSURE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("QUERYCTRL failed, errno=%d\n", errno);
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        return ret;
    }

    if (0 < qc.step)
        level = (qc.maximum - qc.minimum + 1) / qc.step;

    if ((level > MAX_LEVEL_FOR_EXPOSURE)
            || (level < MIN_LEVEL_FOR_EXPOSURE)) {
        *minExp = -4;
        *maxExp = 4;
        *def = 0;
        step->numerator = 1;
        step->denominator = 1;
        DBG_LOGB("level out of [min, max] range: min=%d, max=%d, def=%d\n",
                *minExp, *maxExp, *def);
        return true;
    }

    middle = (qc.minimum + qc.maximum) / 2;
    *minExp = qc.minimum - middle;
    *maxExp = qc.maximum - middle;
    *def = qc.default_value - middle;
    step->numerator = 1;
    step->denominator = 2; //qc.step;
    DBG_LOGB("min=%d, max=%d, step=%d\n", qc.minimum, qc.maximum, qc.step);
    return ret;
}

status_t Sensor::setExposure(int expCmp)
{
    int ret = 0;
    struct v4l2_control ctl;
    struct v4l2_queryctrl qc;

    if (mEV == expCmp) {
        return 0;
    } else {
        mEV = expCmp;
    }
    memset(&ctl, 0, sizeof(ctl));
    memset(&qc, 0, sizeof(qc));

    qc.id = V4L2_CID_EXPOSURE;

    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA get Exposure fail: %s. ret=%d", strerror(errno), ret);
    }

    ctl.id = V4L2_CID_EXPOSURE;
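    // The driver expects an absolute control value, so the EV compensation
    // is re-centered on the middle of the control range; e.g. with a [0, 8]
    // control, expCmp 0 maps to 4 and expCmp -2 maps to 2.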
    ctl.value = expCmp + (qc.maximum - qc.minimum) / 2;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDB("AMLOGIC CAMERA Set Exposure fail: %s. ret=%d", strerror(errno), ret);
    }
    DBG_LOGB("setExposure value=%d min=%d max=%d\n", ctl.value, qc.minimum, qc.maximum);
    return ret;
}

int Sensor::getAntiBanding(uint8_t *antiBanding, uint8_t maxCont)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_POWER_LINE_FREQUENCY;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_INTEGER) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        antiBanding[0] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCont)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_POWER_LINE_FREQUENCY;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "50hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "60hz") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "auto") == 0) {
                    antiBanding[mode_count] = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAntiBanding(uint8_t antiBanding)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_POWER_LINE_FREQUENCY;

    switch (antiBanding) {
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
            ctl.value = CAM_ANTIBANDING_OFF;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
            ctl.value = CAM_ANTIBANDING_50HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
            ctl.value = CAM_ANTIBANDING_60HZ;
            break;
        case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
            ctl.value = CAM_ANTIBANDING_AUTO;
            break;
        default:
            ALOGE("%s: Doesn't support ANTIBANDING mode %d",
                    __FUNCTION__, antiBanding);
            return BAD_VALUE;
    }

    DBG_LOGB("anti banding mode:%d", antiBanding);
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    if (ret < 0) {
        CAMHAL_LOGDA("failed to set anti banding mode!\n");
        return BAD_VALUE;
    }
    return ret;
}

status_t Sensor::setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_ABSOLUTE;
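    // Pack the focus-area center into one 32-bit control value: x in the
    // high 16 bits, y in the low 16. The +1000 offset presumably shifts the
    // Android metering coordinate range [-1000, 1000] into non-negative
    // values before packing.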
    ctl.value = ((x0 + x1) / 2 + 1000) << 16;
    ctl.value |= ((y0 + y1) / 2 + 1000) & 0xffff;

    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

int Sensor::getAutoFocus(uint8_t *afMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_FOCUS_AUTO;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        afMode[0] = ANDROID_CONTROL_AF_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_FOCUS_AUTO;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-video") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "continuous-picture") == 0) {
                    afMode[mode_count] = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAutoFocuas(uint8_t afMode)
{
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_FOCUS_AUTO;

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
            ctl.value = CAM_FOCUS_MODE_AUTO;
            break;
        case ANDROID_CONTROL_AF_MODE_MACRO:
            ctl.value = CAM_FOCUS_MODE_MACRO;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
            ctl.value = CAM_FOCUS_MODE_CONTI_VID;
            break;
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            ctl.value = CAM_FOCUS_MODE_CONTI_PIC;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }

    if (ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl) < 0) {
        CAMHAL_LOGDA("failed to set camera focus mode!\n");
        return BAD_VALUE;
    }

    return OK;
}

int Sensor::getAWB(uint8_t *awbMode, uint8_t maxCount)
{
    struct v4l2_queryctrl qc;
    struct v4l2_querymenu qm;
    int ret;
    int mode_count = -1;

    memset(&qc, 0, sizeof(struct v4l2_queryctrl));
    qc.id = V4L2_CID_DO_WHITE_BALANCE;
    ret = ioctl(vinfo->fd, VIDIOC_QUERYCTRL, &qc);
    if ((ret < 0) || (qc.flags == V4L2_CTRL_FLAG_DISABLED)) {
        DBG_LOGB("camera handle %d can't support this ctrl", vinfo->fd);
    } else if (qc.type != V4L2_CTRL_TYPE_MENU) {
        DBG_LOGB("this ctrl of camera handle %d can't support menu type", vinfo->fd);
    } else {
        memset(&qm, 0, sizeof(qm));

        int index = 0;
        mode_count = 1;
        awbMode[0] = ANDROID_CONTROL_AWB_MODE_OFF;

        for (index = qc.minimum; index <= qc.maximum; index += qc.step) {
            if (mode_count >= maxCount)
                break;

            memset(&qm, 0, sizeof(struct v4l2_querymenu));
            qm.id = V4L2_CID_DO_WHITE_BALANCE;
            qm.index = index;
            if (ioctl(vinfo->fd, VIDIOC_QUERYMENU, &qm) < 0) {
                continue;
            } else {
                if (strcmp((char*)qm.name, "auto") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_AUTO;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "incandescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_INCANDESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "warm-fluorescent") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "cloudy-daylight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "twilight") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_TWILIGHT;
                    mode_count++;
                } else if (strcmp((char*)qm.name, "shade") == 0) {
                    awbMode[mode_count] = ANDROID_CONTROL_AWB_MODE_SHADE;
                    mode_count++;
                }
            }
        }
    }

    return mode_count;
}

status_t Sensor::setAWB(uint8_t awbMode)
{
    int ret = 0;
    struct v4l2_control ctl;
    ctl.id = V4L2_CID_DO_WHITE_BALANCE;

    switch (awbMode) {
        case ANDROID_CONTROL_AWB_MODE_AUTO:
            ctl.value = CAM_WB_AUTO;
            break;
        case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
            ctl.value = CAM_WB_INCANDESCENCE;
            break;
        case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
            ctl.value = CAM_WB_FLUORESCENT;
            break;
        case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
            ctl.value = CAM_WB_DAYLIGHT;
            break;
        case ANDROID_CONTROL_AWB_MODE_SHADE:
            ctl.value = CAM_WB_SHADE;
            break;
        default:
            ALOGE("%s: Emulator doesn't support AWB mode %d",
                    __FUNCTION__, awbMode);
            return BAD_VALUE;
    }
    ret = ioctl(vinfo->fd, VIDIOC_S_CTRL, &ctl);
    return ret;
}

void Sensor::setExposureTime(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Exposure set to %f", ns/1000000.f);
    mExposureTime = ns;
}

void Sensor::setFrameDuration(uint64_t ns) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Frame duration set to %f", ns/1000000.f);
    mFrameDuration = ns;
}

void Sensor::setSensitivity(uint32_t gain) {
    Mutex::Autolock lock(mControlMutex);
    ALOGVV("Gain set to %d", gain);
    mGainFactor = gain;
}

void Sensor::setDestinationBuffers(Buffers *buffers) {
    Mutex::Autolock lock(mControlMutex);
    mNextBuffers = buffers;
}

void Sensor::setFrameNumber(uint32_t frameNumber) {
    Mutex::Autolock lock(mControlMutex);
    mFrameNumber = frameNumber;
}

bool Sensor::waitForVSync(nsecs_t reltime) {
    int res;
    Mutex::Autolock lock(mControlMutex);

    mGotVSync = false;
    res = mVSync.waitRelative(mControlMutex, reltime);
    if (res != OK && res != TIMED_OUT) {
        ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
        return false;
    }
    return mGotVSync;
}

bool Sensor::waitForNewFrame(nsecs_t reltime,
                             nsecs_t *captureTime) {
    Mutex::Autolock lock(mReadoutMutex);
    if (mCapturedBuffers == NULL) {
        int res;
        res = mReadoutAvailable.waitRelative(mReadoutMutex, reltime);
        if (res == TIMED_OUT) {
            return false;
        } else if (res != OK || mCapturedBuffers == NULL) {
            ALOGE("Error waiting for sensor readout signal: %d", res);
            return false;
        }
    } else {
        mReadoutComplete.signal();
    }

    *captureTime = mCaptureTime;
    mCapturedBuffers = NULL;
    return true;
}

Sensor::SensorListener::~SensorListener() {
}

void Sensor::setSensorListener(SensorListener *listener) {
    Mutex::Autolock lock(mControlMutex);
    mListener = listener;
}

status_t Sensor::readyToRun() {
    ALOGV("Starting up sensor thread");
    mStartupTime = systemTime();
    mNextCaptureTime = 0;
    mNextCapturedBuffers = NULL;

    DBG_LOGA("");

    return OK;
}

bool Sensor::threadLoop() {
    /**
     * Sensor capture operation main loop.
     *
     * Stages are out-of-order relative to a single frame's processing, but
     * in-order in time.
     */
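    // Each pass latches the controls for frame N (stage 1), completes the
    // readout of frame N-1 (stage 3), and only then starts the simulated
    // capture of frame N (stage 2), so one frame's readout overlaps the
    // next frame's setup, much like a real sensor pipeline.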

    /**
     * Stage 1: Read in latest control parameters
     */
    uint64_t exposureDuration;
    uint64_t frameDuration;
    uint32_t gain;
    Buffers *nextBuffers;
    uint32_t frameNumber;
    SensorListener *listener = NULL;
    {
        Mutex::Autolock lock(mControlMutex);
        exposureDuration = mExposureTime;
        frameDuration = mFrameDuration;
        gain = mGainFactor;
        nextBuffers = mNextBuffers;
        frameNumber = mFrameNumber;
        listener = mListener;
        // Don't reuse a buffer set
        mNextBuffers = NULL;

        // Signal VSync for start of readout
        ALOGVV("Sensor VSync");
        mGotVSync = true;
        mVSync.signal();
    }

    /**
     * Stage 3: Read out latest captured image
     */

    Buffers *capturedBuffers = NULL;
    nsecs_t captureTime = 0;

    nsecs_t startRealTime = systemTime();
    // Stagefright cares about system time for timestamps, so base simulated
    // time on that.
    nsecs_t simulatedTime = startRealTime;
    nsecs_t frameEndRealTime = startRealTime + frameDuration;
    nsecs_t frameReadoutEndRealTime = startRealTime +
            kRowReadoutTime * kResolution[1];

    if (mNextCapturedBuffers != NULL) {
        ALOGVV("Sensor starting readout");
        // Pretend we're doing readout now; will signal once enough time has elapsed
        capturedBuffers = mNextCapturedBuffers;
        captureTime = mNextCaptureTime;
    }
    simulatedTime += kRowReadoutTime + kMinVerticalBlank;

    // TODO: Move this signal to another thread to simulate readout
    // time properly
    if (capturedBuffers != NULL) {
        ALOGVV("Sensor readout complete");
        Mutex::Autolock lock(mReadoutMutex);
        if (mCapturedBuffers != NULL) {
            ALOGV("Waiting for readout thread to catch up!");
            mReadoutComplete.wait(mReadoutMutex);
        }

        mCapturedBuffers = capturedBuffers;
        mCaptureTime = captureTime;
        mReadoutAvailable.signal();
        capturedBuffers = NULL;
    }

    /**
     * Stage 2: Capture new image
     */
    mNextCaptureTime = simulatedTime;
    mNextCapturedBuffers = nextBuffers;

    if (mNextCapturedBuffers != NULL) {
        if (listener != NULL) {
            listener->onSensorEvent(frameNumber, SensorListener::EXPOSURE_START,
                    mNextCaptureTime);
        }

        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
                (float)exposureDuration/1e6, gain);
        mScene.setExposureDuration((float)exposureDuration/1e9);
        mScene.calculateScene(mNextCaptureTime);

        if (mSensorType == SENSOR_SHARE_FD) {
            captureNewImageWithGe2d();
        } else {
            captureNewImage();
        }
        mFramecount++;
    }
    if (mFramecount == 100) {
        gettimeofday(&mTimeEnd, NULL);
        int64_t interval = (mTimeEnd.tv_sec - mTimeStart.tv_sec) * 1000000L + (mTimeEnd.tv_usec - mTimeStart.tv_usec);
        mCurFps = mFramecount/(interval/1000000.0f);
        memcpy(&mTimeStart, &mTimeEnd, sizeof(mTimeEnd));
        mFramecount = 0;
        CAMHAL_LOGIB("interval=%lld us (%f s), fps=%f\n", interval, interval/1000000.0f, mCurFps);
    }
    ALOGVV("Sensor vertical blanking interval");
    nsecs_t workDoneRealTime = systemTime();
    const nsecs_t timeAccuracy = 2e6; // 2 ms of imprecision is ok
    if (workDoneRealTime < frameEndRealTime - timeAccuracy) {
        timespec t;
        t.tv_sec = (frameEndRealTime - workDoneRealTime) / 1000000000L;
        t.tv_nsec = (frameEndRealTime - workDoneRealTime) % 1000000000L;

        int ret;
        do {
            ret = nanosleep(&t, &t);
        } while (ret != 0);
    }
    nsecs_t endRealTime = systemTime();
    ALOGVV("Frame cycle took %d ms, target %d ms",
            (int)((endRealTime - startRealTime)/1000000),
            (int)(frameDuration / 1000000));
    return true;
}

int Sensor::captureNewImageWithGe2d() {
    mKernelPhysAddr = 0;

    while ((mKernelPhysAddr = get_frame_phys(vinfo)) == 0) {
        usleep(5000);
    }

    // Might be adding more buffers, so size isn't constant
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        fillStream(vinfo, mKernelPhysAddr, b);
    }
    putback_frame(vinfo);
    mKernelPhysAddr = 0;

    return 0;
}

int Sensor::captureNewImage() {
    bool isjpeg = false;
    uint32_t gain = mGainFactor;
    mKernelBuffer = NULL;

    // Might be adding more buffers, so size isn't constant
    DBG_LOGB("size=%d\n", mNextCapturedBuffers->size());
    for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
        const StreamBuffer &b = (*mNextCapturedBuffers)[i];
        ALOGVV("Sensor capturing buffer %d: stream %d,"
                " %d x %d, format %x, stride %d, buf %p, img %p",
                i, b.streamId, b.width, b.height, b.format, b.stride,
                b.buffer, b.img);
        switch (b.format) {
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                captureRaw(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGB_888:
                captureRGB(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_RGBA_8888:
                captureRGBA(b.img, gain, b.stride);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                // Add an auxiliary buffer of the right size
                // Assumes only one BLOB (JPEG) buffer in
                // mNextCapturedBuffers
                isjpeg = true;
                StreamBuffer bAux;
                int orientation;
                orientation = getPictureRotate();
                ALOGD("bAux orientation=%d", orientation);
                if (!msupportrotate) {
                    bAux.streamId = 0;
                    bAux.width = b.width;
                    bAux.height = b.height;
                    bAux.format = HAL_PIXEL_FORMAT_RGB_888;
                    bAux.stride = b.width;
                    bAux.buffer = NULL;
                } else {
                    if ((orientation == 90) || (orientation == 270)) {
                        // Rotated captures come back with transposed
                        // dimensions, so swap width and height.
                        bAux.streamId = 0;
                        bAux.width = b.height;
                        bAux.height = b.width;
                        bAux.format = HAL_PIXEL_FORMAT_RGB_888;
                        bAux.stride = b.height;
                        bAux.buffer = NULL;
                    } else {
                        bAux.streamId = 0;
                        bAux.width = b.width;
                        bAux.height = b.height;
                        bAux.format = HAL_PIXEL_FORMAT_RGB_888;
                        bAux.stride = b.width;
                        bAux.buffer = NULL;
                    }
                }
                // TODO: Reuse these
                bAux.img = new uint8_t[b.width * b.height * 3];
                mNextCapturedBuffers->push_back(bAux);
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                captureNV21(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YV12:
                captureYV12(b, gain);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_422_I:
                captureYUYV(b.img, gain, b.stride);
                break;
            default:
                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
                        b.format);
                break;
        }
    }
    if (!isjpeg) {
        // The JPEG (RGB888) data was captured into the separate auxiliary
        // buffer above; its frame is put back separately.
        putback_frame(vinfo);
    }
    mKernelBuffer = NULL;

    return 0;
}

int Sensor::getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size) {
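    // picSizes is filled with (format, width, height, direction) quadruples;
    // within each format block the insertion loop below keeps the entries
    // sorted by descending width * height.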
    int res;
    int i, j, k, START;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%ux%u", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = getOutputFormat();

    START = 0;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format=%d\n",
                    frmsize.discrete.width, frmsize.discrete.height, frmsize.pixel_format);
            if (0 == i) {
                count += 4;
                continue;
            }

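            // Shift smaller entries down by one quadruple so the new size
            // lands in its descending-order slot.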
            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

    START = count;
    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_420_888;
            picSizes[count+1] = frmsize.discrete.width;
            picSizes[count+2] = frmsize.discrete.height;
            picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

            DBG_LOGB("get output width=%d, height=%d, format="
                    "HAL_PIXEL_FORMAT_YCbCr_420_888\n", frmsize.discrete.width,
                    frmsize.discrete.height);
            if (0 == i) {
                count += 4;
                continue;
            }

            for (k = count; k > START; k -= 4) {
                if (frmsize.discrete.width * frmsize.discrete.height >
                        picSizes[k - 3] * picSizes[k - 2]) {
                    picSizes[k + 1] = picSizes[k - 3];
                    picSizes[k + 2] = picSizes[k - 2];
                } else {
                    break;
                }
            }
            picSizes[k + 1] = frmsize.discrete.width;
            picSizes[k + 2] = frmsize.discrete.height;

            count += 4;
        }
    }

#if 0
    if (frmsize.pixel_format == V4L2_PIX_FMT_YUYV) {
        START = count;
        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_YCbCr_422_I;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                DBG_LOGB("get output width=%d, height=%d, format="
                        "HAL_PIXEL_FORMAT_YCbCr_422_I\n", frmsize.discrete.width,
                        frmsize.discrete.height);
                if (0 == i) {
                    count += 4;
                    continue;
                }

                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }
                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }
    }
#endif

    uint32_t jpgSrcfmt[] = {
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
    };

    START = count;
    for (j = 0; j < (int)(sizeof(jpgSrcfmt)/sizeof(jpgSrcfmt[0])); j++) {
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = jpgSrcfmt[j];

        for (i = 0; ; i++) {
            frmsize.index = i;
            res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
            if (res < 0) {
                DBG_LOGB("index=%d, break\n", i);
                break;
            }

            if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

                if (0 != (frmsize.discrete.width % 16))
                    continue;

                //if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                //    continue;

                if (count >= size)
                    break;

                picSizes[count+0] = HAL_PIXEL_FORMAT_BLOB;
                picSizes[count+1] = frmsize.discrete.width;
                picSizes[count+2] = frmsize.discrete.height;
                picSizes[count+3] = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;

                if (0 == i) {
                    count += 4;
                    continue;
                }

                // TODO: insert in descending order
                for (k = count; k > START; k -= 4) {
                    if (frmsize.discrete.width * frmsize.discrete.height >
                            picSizes[k - 3] * picSizes[k - 2]) {
                        picSizes[k + 1] = picSizes[k - 3];
                        picSizes[k + 2] = picSizes[k - 2];
                    } else {
                        break;
                    }
                }

                picSizes[k + 1] = frmsize.discrete.width;
                picSizes[k + 2] = frmsize.discrete.height;

                count += 4;
            }
        }

        if (frmsize.index > 0)
            break;
    }

    if (frmsize.index == 0)
        CAMHAL_LOGDA("no supported pixel format for jpeg");

    return count;
}

int Sensor::getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size)
{
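    // duration is filled with (format, width, height, min frame duration in
    // ns) quadruples matching the picSizes layout; 66666666 ns (1/15 s) is
    // the fallback used when the driver reports nothing better.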
    int ret = 0;
    int framerate = 0;
    int temp_rate = 0;
    struct v4l2_frmivalenum fival;
    int i, j = 0;
    int count = 0;
    int tmp_size = size;
    // Note: ARRAY_SIZE() can't be applied to an array parameter (it decays
    // to a pointer), so clear the caller-provided capacity instead.
    memset(duration, 0, sizeof(int64_t) * size);
    int pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YVU420,
        V4L2_PIX_FMT_NV21,
        V4L2_PIX_FMT_RGB24,
        V4L2_PIX_FMT_YUYV,
        // V4L2_PIX_FMT_YVU420
    };

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for ( ; size > 0; size -= 4) {
            memset(&fival, 0, sizeof(fival));

            for (fival.index = 0; ; fival.index++) {
                fival.pixel_format = pixelfmt_tbl[i];
                fival.width = picSizes[size-3];
                fival.height = picSizes[size-2];
                if ((ret = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival)) == 0) {
                    // For DISCRETE intervals fival.discrete is the frame
                    // interval; for CONTINUOUS/STEPWISE it aliases
                    // stepwise.min in the union, i.e. the shortest interval
                    // (highest frame rate), so one branch covers all types.
                    if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE
                            || fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS
                            || fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                        temp_rate = fival.discrete.denominator / fival.discrete.numerator;
                        if (framerate < temp_rate)
                            framerate = temp_rate;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        // TODO: report the real frame interval from the
                        // driver instead of a fixed 1/15 s.
                        duration[count+3] = (int64_t)66666666L;
                        j++;
                    }
                } else {
                    if (j > 0) {
                        if (count > tmp_size)
                            break;
                        duration[count+0] = (int64_t)picSizes[size-4];
                        duration[count+1] = (int64_t)picSizes[size-3];
                        duration[count+2] = (int64_t)picSizes[size-2];
                        if (framerate == 5) {
                            duration[count+3] = (int64_t)200000000L;
                        } else if (framerate == 10) {
                            duration[count+3] = (int64_t)100000000L;
                        } else if (framerate == 15) {
                            duration[count+3] = (int64_t)66666666L;
                        } else if (framerate == 30) {
                            duration[count+3] = (int64_t)33333333L;
                        } else {
                            duration[count+3] = (int64_t)66666666L;
                        }
                        count += 4;
                        break;
                    } else {
                        break;
                    }
                }
            }
            j = 0;
        }
        size = tmp_size;
    }

    return count;
}

int64_t Sensor::getMinFrameDuration()
{
    int64_t tmpDuration = 66666666L; // 1/15 s
    int64_t frameDuration = 66666666L; // 1/15 s
    struct v4l2_frmivalenum fival;
    int i, j;

    uint32_t pixelfmt_tbl[] = {
        V4L2_PIX_FMT_MJPEG,
        V4L2_PIX_FMT_YUYV,
        V4L2_PIX_FMT_NV21,
    };
    struct v4l2_frmsize_discrete resolution_tbl[] = {
        {1920, 1080},
        {1280, 960},
        {640, 480},
        {320, 240},
    };
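    // Durations are computed as numerator * 1e9 / denominator; e.g. a
    // discrete 1/30 s interval yields 1 * 1000000000 / 30 = ~33333333 ns.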

    for (i = 0; i < (int)ARRAY_SIZE(pixelfmt_tbl); i++) {
        for (j = 0; j < (int)ARRAY_SIZE(resolution_tbl); j++) {
            memset(&fival, 0, sizeof(fival));
            fival.index = 0;
            fival.pixel_format = pixelfmt_tbl[i];
            fival.width = resolution_tbl[j].width;
            fival.height = resolution_tbl[j].height;

            while (ioctl(vinfo->fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) == 0) {
                if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
                    tmpDuration =
                        fival.discrete.numerator * 1000000000L / fival.discrete.denominator;

                    if (frameDuration > tmpDuration)
                        frameDuration = tmpDuration;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                } else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
                    frameDuration =
                        fival.stepwise.max.numerator * 1000000000L / fival.stepwise.max.denominator;
                    break;
                }
                fival.index++;
            }
        }

        if (fival.index > 0) {
            break;
        }
    }

    CAMHAL_LOGDB("enum frameDuration=%lld\n", frameDuration);
    return frameDuration;
}

int Sensor::getPictureSizes(int32_t picSizes[], int size, bool preview) {
    int res;
    int i;
    int count = 0;
    struct v4l2_frmsizeenum frmsize;
    char property[PROPERTY_VALUE_MAX];
    unsigned int support_w, support_h;
    int preview_fmt;

    support_w = 10000;
    support_h = 10000;
    memset(property, 0, sizeof(property));
    if (property_get("ro.camera.preview.MaxSize", property, NULL) > 0) {
        CAMHAL_LOGDB("support Max Preview Size :%s", property);
        if (sscanf(property, "%ux%u", &support_w, &support_h) != 2) {
            support_w = 10000;
            support_h = 10000;
        }
    }

    memset(&frmsize, 0, sizeof(frmsize));
    preview_fmt = V4L2_PIX_FMT_NV21; //getOutputFormat();

    if (preview_fmt == V4L2_PIX_FMT_MJPEG)
        frmsize.pixel_format = V4L2_PIX_FMT_MJPEG;
    else if (preview_fmt == V4L2_PIX_FMT_NV21) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_NV21;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YVU420) {
        if (preview == true)
            frmsize.pixel_format = V4L2_PIX_FMT_YVU420;
        else
            frmsize.pixel_format = V4L2_PIX_FMT_RGB24;
    } else if (preview_fmt == V4L2_PIX_FMT_YUYV)
        frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (i = 0; ; i++) {
        frmsize.index = i;
        res = ioctl(vinfo->fd, VIDIOC_ENUM_FRAMESIZES, &frmsize);
        if (res < 0) {
            DBG_LOGB("index=%d, break\n", i);
            break;
        }

        if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { // only support this type

            if (0 != (frmsize.discrete.width % 16))
                continue;

            if ((frmsize.discrete.width > support_w) && (frmsize.discrete.height > support_h))
                continue;

            if (count >= size)
                break;

            picSizes[count] = frmsize.discrete.width;
            picSizes[count+1] = frmsize.discrete.height;

            if (0 == i) {
                count += 2;
                continue;
            }

            // TODO: full insertion in descending order; this only swaps the
            // new size with the immediately preceding (width, height) pair.
            if (picSizes[count + 0] * picSizes[count + 1] > picSizes[count - 1] * picSizes[count - 2]) {
                picSizes[count + 0] = picSizes[count - 2];
                picSizes[count + 1] = picSizes[count - 1];

                picSizes[count - 2] = frmsize.discrete.width;
                picSizes[count - 1] = frmsize.discrete.height;
            }

            count += 2;
        }
    }

    return count;
}

void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    float noiseVarGain = totalGain * totalGain;
    float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
            + kReadNoiseVarAfterGain;
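    // Total noise variance: pre-gain read noise scaled by gain^2, plus
    // post-gain read noise; shot noise proportional to the electron count
    // is added per pixel below.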

    int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
    mScene.setReadoutPixel(0, 0);
    for (unsigned int y = 0; y < kResolution[1]; y++) {
        int *bayerRow = bayerSelect + (y & 0x1) * 2;
        uint16_t *px = (uint16_t*)img + y * stride;
        for (unsigned int x = 0; x < kResolution[0]; x++) {
            uint32_t electronCount;
            electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];

            // TODO: Better pixel saturation curve?
            electronCount = (electronCount < kSaturationElectrons) ?
                    electronCount : kSaturationElectrons;

            // TODO: Better A/D saturation curve?
            uint16_t rawCount = electronCount * totalGain;
            rawCount = (rawCount < kMaxRawValue) ? rawCount : kMaxRawValue;

            // Calculate noise value
            // TODO: Use more-correct Gaussian instead of uniform noise
            float photonNoiseVar = electronCount * noiseVarGain;
            float noiseStddev = sqrtf_approx(readNoiseVar + photonNoiseVar);
            // Scaled to roughly match gaussian/uniform noise stddev
            float noiseSample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25;

            rawCount += kBlackLevel;
            rawCount += noiseStddev * noiseSample;

            *px++ = rawCount;
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("Raw sensor image captured");
}

void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
    float totalGain = gain/100.0 * kBaseGainFactor;
    // In fixed-point math, calculate total scaling from electrons to 8bpp
    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
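    // The factor of 64 keeps fractional precision in integer math; the
    // final "/ 64" with a 255*64 clamp converts back to 8 bits per channel.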
    uint32_t inc = kResolution[0] / stride;

    for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++) {
        uint8_t *px = img + outY * stride * 4;
        mScene.setReadoutPixel(0, y);
        for (unsigned int x = 0; x < kResolution[0]; x += inc) {
            uint32_t rCount, gCount, bCount;
            // TODO: Perfect demosaicing is a cheat
            const uint32_t *pixel = mScene.getPixelElectrons();
            rCount = pixel[Scene::R] * scale64x;
            gCount = pixel[Scene::Gr] * scale64x;
            bCount = pixel[Scene::B] * scale64x;

            *px++ = rCount < 255*64 ? rCount / 64 : 255;
            *px++ = gCount < 255*64 ? gCount / 64 : 255;
            *px++ = bCount < 255*64 ? bCount / 64 : 255;
            *px++ = 255;
            for (unsigned int j = 1; j < inc; j++)
                mScene.getPixelElectrons();
        }
        // TODO: Handle this better
        //simulatedTime += kRowReadoutTime;
    }
    ALOGVV("RGBA sensor image captured");
}
1699 | |
1700 | void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) { |
1701 | #if 0 |
1702 | float totalGain = gain/100.0 * kBaseGainFactor; |
1703 | // In fixed-point math, calculate total scaling from electrons to 8bpp |
1704 | int scale64x = 64 * totalGain * 255 / kMaxRawValue; |
1705 | uint32_t inc = kResolution[0] / stride; |
1706 | |
1707 | for (unsigned int y = 0, outY = 0; y < kResolution[1]; y += inc, outY++ ) { |
1708 | mScene.setReadoutPixel(0, y); |
1709 | uint8_t *px = img + outY * stride * 3; |
1710 | for (unsigned int x = 0; x < kResolution[0]; x += inc) { |
1711 | uint32_t rCount, gCount, bCount; |
1712 | // TODO: Perfect demosaicing is a cheat |
1713 | const uint32_t *pixel = mScene.getPixelElectrons(); |
1714 | rCount = pixel[Scene::R] * scale64x; |
1715 | gCount = pixel[Scene::Gr] * scale64x; |
1716 | bCount = pixel[Scene::B] * scale64x; |
1717 | |
1718 | *px++ = rCount < 255*64 ? rCount / 64 : 255; |
1719 | *px++ = gCount < 255*64 ? gCount / 64 : 255; |
1720 | *px++ = bCount < 255*64 ? bCount / 64 : 255; |
1721 | for (unsigned int j = 1; j < inc; j++) |
1722 | mScene.getPixelElectrons(); |
1723 | } |
1724 | // TODO: Handle this better |
1725 | //simulatedTime += kRowReadoutTime; |
1726 | } |
1727 | #else |
1728 | uint8_t *src = NULL; |
1729 | int ret = 0, rotate = 0; |
1730 | uint32_t width = 0, height = 0; |
1731 | |
1732 | rotate = getPictureRotate(); |
1733 | width = vinfo->picture.format.fmt.pix.width; |
1734 | height = vinfo->picture.format.fmt.pix.height; |
1735 | |
1736 | if (mSensorType == SENSOR_USB) { |
1737 | releasebuf_and_stop_capturing(vinfo); |
1738 | } else { |
1739 | stop_capturing(vinfo); |
1740 | } |
1741 | |
    ret = start_picture(vinfo, rotate);
    if (ret < 0) {
        ALOGE("start_picture failed!");
    }
    while (1) {
1749 | src = (uint8_t *)get_picture(vinfo); |
1750 | if (NULL != src) { |
1751 | if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { |
                uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2]; // nothrow so the NULL check is meaningful
                if (tmp_buffer == NULL) {
                    ALOGE("new buffer failed!\n");
                    return;
                }
                if (ConvertMjpegToNV21(src, vinfo->picture.buf.bytesused, tmp_buffer,
                        width, tmp_buffer + width * height, (width + 1) / 2, width,
                        height, width, height, libyuv::FOURCC_MJPG) != 0) {
                    DBG_LOGA("Decode MJPEG frame failed\n");
                    delete [] tmp_buffer; // free before retrying, or each pass leaks
                    putback_picture_frame(vinfo);
                    usleep(5000);
                } else {
                    nv21_to_rgb24(tmp_buffer, img, width, height);
                    delete [] tmp_buffer;
                    break;
1768 | } |
1769 | } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { |
1770 | if (vinfo->picture.buf.length == vinfo->picture.buf.bytesused) { |
                    yuyv422_to_rgb24(src, img, width, height);
1772 | break; |
1773 | } else { |
1774 | putback_picture_frame(vinfo); |
1775 | usleep(5000); |
1776 | } |
1777 | } else if (vinfo->picture.format.fmt.pix.pixelformat == V4L2_PIX_FMT_RGB24) { |
1778 | if (vinfo->picture.buf.length == width * height * 3) { |
1779 | memcpy(img, src, vinfo->picture.buf.length); |
1780 | } else { |
1781 | rgb24_memcpy(img, src, width, height); |
1782 | } |
1783 | break; |
1784 | } |
1785 | } |
1786 | } |
1787 | ALOGD("get picture success !"); |
1788 | |
1789 | if (mSensorType == SENSOR_USB) { |
1790 | releasebuf_and_stop_picture(vinfo); |
1791 | } else { |
1792 | stop_picture(vinfo); |
1793 | } |
1794 | |
1795 | #endif |
1796 | } |
1797 | |
1798 | void Sensor::YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height) |
1799 | { |
1800 | for (int i = 0; i < width * height * 2; i += 2) { |
1801 | *dst++ = *(src + i); |
1802 | } |
1803 | |
    for (int y = 0; y < height - 1; y += 2) {
1805 | for (int j = 0; j < width * 2; j += 4) { |
1806 | *dst++ = (*(src + 3 + j) + *(src + 3 + j + width * 2) + 1) >> 1; //v |
1807 | *dst++ = (*(src + 1 + j) + *(src + 1 + j + width * 2) + 1) >> 1; //u |
1808 | } |
1809 | src += width * 2 * 2; |
1810 | } |
1811 | |
1812 | if (height & 1) |
1813 | for (int j = 0; j < width * 2; j += 4) { |
1814 | *dst++ = *(src + 3 + j); //v |
1815 | *dst++ = *(src + 1 + j); //u |
1816 | } |
1817 | } |
1818 | |
1819 | void Sensor::YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height) |
1820 | { |
    //width should be an even number; height is assumed even as well
    //(the chroma loop below reads rows in pairs).
    //The chroma stride is aligned to 16 bytes, per the Android YV12 layout.
    int i, j, c_stride, c_size, y_size, cb_offset, cr_offset;
    unsigned char *dst_copy, *src_copy;

    dst_copy = dst;
    src_copy = src;

    y_size = width * height;
    c_stride = ALIGN(width / 2, 16);
    c_size = c_stride * height / 2;
    cr_offset = y_size;
    cb_offset = y_size + c_size;

    for (i = 0; i < y_size; i++) {
        *dst++ = *src;
        src += 2;
    }

    dst = dst_copy;
    src = src_copy;

    for (i = 0; i < height; i += 2) {
        for (j = 1; j < width * 2; j += 4) { //one line has 2*width bytes of YUYV
            //average vertically adjacent chroma with rounding: (c1 + c2 + 1) / 2
            *(dst + cr_offset + j / 4) = (*(src + j + 2) + *(src + j + 2 + width * 2) + 1) / 2;
            *(dst + cb_offset + j / 4) = (*(src + j) + *(src + j + width * 2) + 1) / 2;
        }
        dst += c_stride;
        src += width * 4;
    }
1852 | } |
1853 | |
1854 | |
1855 | void Sensor::captureNV21(StreamBuffer b, uint32_t gain) { |
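    // mKernelBuffer caches the most recently dequeued kernel frame; when it
    // is set, the code below converts/rescales from that cached frame and
    // returns instead of dequeuing again (apparently so repeated captures of
    // the same frame stay cheap).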
1856 | #if 0 |
1857 | float totalGain = gain/100.0 * kBaseGainFactor; |
1858 | // Using fixed-point math with 6 bits of fractional precision. |
1859 | // In fixed-point math, calculate total scaling from electrons to 8bpp |
1860 | const int scale64x = 64 * totalGain * 255 / kMaxRawValue; |
1861 | // In fixed-point math, saturation point of sensor after gain |
1862 | const int saturationPoint = 64 * 255; |
1863 | // Fixed-point coefficients for RGB-YUV transform |
1864 | // Based on JFIF RGB->YUV transform. |
1865 | // Cb/Cr offset scaled by 64x twice since they're applied post-multiply |
1866 | const int rgbToY[] = {19, 37, 7}; |
1867 | const int rgbToCb[] = {-10,-21, 32, 524288}; |
1868 | const int rgbToCr[] = {32,-26, -5, 524288}; |
1869 | // Scale back to 8bpp non-fixed-point |
1870 | const int scaleOut = 64; |
1871 | const int scaleOutSq = scaleOut * scaleOut; // after multiplies |
1872 | |
1873 | uint32_t inc = kResolution[0] / stride; |
1874 | uint32_t outH = kResolution[1] / inc; |
1875 | for (unsigned int y = 0, outY = 0; |
1876 | y < kResolution[1]; y+=inc, outY++) { |
1877 | uint8_t *pxY = img + outY * stride; |
1878 | uint8_t *pxVU = img + (outH + outY / 2) * stride; |
1879 | mScene.setReadoutPixel(0,y); |
1880 | for (unsigned int outX = 0; outX < stride; outX++) { |
1881 | int32_t rCount, gCount, bCount; |
1882 | // TODO: Perfect demosaicing is a cheat |
1883 | const uint32_t *pixel = mScene.getPixelElectrons(); |
1884 | rCount = pixel[Scene::R] * scale64x; |
1885 | rCount = rCount < saturationPoint ? rCount : saturationPoint; |
1886 | gCount = pixel[Scene::Gr] * scale64x; |
1887 | gCount = gCount < saturationPoint ? gCount : saturationPoint; |
1888 | bCount = pixel[Scene::B] * scale64x; |
1889 | bCount = bCount < saturationPoint ? bCount : saturationPoint; |
1890 | |
1891 | *pxY++ = (rgbToY[0] * rCount + |
1892 | rgbToY[1] * gCount + |
1893 | rgbToY[2] * bCount) / scaleOutSq; |
1894 | if (outY % 2 == 0 && outX % 2 == 0) { |
1895 | *pxVU++ = (rgbToCr[0] * rCount + |
1896 | rgbToCr[1] * gCount + |
1897 | rgbToCr[2] * bCount + |
1898 | rgbToCr[3]) / scaleOutSq; |
1899 | *pxVU++ = (rgbToCb[0] * rCount + |
1900 | rgbToCb[1] * gCount + |
1901 | rgbToCb[2] * bCount + |
1902 | rgbToCb[3]) / scaleOutSq; |
1903 | } |
1904 | for (unsigned int j = 1; j < inc; j++) |
1905 | mScene.getPixelElectrons(); |
1906 | } |
1907 | } |
1908 | #else |
1909 | uint8_t *src; |
1910 | |
1911 | if (mKernelBuffer) { |
1912 | src = mKernelBuffer; |
1913 | if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) { |
1914 | //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/); |
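            // NV21 is semi-planar: the interleaved VU plane begins right after
            // the width*height luma plane, which is how the second plane
            // pointers below are derived for both input and output.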
1915 | structConvImage input = {(mmInt32)vinfo->preview.format.fmt.pix.width, |
1916 | (mmInt32)vinfo->preview.format.fmt.pix.height, |
1917 | (mmInt32)vinfo->preview.format.fmt.pix.width, |
1918 | IC_FORMAT_YCbCr420_lp, |
1919 | (mmByte *) src, |
1920 | (mmByte *) src + vinfo->preview.format.fmt.pix.width * vinfo->preview.format.fmt.pix.height, |
1921 | 0}; |
1922 | |
1923 | structConvImage output = {(mmInt32)b.width, |
1924 | (mmInt32)b.height, |
1925 | (mmInt32)b.width, |
1926 | IC_FORMAT_YCbCr420_lp, |
1927 | (mmByte *) b.img, |
1928 | (mmByte *) b.img + b.width * b.height, |
1929 | 0}; |
1930 | |
            if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
                ALOGE("Scale NV21 frame down failed!\n");
1933 | } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { |
1934 | int width = vinfo->preview.format.fmt.pix.width; |
1935 | int height = vinfo->preview.format.fmt.pix.height; |
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2]; // nothrow so the NULL check is meaningful

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }
1942 | |
1943 | YUYVToNV21(src, tmp_buffer, width, height); |
1944 | |
1945 | structConvImage input = {(mmInt32)width, |
1946 | (mmInt32)height, |
1947 | (mmInt32)width, |
1948 | IC_FORMAT_YCbCr420_lp, |
1949 | (mmByte *) tmp_buffer, |
1950 | (mmByte *) tmp_buffer + width * height, |
1951 | 0}; |
1952 | |
1953 | structConvImage output = {(mmInt32)b.width, |
1954 | (mmInt32)b.height, |
1955 | (mmInt32)b.width, |
1956 | IC_FORMAT_YCbCr420_lp, |
1957 | (mmByte *) b.img, |
1958 | (mmByte *) b.img + b.width * b.height, |
1959 | 0}; |
1960 | |
            if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
                ALOGE("Scale NV21 frame down failed!\n");
1963 | |
1964 | delete [] tmp_buffer; |
1965 | } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { |
1966 | int width = vinfo->preview.format.fmt.pix.width; |
1967 | int height = vinfo->preview.format.fmt.pix.height; |
1968 | |
1969 | #if 0 |
1970 | uint8_t *tmp_buffer = new uint8_t[width * height * 3 / 2]; |
1971 | |
1972 | if ( tmp_buffer == NULL) { |
1973 | ALOGE("new buffer failed!\n"); |
1974 | return; |
1975 | } |
1976 | #endif |
1977 | |
1978 | #if 0 |
1979 | if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, |
1980 | b.img, |
1981 | b.width, b.img + b.width * b.height, (b.width + 1) / 2, b.width, |
1982 | b.height, b.width, b.height, libyuv::FOURCC_MJPG) != 0) { |
1983 | DBG_LOGA("Decode MJPEG frame failed\n"); |
1984 | } |
1985 | #else |
1986 | memcpy(b.img, src, b.width * b.height * 3/2); |
1987 | #endif |
1988 | |
1989 | #if 0 |
1990 | structConvImage input = {(mmInt32)width, |
1991 | (mmInt32)height, |
1992 | (mmInt32)width, |
1993 | IC_FORMAT_YCbCr420_lp, |
1994 | (mmByte *) tmp_buffer, |
1995 | (mmByte *) tmp_buffer + width * height, |
1996 | 0}; |
1997 | |
1998 | structConvImage output = {(mmInt32)b.width, |
1999 | (mmInt32)b.height, |
2000 | (mmInt32)b.width, |
2001 | IC_FORMAT_YCbCr420_lp, |
2002 | (mmByte *) b.img, |
2003 | (mmByte *) b.img + b.width * b.height, |
2004 | 0}; |
2005 | |
            if (!VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0))
                ALOGE("Scale NV21 frame down failed!\n");
2008 | |
2009 | delete [] tmp_buffer; |
2010 | #endif |
2011 | } else { |
2012 | ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat); |
2013 | } |
2014 | return ; |
2015 | } |
    while (1) {
2017 | src = (uint8_t *)get_frame(vinfo); |
2018 | if (NULL == src) { |
2019 | CAMHAL_LOGDA("get frame NULL, sleep 5ms"); |
2020 | usleep(5000); |
2021 | continue; |
2022 | } |
2023 | if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_NV21) { |
2024 | if (vinfo->preview.buf.length == b.width * b.height * 3/2) { |
2025 | memcpy(b.img, src, vinfo->preview.buf.length); |
2026 | } else { |
                nv21_memcpy_align32(b.img, src, b.width, b.height);
2028 | } |
2029 | mKernelBuffer = src; |
2030 | } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { |
2031 | int width = vinfo->preview.format.fmt.pix.width; |
2032 | int height = vinfo->preview.format.fmt.pix.height; |
2033 | YUYVToNV21(src, b.img, width, height); |
2034 | mKernelBuffer = src; |
2035 | } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { |
2036 | int width = vinfo->preview.format.fmt.pix.width; |
2037 | int height = vinfo->preview.format.fmt.pix.height; |
2038 | if (ConvertMjpegToNV21(src, vinfo->preview.buf.bytesused, b.img, |
2039 | width, b.img + width * height, (width + 1) / 2, width, |
2040 | height, width, height, libyuv::FOURCC_MJPG) != 0) { |
2041 | putback_frame(vinfo); |
2042 | DBG_LOGA("Decode MJPEG frame failed\n"); |
2043 | continue; |
2044 | } |
2045 | mKernelBuffer = b.img; |
2046 | } |
2047 | |
2048 | break; |
2049 | } |
2050 | #endif |
2051 | |
2052 | ALOGVV("NV21 sensor image captured"); |
2053 | } |
2054 | |
2055 | void Sensor::captureYV12(StreamBuffer b, uint32_t gain) { |
2056 | #if 0 |
2057 | float totalGain = gain/100.0 * kBaseGainFactor; |
2058 | // Using fixed-point math with 6 bits of fractional precision. |
2059 | // In fixed-point math, calculate total scaling from electrons to 8bpp |
2060 | const int scale64x = 64 * totalGain * 255 / kMaxRawValue; |
2061 | // In fixed-point math, saturation point of sensor after gain |
2062 | const int saturationPoint = 64 * 255; |
2063 | // Fixed-point coefficients for RGB-YUV transform |
2064 | // Based on JFIF RGB->YUV transform. |
2065 | // Cb/Cr offset scaled by 64x twice since they're applied post-multiply |
2066 | const int rgbToY[] = {19, 37, 7}; |
2067 | const int rgbToCb[] = {-10,-21, 32, 524288}; |
2068 | const int rgbToCr[] = {32,-26, -5, 524288}; |
2069 | // Scale back to 8bpp non-fixed-point |
2070 | const int scaleOut = 64; |
2071 | const int scaleOutSq = scaleOut * scaleOut; // after multiplies |
2072 | |
2073 | uint32_t inc = kResolution[0] / stride; |
2074 | uint32_t outH = kResolution[1] / inc; |
2075 | for (unsigned int y = 0, outY = 0; |
2076 | y < kResolution[1]; y+=inc, outY++) { |
2077 | uint8_t *pxY = img + outY * stride; |
2078 | uint8_t *pxVU = img + (outH + outY / 2) * stride; |
2079 | mScene.setReadoutPixel(0,y); |
2080 | for (unsigned int outX = 0; outX < stride; outX++) { |
2081 | int32_t rCount, gCount, bCount; |
2082 | // TODO: Perfect demosaicing is a cheat |
2083 | const uint32_t *pixel = mScene.getPixelElectrons(); |
2084 | rCount = pixel[Scene::R] * scale64x; |
2085 | rCount = rCount < saturationPoint ? rCount : saturationPoint; |
2086 | gCount = pixel[Scene::Gr] * scale64x; |
2087 | gCount = gCount < saturationPoint ? gCount : saturationPoint; |
2088 | bCount = pixel[Scene::B] * scale64x; |
2089 | bCount = bCount < saturationPoint ? bCount : saturationPoint; |
2090 | |
2091 | *pxY++ = (rgbToY[0] * rCount + |
2092 | rgbToY[1] * gCount + |
2093 | rgbToY[2] * bCount) / scaleOutSq; |
2094 | if (outY % 2 == 0 && outX % 2 == 0) { |
2095 | *pxVU++ = (rgbToCr[0] * rCount + |
2096 | rgbToCr[1] * gCount + |
2097 | rgbToCr[2] * bCount + |
2098 | rgbToCr[3]) / scaleOutSq; |
2099 | *pxVU++ = (rgbToCb[0] * rCount + |
2100 | rgbToCb[1] * gCount + |
2101 | rgbToCb[2] * bCount + |
2102 | rgbToCb[3]) / scaleOutSq; |
2103 | } |
2104 | for (unsigned int j = 1; j < inc; j++) |
2105 | mScene.getPixelElectrons(); |
2106 | } |
2107 | } |
2108 | #else |
2109 | uint8_t *src; |
2110 | if (mKernelBuffer) { |
2111 | src = mKernelBuffer; |
2112 | if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) { |
2113 | //memcpy(b.img, src, 200 * 100 * 3 / 2 /*vinfo->preview.buf.length*/); |
2114 | ALOGI("Sclale YV12 frame down \n"); |
2115 | |
2116 | int width = vinfo->preview.format.fmt.pix.width; |
2117 | int height = vinfo->preview.format.fmt.pix.height; |
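            // YV12 orders its planes Y, Cr(V), Cb(U), while I420Scale() takes
            // I420-order (Y, U, V) arguments; passing the V plane in the U
            // slot for both source and destination is consistent, so each
            // plane is still scaled into its matching position.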
2118 | int ret = libyuv::I420Scale(src, width, |
2119 | src + width * height, width / 2, |
2120 | src + width * height + width * height / 4, width / 2, |
2121 | width, height, |
2122 | b.img, b.width, |
2123 | b.img + b.width * b.height, b.width / 2, |
2124 | b.img + b.width * b.height + b.width * b.height / 4, b.width / 2, |
2125 | b.width, b.height, |
2126 | libyuv::kFilterNone); |
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2129 | } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { |
2130 | int width = vinfo->preview.format.fmt.pix.width; |
2131 | int height = vinfo->preview.format.fmt.pix.height; |
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2]; // nothrow so the NULL check is meaningful

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }
2138 | |
2139 | YUYVToYV12(src, tmp_buffer, width, height); |
2140 | |
2141 | int ret = libyuv::I420Scale(tmp_buffer, width, |
2142 | tmp_buffer + width * height, width / 2, |
2143 | tmp_buffer + width * height + width * height / 4, width / 2, |
2144 | width, height, |
2145 | b.img, b.width, |
2146 | b.img + b.width * b.height, b.width / 2, |
2147 | b.img + b.width * b.height + b.width * b.height / 4, b.width / 2, |
2148 | b.width, b.height, |
2149 | libyuv::kFilterNone); |
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2152 | delete [] tmp_buffer; |
2153 | } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { |
2154 | int width = vinfo->preview.format.fmt.pix.width; |
2155 | int height = vinfo->preview.format.fmt.pix.height; |
            uint8_t *tmp_buffer = new (std::nothrow) uint8_t[width * height * 3 / 2]; // nothrow so the NULL check is meaningful

            if (tmp_buffer == NULL) {
                ALOGE("new buffer failed!\n");
                return;
            }
2162 | |
            if (ConvertToI420(src, vinfo->preview.buf.bytesused, tmp_buffer, width, tmp_buffer + width * height + width * height / 4, (width + 1) / 2,
                    tmp_buffer + width * height, (width + 1) / 2, 0, 0, width, height,
                    width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) {
                DBG_LOGA("Decode MJPEG frame failed\n");
                delete [] tmp_buffer; // don't scale a garbage frame
                return;
            }
2168 | |
2169 | int ret = libyuv::I420Scale(tmp_buffer, width, |
2170 | tmp_buffer + width * height, width / 2, |
2171 | tmp_buffer + width * height + width * height / 4, width / 2, |
2172 | width, height, |
2173 | b.img, b.width, |
2174 | b.img + b.width * b.height, b.width / 2, |
2175 | b.img + b.width * b.height + b.width * b.height / 4, b.width / 2, |
2176 | b.width, b.height, |
2177 | libyuv::kFilterNone); |
            if (ret < 0)
                ALOGE("Scale YV12 frame down failed!\n");
2180 | |
2181 | delete [] tmp_buffer; |
2182 | } else { |
2183 | ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat); |
2184 | } |
2185 | return ; |
2186 | } |
    while (1) {
2188 | src = (uint8_t *)get_frame(vinfo); |
2189 | |
2190 | if (NULL == src) { |
2191 | CAMHAL_LOGDA("get frame NULL, sleep 5ms"); |
2192 | usleep(5000); |
2193 | continue; |
2194 | } |
2195 | if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YVU420) { |
2196 | if (vinfo->preview.buf.length == b.width * b.height * 3/2) { |
2197 | memcpy(b.img, src, vinfo->preview.buf.length); |
2198 | } else { |
                yv12_memcpy_align32(b.img, src, b.width, b.height);
2200 | } |
2201 | } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { |
2202 | int width = vinfo->preview.format.fmt.pix.width; |
2203 | int height = vinfo->preview.format.fmt.pix.height; |
2204 | YUYVToYV12(src, b.img, width, height); |
2205 | } else if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG) { |
2206 | int width = vinfo->preview.format.fmt.pix.width; |
2207 | int height = vinfo->preview.format.fmt.pix.height; |
2208 | if (ConvertToI420(src, vinfo->preview.buf.bytesused, b.img, width, b.img + width * height + width * height / 4, (width + 1) / 2, |
2209 | b.img + width * height, (width + 1) / 2, 0, 0, width, height, |
2210 | width, height, libyuv::kRotate0, libyuv::FOURCC_MJPG) != 0) { |
2211 | putback_frame(vinfo); |
2212 | DBG_LOGA("Decode MJPEG frame failed\n"); |
2213 | continue; |
2214 | } |
2215 | } else { |
2216 | ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat); |
2217 | } |
2218 | |
2219 | break; |
2220 | } |
2221 | #endif |
2222 | mKernelBuffer = src; |
2223 | ALOGVV("YV12 sensor image captured"); |
2224 | } |
2225 | |
2226 | void Sensor::captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride) { |
2227 | #if 0 |
2228 | float totalGain = gain/100.0 * kBaseGainFactor; |
2229 | // Using fixed-point math with 6 bits of fractional precision. |
2230 | // In fixed-point math, calculate total scaling from electrons to 8bpp |
2231 | const int scale64x = 64 * totalGain * 255 / kMaxRawValue; |
2232 | // In fixed-point math, saturation point of sensor after gain |
2233 | const int saturationPoint = 64 * 255; |
2234 | // Fixed-point coefficients for RGB-YUV transform |
2235 | // Based on JFIF RGB->YUV transform. |
2236 | // Cb/Cr offset scaled by 64x twice since they're applied post-multiply |
2237 | const int rgbToY[] = {19, 37, 7}; |
2238 | const int rgbToCb[] = {-10,-21, 32, 524288}; |
2239 | const int rgbToCr[] = {32,-26, -5, 524288}; |
2240 | // Scale back to 8bpp non-fixed-point |
2241 | const int scaleOut = 64; |
2242 | const int scaleOutSq = scaleOut * scaleOut; // after multiplies |
2243 | |
2244 | uint32_t inc = kResolution[0] / stride; |
2245 | uint32_t outH = kResolution[1] / inc; |
2246 | for (unsigned int y = 0, outY = 0; |
2247 | y < kResolution[1]; y+=inc, outY++) { |
2248 | uint8_t *pxY = img + outY * stride; |
2249 | uint8_t *pxVU = img + (outH + outY / 2) * stride; |
2250 | mScene.setReadoutPixel(0,y); |
2251 | for (unsigned int outX = 0; outX < stride; outX++) { |
2252 | int32_t rCount, gCount, bCount; |
2253 | // TODO: Perfect demosaicing is a cheat |
2254 | const uint32_t *pixel = mScene.getPixelElectrons(); |
2255 | rCount = pixel[Scene::R] * scale64x; |
2256 | rCount = rCount < saturationPoint ? rCount : saturationPoint; |
2257 | gCount = pixel[Scene::Gr] * scale64x; |
2258 | gCount = gCount < saturationPoint ? gCount : saturationPoint; |
2259 | bCount = pixel[Scene::B] * scale64x; |
2260 | bCount = bCount < saturationPoint ? bCount : saturationPoint; |
2261 | |
2262 | *pxY++ = (rgbToY[0] * rCount + |
2263 | rgbToY[1] * gCount + |
2264 | rgbToY[2] * bCount) / scaleOutSq; |
2265 | if (outY % 2 == 0 && outX % 2 == 0) { |
2266 | *pxVU++ = (rgbToCr[0] * rCount + |
2267 | rgbToCr[1] * gCount + |
2268 | rgbToCr[2] * bCount + |
2269 | rgbToCr[3]) / scaleOutSq; |
2270 | *pxVU++ = (rgbToCb[0] * rCount + |
2271 | rgbToCb[1] * gCount + |
2272 | rgbToCb[2] * bCount + |
2273 | rgbToCb[3]) / scaleOutSq; |
2274 | } |
2275 | for (unsigned int j = 1; j < inc; j++) |
2276 | mScene.getPixelElectrons(); |
2277 | } |
2278 | } |
2279 | #else |
2280 | uint8_t *src; |
2281 | if (mKernelBuffer) { |
2282 | src = mKernelBuffer; |
2283 | if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { |
2284 | //TODO YUYV scale |
2285 | //memcpy(img, src, vinfo->preview.buf.length); |
2286 | |
        } else {
            ALOGE("Unknown sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat);
        }

        return;
2291 | } |
2292 | |
    while (1) {
2294 | src = (uint8_t *)get_frame(vinfo); |
2295 | if (NULL == src) { |
2296 | CAMHAL_LOGDA("get frame NULL, sleep 5ms"); |
2297 | usleep(5000); |
2298 | continue; |
2299 | } |
2300 | if (vinfo->preview.format.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV) { |
2301 | memcpy(img, src, vinfo->preview.buf.length); |
2302 | } else { |
2303 | ALOGE("Unable known sensor format: %d", vinfo->preview.format.fmt.pix.pixelformat); |
2304 | } |
2305 | |
2306 | break; |
2307 | } |
2308 | #endif |
2309 | mKernelBuffer = src; |
2310 | ALOGVV("YUYV sensor image captured"); |
2311 | } |
2312 | |
2313 | void Sensor::dump(int fd) { |
2314 | String8 result; |
    result = String8::format("%s, sensor preview information:\n", __FILE__);
    result.appendFormat("camera preview fps: %.2f\n", mCurFps);
    result.appendFormat("camera preview width: %d, height: %d\n",
            vinfo->preview.format.fmt.pix.width, vinfo->preview.format.fmt.pix.height);
2319 | |
2320 | result.appendFormat("camera preview format: %.4s\n\n", |
2321 | (char *) &vinfo->preview.format.fmt.pix.pixelformat); |
2322 | |
2323 | write(fd, result.string(), result.size()); |
2324 | } |
2325 | |
2326 | } // namespace android |
2327 | |
2328 |