blob: 1d59eeb4b286f9d1ef7c8e26ff20c7c27d99ff30
1 | /* |
2 | * Copyright (C) 2013 The Android Open Source Project |
3 | * |
4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
5 | * you may not use this file except in compliance with the License. |
6 | * You may obtain a copy of the License at |
7 | * |
8 | * http://www.apache.org/licenses/LICENSE-2.0 |
9 | * |
10 | * Unless required by applicable law or agreed to in writing, software |
11 | * distributed under the License is distributed on an "AS IS" BASIS, |
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
13 | * See the License for the specific language governing permissions and |
14 | * limitations under the License. |
15 | */ |
16 | |
17 | /* |
18 | * Contains implementation of a class EmulatedFakeCamera3 that encapsulates |
19 | * functionality of an advanced fake camera. |
20 | */ |
21 | |
22 | #include <inttypes.h> |
23 | |
24 | //#define LOG_NDEBUG 0 |
25 | //#define LOG_NNDEBUG 0 |
26 | #define LOG_TAG "EmulatedCamera_FakeCamera3" |
27 | #include <utils/Log.h> |
28 | |
29 | #include "EmulatedFakeCamera3.h" |
30 | #include "EmulatedCameraFactory.h" |
31 | #include <ui/Fence.h> |
32 | #include <ui/Rect.h> |
33 | #include <ui/GraphicBufferMapper.h> |
34 | #include <sys/types.h> |
35 | |
36 | #include <cutils/properties.h> |
37 | #include "fake-pipeline2/Sensor.h" |
38 | #include "fake-pipeline2/JpegCompressor.h" |
39 | #include <cmath> |
40 | #include <gralloc_priv.h> |
41 | |
42 | #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0 |
43 | #define ALOGVV ALOGV |
44 | #else |
45 | #define ALOGVV(...) ((void)0) |
46 | #endif |
47 | |
48 | namespace android { |
49 | |
/**
 * Constants for camera capabilities
 */

// Time-unit multipliers in nanoseconds (the HAL's native time base).
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;


// Pixel formats this HAL advertises; commented-out entries are
// intentionally left unsupported on this device.
const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
        //HAL_PIXEL_FORMAT_RAW_SENSOR,
        HAL_PIXEL_FORMAT_BLOB,
        //HAL_PIXEL_FORMAT_RGBA_8888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
        // These are handled by YCbCr_420_888
        HAL_PIXEL_FORMAT_YV12,
        HAL_PIXEL_FORMAT_YCrCb_420_SP,
        //HAL_PIXEL_FORMAT_YCbCr_422_I,
        HAL_PIXEL_FORMAT_YCbCr_420_888
};

// RAW output sizes, packed as {width, height} pairs.
const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
    640, 480
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum frame duration (ns) for RAW output.
const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// Processed (YUV) output sizes for the back camera, {width, height} pairs.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[6] = {
    640, 480, 320, 240,// 1280, 720
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Processed (YUV) output sizes for the front camera, {width, height} pairs.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
    640, 480, 320, 240
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum frame duration (ns) for processed output.
const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// JPEG capture sizes, {width, height} pairs, per facing.
const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
    1280,720
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
    640, 480
    // Sensor::kResolution[0], Sensor::kResolution[1]
};


// Minimum frame duration (ns) for JPEG capture.
const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
// NOTE(review): the tracking/wander constants below drive the fake 3A
// loop elsewhere in this file; semantics inferred from names — confirm
// against the 3A processing code before relying on them.
const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int EmulatedFakeCamera3::kNormalSensitivity = 100;
const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
const float EmulatedFakeCamera3::kExposureWanderMin = -2;
const float EmulatedFakeCamera3::kExposureWanderMax = 1;

/**
 * Camera device lifecycle methods
 */
// Smallest JPEG buffer ever handed out: 256 KB of payload plus room for the
// camera3_jpeg_blob descriptor appended at the end of the buffer.
static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
128 | jpegsize EmulatedFakeCamera3::getMaxJpegResolution(uint32_t picSizes[],int count) { |
129 | uint32_t maxJpegWidth = 0, maxJpegHeight = 0; |
130 | jpegsize maxJpegResolution; |
131 | for (int i=0; i < count; i+= 4) { |
132 | uint32_t width = picSizes[i+1]; |
133 | uint32_t height = picSizes[i+2]; |
134 | if (picSizes[i+0] == HAL_PIXEL_FORMAT_BLOB && |
135 | (width * height > maxJpegWidth * maxJpegHeight)) { |
136 | maxJpegWidth = width; |
137 | maxJpegHeight = height; |
138 | } |
139 | } |
140 | maxJpegResolution.width = maxJpegWidth; |
141 | maxJpegResolution.height = maxJpegHeight; |
142 | return maxJpegResolution; |
143 | } |
144 | ssize_t EmulatedFakeCamera3::getJpegBufferSize(int width, int height) { |
145 | if (maxJpegResolution.width == 0) { |
146 | return BAD_VALUE; |
147 | } |
148 | ssize_t maxJpegBufferSize = JpegCompressor::kMaxJpegSize; |
149 | |
150 | // Calculate final jpeg buffer size for the given resolution. |
151 | float scaleFactor = ((float) (width * height)) / |
152 | (maxJpegResolution.width * maxJpegResolution.height); |
153 | ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize; |
154 | // Bound the buffer size to [MIN_JPEG_BUFFER_SIZE, maxJpegBufferSize]. |
155 | if (jpegBufferSize > maxJpegBufferSize) { |
156 | jpegBufferSize = maxJpegBufferSize; |
157 | } else if (jpegBufferSize < kMinJpegBufferSize) { |
158 | jpegBufferSize = kMinJpegBufferSize; |
159 | } |
160 | return jpegBufferSize; |
161 | } |
162 | |
163 | EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, struct hw_module_t* module) : |
164 | EmulatedCamera3(cameraId, module) { |
165 | ALOGI("Constructing emulated fake camera 3 cameraID:%d", mCameraID); |
166 | |
167 | for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) { |
168 | mDefaultTemplates[i] = NULL; |
169 | } |
170 | |
171 | /** |
172 | * Front cameras = limited mode |
173 | * Back cameras = full mode |
174 | */ |
175 | //TODO limited or full mode, read this from camera driver |
176 | //mFullMode = facingBack; |
177 | mCameraStatus = CAMERA_INIT; |
178 | mSupportCap = 0; |
179 | mSupportRotate = 0; |
180 | mFullMode = 0; |
181 | } |
182 | |
183 | EmulatedFakeCamera3::~EmulatedFakeCamera3() { |
184 | for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) { |
185 | if (mDefaultTemplates[i] != NULL) { |
186 | free_camera_metadata(mDefaultTemplates[i]); |
187 | } |
188 | } |
189 | |
190 | if (mCameraInfo != NULL) { |
191 | CAMHAL_LOGIA("free mCameraInfo"); |
192 | free_camera_metadata(mCameraInfo); |
193 | mCameraInfo = NULL; |
194 | } |
195 | } |
196 | |
/**
 * One-time device initialization: builds the static metadata and then
 * delegates to the base class. Must only be called once.
 */
status_t EmulatedFakeCamera3::Initialize() {
    DBG_LOGB("mCameraID=%d,mStatus=%d,ddd\n", mCameraID, mStatus);
    status_t res;

#ifdef HAVE_VERSION_INFO
    // Build-provenance banner; all CAMHAL_* values are injected by the
    // build system.
    CAMHAL_LOGIB("\n--------------------------------\n"
                  "author:aml.sh multi-media team\n"
                  "branch name: %s\n"
                  "git version: %s \n"
                  "last changed: %s\n"
                  "build-time: %s\n"
                  "build-name: %s\n"
                  "uncommitted-file-num:%d\n"
                  "ssh user@%s, cd %s\n"
                  "hostname %s\n"
                  "--------------------------------\n",
                  CAMHAL_BRANCH_NAME,
                  CAMHAL_GIT_VERSION,
                  CAMHAL_LAST_CHANGED,
                  CAMHAL_BUILD_TIME,
                  CAMHAL_BUILD_NAME,
                  CAMHAL_GIT_UNCOMMIT_FILE_NUM,
                  CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
                  );
#endif


    // NOTE(review): STATUS_ERROR appears to be the pre-initialization state
    // here — any other state means Initialize() already ran; confirm against
    // EmulatedCamera3's state machine.
    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    // Build the static capability metadata (mCameraInfo) before the base
    // class transitions the device state.
    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return EmulatedCamera3::Initialize();
}
238 | |
/**
 * Open the device: bring up the fake sensor, the readout thread and the
 * JPEG compressor, then seed the fake 3A state before handing off to the
 * base class.
 *
 * NOTE(review): on the error returns after mSensor->startUp() succeeds,
 * the sensor and readout thread are left running/allocated — presumably
 * cleaned up on a later closeCamera(); confirm.
 */
status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");
    Mutex::Autolock l(mLock);
    status_t res;

    // Only a closed, physically-present camera may be (re)connected.
    if ((mStatus != STATUS_CLOSED) || !mPlugged) {
        ALOGE("%s: Can't connect in state %d, mPlugged=%d",
                __FUNCTION__, mStatus, mPlugged);
        return INVALID_OPERATION;
    }

    // Create the fake sensor and register for its frame/vsync callbacks.
    mSensor = new Sensor();
    mSensor->setSensorListener(this);

    res = mSensor->startUp(mCameraID);
    DBG_LOGB("mSensor startUp, mCameraID=%d\n", mCameraID);
    if (res != NO_ERROR) return res;

    // Probe optional ioctl-backed features (e.g. hardware rotation).
    mSupportCap = mSensor->IoctlStateProbe();
    if (mSupportCap & IOCTL_MASK_ROTATE) {
        mSupportRotate = true;
    }

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor();

    res = mReadoutThread->setJpegCompressorListener(this);
    if (res != NO_ERROR) {
        return res;
    }
    res = mReadoutThread->startJpegCompressor(this);
    if (res != NO_ERROR) {
        return res;
    }

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A to auto defaults. AE deliberately starts in
    // CONVERGED rather than INACTIVE (see the inline comment).
    mControlMode = ANDROID_CONTROL_MODE_AUTO;
    mFacePriority = false;
    mAeMode = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;//ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAfTriggerId = 0;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}
294 | |
295 | status_t EmulatedFakeCamera3::plugCamera() { |
296 | { |
297 | Mutex::Autolock l(mLock); |
298 | |
299 | if (!mPlugged) { |
300 | CAMHAL_LOGIB("%s: Plugged back in", __FUNCTION__); |
301 | mPlugged = true; |
302 | } |
303 | } |
304 | |
305 | return NO_ERROR; |
306 | } |
307 | |
308 | status_t EmulatedFakeCamera3::unplugCamera() { |
309 | { |
310 | Mutex::Autolock l(mLock); |
311 | |
312 | if (mPlugged) { |
313 | CAMHAL_LOGIB("%s: Unplugged camera", __FUNCTION__); |
314 | mPlugged = false; |
315 | } |
316 | } |
317 | |
318 | return closeCamera(); |
319 | } |
320 | |
321 | camera_device_status_t EmulatedFakeCamera3::getHotplugStatus() { |
322 | Mutex::Autolock l(mLock); |
323 | return mPlugged ? |
324 | CAMERA_DEVICE_STATUS_PRESENT : |
325 | CAMERA_DEVICE_STATUS_NOT_PRESENT; |
326 | } |
327 | |
// Record the externally-observed camera status.
// NOTE(review): written without holding mLock — presumably callers
// serialize access; confirm before relying on this across threads.
void EmulatedFakeCamera3::setCameraStatus(camera_status_t status)
{
    mCameraStatus = status;
}
332 | |
// Return the last status recorded by setCameraStatus() (unlocked read).
camera_status_t EmulatedFakeCamera3::getCameraStatus()
{
    CAMHAL_LOGVB("%s, mCameraStatus = %d",__FUNCTION__,mCameraStatus);
    return mCameraStatus;
}
338 | |
/**
 * Close the device: shut down the sensor and JPEG compressor, stop the
 * readout thread, tear down per-stream state, then delegate to the base
 * class. Safe to call when already closed.
 */
status_t EmulatedFakeCamera3::closeCamera() {
    CAMHAL_LOGVB("%s, %d\n", __FUNCTION__, __LINE__);
    status_t res;
    {
        Mutex::Autolock l(mLock);
        if (mStatus == STATUS_CLOSED) return OK;
        //res = mSensor->streamOff();

        // Stop the sensor first so no new frames are produced while we
        // tear everything else down.
        res = mSensor->shutDown();
        if (res != NO_ERROR) {
            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
            return res;
        }
        mSensor.clear();

        res = mReadoutThread->shutdownJpegCompressor(this);
        if (res != OK) {
            ALOGE("%s: Unable to shut down JpegCompressor: %d", __FUNCTION__, res);
            return res;
        }

        mReadoutThread->requestExit();
    }

    // Join with mLock dropped — NOTE(review): presumably the readout
    // thread can need mLock while exiting; joining under the lock would
    // deadlock. Confirm against ReadoutThread.
    mReadoutThread->join();

    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = NULL;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }

    return EmulatedCamera3::closeCamera();
}
380 | |
381 | status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) { |
382 | char property[PROPERTY_VALUE_MAX]; |
383 | info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT; |
384 | if (mSensorType == SENSOR_USB) { |
385 | if (mFacingBack) { |
386 | property_get("hw.camera.orientation.back", property, "0"); |
387 | } else { |
388 | property_get("hw.camera.orientation.front", property, "0"); |
389 | } |
390 | int32_t orientation = atoi(property); |
391 | property_get("hw.camera.usb.orientation_offset", property, "0"); |
392 | orientation += atoi(property); |
393 | orientation %= 360; |
394 | info->orientation = orientation ; |
395 | } else { |
396 | if (mFacingBack) { |
397 | property_get("hw.camera.orientation.back", property, "270"); |
398 | } else { |
399 | property_get("hw.camera.orientation.front", property, "90"); |
400 | } |
401 | info->orientation = atoi(property); |
402 | } |
403 | return EmulatedCamera3::getCameraInfo(info); |
404 | } |
405 | |
406 | /** |
407 | * Camera3 interface methods |
408 | */ |
409 | |
410 | void EmulatedFakeCamera3::getValidJpegSize(uint32_t picSizes[], uint32_t availablejpegsize[], int count) { |
411 | int i,j,k; |
412 | bool valid = true; |
413 | for (i=0,j=0; i < count; i+= 4) { |
414 | for (k= 0; k<=j ;k+=2) { |
415 | if ((availablejpegsize[k]*availablejpegsize[k+1]) == (picSizes[i+1]*picSizes[i+2])) { |
416 | |
417 | valid = false; |
418 | } |
419 | } |
420 | if (valid) { |
421 | availablejpegsize[j] = picSizes[i+1]; |
422 | availablejpegsize[j+1] = picSizes[i+2]; |
423 | j+=2; |
424 | } |
425 | valid = true; |
426 | } |
427 | } |
428 | |
429 | status_t EmulatedFakeCamera3::checkValidJpegSize(uint32_t width, uint32_t height) { |
430 | |
431 | int validsizecount = 0; |
432 | uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]); |
433 | for (uint32_t f = 0; f < count; f+=2) { |
434 | if (mAvailableJpegSize[f] != 0) { |
435 | if ((mAvailableJpegSize[f] == width)&&(mAvailableJpegSize[f+1] == height)) { |
436 | validsizecount++; |
437 | } |
438 | } else { |
439 | break; |
440 | } |
441 | } |
442 | if (validsizecount == 0) |
443 | return BAD_VALUE; |
444 | return OK; |
445 | } |
446 | |
447 | status_t EmulatedFakeCamera3::configureStreams( |
448 | camera3_stream_configuration *streamList) { |
449 | Mutex::Autolock l(mLock); |
450 | uint32_t width, height, pixelfmt; |
451 | bool isRestart = false; |
452 | DBG_LOGB("%s: %d streams", __FUNCTION__, streamList->num_streams); |
453 | |
454 | if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) { |
455 | ALOGE("%s: Cannot configure streams in state %d", |
456 | __FUNCTION__, mStatus); |
457 | return NO_INIT; |
458 | } |
459 | |
460 | /** |
461 | * Sanity-check input list. |
462 | */ |
463 | if (streamList == NULL) { |
464 | ALOGE("%s: NULL stream configuration", __FUNCTION__); |
465 | return BAD_VALUE; |
466 | } |
467 | |
468 | if (streamList->streams == NULL) { |
469 | ALOGE("%s: NULL stream list", __FUNCTION__); |
470 | return BAD_VALUE; |
471 | } |
472 | |
473 | if (streamList->num_streams < 1) { |
474 | ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__, |
475 | streamList->num_streams); |
476 | return BAD_VALUE; |
477 | } |
478 | |
479 | camera3_stream_t *inputStream = NULL; |
480 | for (size_t i = 0; i < streamList->num_streams; i++) { |
481 | camera3_stream_t *newStream = streamList->streams[i]; |
482 | |
483 | if (newStream == NULL) { |
484 | ALOGE("%s: Stream index %zu was NULL", |
485 | __FUNCTION__, i); |
486 | return BAD_VALUE; |
487 | } |
488 | |
489 | if (newStream->max_buffers <= 0) { |
490 | isRestart = true;//mSensor->isNeedRestart(newStream->width, newStream->height, newStream->format); |
491 | DBG_LOGB("format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d, isRestart=%d\n", |
492 | newStream->format, newStream->width, newStream->height, |
493 | newStream->stream_type, newStream->max_buffers, |
494 | isRestart); |
495 | } |
496 | ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x", |
497 | __FUNCTION__, newStream, i, newStream->stream_type, |
498 | newStream->usage, |
499 | newStream->format); |
500 | |
501 | if (newStream->stream_type == CAMERA3_STREAM_INPUT || |
502 | newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { |
503 | if (inputStream != NULL) { |
504 | |
505 | ALOGE("%s: Multiple input streams requested!", __FUNCTION__); |
506 | return BAD_VALUE; |
507 | } |
508 | inputStream = newStream; |
509 | } |
510 | |
511 | bool validFormat = false; |
512 | for (size_t f = 0; |
513 | f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]); |
514 | f++) { |
515 | if (newStream->format == kAvailableFormats[f]) { |
516 | validFormat = true; |
517 | //HAL_PIXEL_FORMAT_YCrCb_420_SP, |
518 | if (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) |
519 | newStream->format = HAL_PIXEL_FORMAT_YCrCb_420_SP; |
520 | |
521 | break; |
522 | } |
523 | DBG_LOGB("stream_type=%d\n", newStream->stream_type); |
524 | } |
525 | if (!validFormat) { |
526 | ALOGE("%s: Unsupported stream format 0x%x requested", |
527 | __FUNCTION__, newStream->format); |
528 | return BAD_VALUE; |
529 | } |
530 | |
531 | status_t ret = checkValidJpegSize(newStream->width, newStream->height); |
532 | if (ret != OK) { |
533 | return BAD_VALUE; |
534 | } |
535 | |
536 | } |
537 | mInputStream = inputStream; |
538 | width = 0; |
539 | height = 0; |
540 | for (size_t i = 0; i < streamList->num_streams; i++) { |
541 | camera3_stream_t *newStream = streamList->streams[i]; |
542 | DBG_LOGB("find propert width and height, format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d\n", |
543 | newStream->format, newStream->width, newStream->height, newStream->stream_type, newStream->max_buffers); |
544 | if ((HAL_PIXEL_FORMAT_BLOB != newStream->format) && |
545 | (CAMERA3_STREAM_OUTPUT == newStream->stream_type)) { |
546 | |
547 | if (width < newStream->width) |
548 | width = newStream->width; |
549 | |
550 | if (height < newStream->height) |
551 | height = newStream->height; |
552 | |
553 | pixelfmt = (uint32_t)newStream->format; |
554 | if (HAL_PIXEL_FORMAT_YCbCr_420_888 == pixelfmt) |
555 | pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP; |
556 | } |
557 | |
558 | } |
559 | |
560 | //TODO modify this ugly code |
561 | if (isRestart) { |
562 | isRestart = mSensor->isNeedRestart(width, height, pixelfmt); |
563 | } |
564 | |
565 | if (isRestart) { |
566 | mSensor->streamOff(); |
567 | pixelfmt = mSensor->halFormatToSensorFormat(pixelfmt); |
568 | mSensor->setOutputFormat(width, height, pixelfmt, 0); |
569 | mSensor->streamOn(); |
570 | DBG_LOGB("width=%d, height=%d, pixelfmt=%.4s\n", |
571 | width, height, (char*)&pixelfmt); |
572 | } |
573 | |
574 | /** |
575 | * Initially mark all existing streams as not alive |
576 | */ |
577 | for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) { |
578 | PrivateStreamInfo *privStream = |
579 | static_cast<PrivateStreamInfo*>((*s)->priv); |
580 | privStream->alive = false; |
581 | } |
582 | |
583 | /** |
584 | * Find new streams and mark still-alive ones |
585 | */ |
586 | for (size_t i = 0; i < streamList->num_streams; i++) { |
587 | camera3_stream_t *newStream = streamList->streams[i]; |
588 | if (newStream->priv == NULL) { |
589 | // New stream, construct info |
590 | PrivateStreamInfo *privStream = new PrivateStreamInfo(); |
591 | privStream->alive = true; |
592 | privStream->registered = false; |
593 | |
594 | newStream->usage = |
595 | mSensor->getStreamUsage(newStream->stream_type); |
596 | |
597 | DBG_LOGB("stream_type=%d\n", newStream->stream_type); |
598 | newStream->max_buffers = kMaxBufferCount; |
599 | newStream->priv = privStream; |
600 | mStreams.push_back(newStream); |
601 | } else { |
602 | // Existing stream, mark as still alive. |
603 | PrivateStreamInfo *privStream = |
604 | static_cast<PrivateStreamInfo*>(newStream->priv); |
605 | CAMHAL_LOGDA("Existing stream ?"); |
606 | privStream->alive = true; |
607 | } |
608 | DBG_LOGB("%d, newStream=%p, stream_type=%d, usage=%x, priv=%p, w*h=%dx%d\n", |
609 | i, newStream, newStream->stream_type, newStream->usage, newStream->priv, newStream->width, newStream->height); |
610 | } |
611 | |
612 | /** |
613 | * Reap the dead streams |
614 | */ |
615 | for (StreamIterator s = mStreams.begin(); s != mStreams.end();) { |
616 | PrivateStreamInfo *privStream = |
617 | static_cast<PrivateStreamInfo*>((*s)->priv); |
618 | if (!privStream->alive) { |
619 | DBG_LOGA("delete not alive streams"); |
620 | (*s)->priv = NULL; |
621 | delete privStream; |
622 | s = mStreams.erase(s); |
623 | } else { |
624 | ++s; |
625 | } |
626 | } |
627 | |
628 | /** |
629 | * Can't reuse settings across configure call |
630 | */ |
631 | mPrevSettings.clear(); |
632 | |
633 | return OK; |
634 | } |
635 | |
636 | status_t EmulatedFakeCamera3::registerStreamBuffers( |
637 | const camera3_stream_buffer_set *bufferSet) { |
638 | DBG_LOGB("%s: E", __FUNCTION__); |
639 | Mutex::Autolock l(mLock); |
640 | |
641 | /** |
642 | * Sanity checks |
643 | */ |
644 | DBG_LOGA("==========sanity checks\n"); |
645 | |
646 | // OK: register streams at any time during configure |
647 | // (but only once per stream) |
648 | if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) { |
649 | ALOGE("%s: Cannot register buffers in state %d", |
650 | __FUNCTION__, mStatus); |
651 | return NO_INIT; |
652 | } |
653 | |
654 | if (bufferSet == NULL) { |
655 | ALOGE("%s: NULL buffer set!", __FUNCTION__); |
656 | return BAD_VALUE; |
657 | } |
658 | |
659 | StreamIterator s = mStreams.begin(); |
660 | for (; s != mStreams.end(); ++s) { |
661 | if (bufferSet->stream == *s) break; |
662 | } |
663 | if (s == mStreams.end()) { |
664 | ALOGE("%s: Trying to register buffers for a non-configured stream!", |
665 | __FUNCTION__); |
666 | return BAD_VALUE; |
667 | } |
668 | |
669 | /** |
670 | * Register the buffers. This doesn't mean anything to the emulator besides |
671 | * marking them off as registered. |
672 | */ |
673 | |
674 | PrivateStreamInfo *privStream = |
675 | static_cast<PrivateStreamInfo*>((*s)->priv); |
676 | |
677 | #if 0 |
678 | if (privStream->registered) { |
679 | ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__); |
680 | return BAD_VALUE; |
681 | } |
682 | #endif |
683 | |
684 | privStream->registered = true; |
685 | |
686 | return OK; |
687 | } |
688 | |
689 | const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings( |
690 | int type) { |
691 | DBG_LOGB("%s: E", __FUNCTION__); |
692 | Mutex::Autolock l(mLock); |
693 | |
694 | if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) { |
695 | ALOGE("%s: Unknown request settings template: %d", |
696 | __FUNCTION__, type); |
697 | return NULL; |
698 | } |
699 | |
700 | /** |
701 | * Cache is not just an optimization - pointer returned has to live at |
702 | * least as long as the camera device instance does. |
703 | */ |
704 | if (mDefaultTemplates[type] != NULL) { |
705 | return mDefaultTemplates[type]; |
706 | } |
707 | |
708 | CameraMetadata settings; |
709 | |
710 | /** android.request */ |
711 | static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; |
712 | settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); |
713 | |
714 | static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; |
715 | settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1); |
716 | |
717 | static const int32_t id = 0; |
718 | settings.update(ANDROID_REQUEST_ID, &id, 1); |
719 | |
720 | static const int32_t frameCount = 0; |
721 | settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1); |
722 | |
723 | /** android.lens */ |
724 | |
725 | static const float focusDistance = 0; |
726 | settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1); |
727 | |
728 | static const float aperture = 2.8f; |
729 | settings.update(ANDROID_LENS_APERTURE, &aperture, 1); |
730 | |
731 | // static const float focalLength = 5.0f; |
732 | static const float focalLength = 3.299999952316284f; |
733 | settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1); |
734 | |
735 | static const float filterDensity = 0; |
736 | settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1); |
737 | |
738 | static const uint8_t opticalStabilizationMode = |
739 | ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; |
740 | settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, |
741 | &opticalStabilizationMode, 1); |
742 | |
743 | // FOCUS_RANGE set only in frame |
744 | |
745 | /** android.sensor */ |
746 | |
747 | static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
748 | settings.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1); |
749 | static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
750 | settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1); |
751 | static const int64_t exposureTime = 10 * MSEC; |
752 | settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1); |
753 | |
754 | int64_t frameDuration = mSensor->getMinFrameDuration(); |
755 | settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1); |
756 | |
757 | static const int32_t sensitivity = 100; |
758 | settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1); |
759 | |
760 | static const int64_t rollingShutterSkew = 0; |
761 | settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1); |
762 | // TIMESTAMP set only in frame |
763 | |
764 | /** android.flash */ |
765 | |
766 | static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE; |
767 | settings.update(ANDROID_FLASH_STATE, &flashstate, 1); |
768 | |
769 | static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; |
770 | settings.update(ANDROID_FLASH_MODE, &flashMode, 1); |
771 | |
772 | static const uint8_t flashPower = 10; |
773 | settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1); |
774 | |
775 | static const int64_t firingTime = 0; |
776 | settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1); |
777 | |
778 | /** Processing block modes */ |
779 | uint8_t hotPixelMode = 0; |
780 | uint8_t demosaicMode = 0; |
781 | uint8_t noiseMode = 0; |
782 | uint8_t shadingMode = 0; |
783 | uint8_t colorMode = 0; |
784 | uint8_t tonemapMode = 0; |
785 | uint8_t edgeMode = 0; |
786 | switch (type) { |
787 | |
788 | case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: |
789 | case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: |
790 | noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY; |
791 | // fall-through |
792 | case CAMERA3_TEMPLATE_STILL_CAPTURE: |
793 | hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY; |
794 | demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY; |
795 | shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY; |
796 | colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY; |
797 | tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY; |
798 | edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY; |
799 | break; |
800 | case CAMERA3_TEMPLATE_PREVIEW: |
801 | // fall-through |
802 | case CAMERA3_TEMPLATE_VIDEO_RECORD: |
803 | // fall-through |
804 | case CAMERA3_TEMPLATE_MANUAL: |
805 | // fall-through |
806 | default: |
807 | hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST; |
808 | demosaicMode = ANDROID_DEMOSAIC_MODE_FAST; |
809 | noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST; |
810 | shadingMode = ANDROID_SHADING_MODE_FAST; |
811 | colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST; |
812 | tonemapMode = ANDROID_TONEMAP_MODE_FAST; |
813 | edgeMode = ANDROID_EDGE_MODE_FAST; |
814 | break; |
815 | } |
816 | settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1); |
817 | settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1); |
818 | settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1); |
819 | settings.update(ANDROID_SHADING_MODE, &shadingMode, 1); |
820 | settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1); |
821 | settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1); |
822 | settings.update(ANDROID_EDGE_MODE, &edgeMode, 1); |
823 | |
824 | /** android.noise */ |
825 | static const uint8_t noiseStrength = 5; |
826 | settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1); |
827 | static uint8_t availableNBModes[] = { |
828 | ANDROID_NOISE_REDUCTION_MODE_OFF, |
829 | ANDROID_NOISE_REDUCTION_MODE_FAST, |
830 | ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY, |
831 | }; |
832 | settings.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, |
833 | availableNBModes, sizeof(availableNBModes)/sizeof(availableNBModes)); |
834 | |
835 | |
836 | /** android.color */ |
837 | static const float colorTransform[9] = { |
838 | 1.0f, 0.f, 0.f, |
839 | 0.f, 1.f, 0.f, |
840 | 0.f, 0.f, 1.f |
841 | }; |
842 | settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9); |
843 | |
844 | /** android.tonemap */ |
845 | static const float tonemapCurve[4] = { |
846 | 0.f, 0.f, |
847 | 1.f, 1.f |
848 | }; |
849 | settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4); |
850 | settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4); |
851 | settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4); |
852 | |
853 | /** android.edge */ |
854 | static const uint8_t edgeStrength = 5; |
855 | settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1); |
856 | |
857 | /** android.scaler */ |
858 | static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; |
859 | settings.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); |
860 | |
861 | static const int32_t cropRegion[] = { |
862 | 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1], |
863 | }; |
864 | settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4); |
865 | |
866 | /** android.jpeg */ |
867 | static const uint8_t jpegQuality = 80; |
868 | settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1); |
869 | |
870 | static const int32_t thumbnailSize[2] = { |
871 | 160, 120 |
872 | }; |
873 | settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); |
874 | |
875 | static const uint8_t thumbnailQuality = 80; |
876 | settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1); |
877 | |
878 | static const double gpsCoordinates[3] = { |
879 | 0, 0, 0 |
880 | }; |
881 | settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); //default 2 value |
882 | |
883 | static const uint8_t gpsProcessingMethod[32] = "None"; |
884 | settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32); |
885 | |
886 | static const int64_t gpsTimestamp = 0; |
887 | settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1); |
888 | |
889 | static const int32_t jpegOrientation = 0; |
890 | settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); |
891 | |
892 | /** android.stats */ |
893 | |
894 | static const uint8_t faceDetectMode = |
895 | ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; |
896 | settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1); |
897 | |
898 | static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF; |
899 | settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1); |
900 | |
901 | static const uint8_t sharpnessMapMode = |
902 | ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF; |
903 | settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1); |
904 | |
905 | static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; |
906 | settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1); |
907 | static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; |
908 | settings.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1); |
909 | static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; |
910 | settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1); |
911 | // faceRectangles, faceScores, faceLandmarks, faceIds, histogram, |
912 | // sharpnessMap only in frames |
913 | |
914 | /** android.control */ |
915 | |
916 | uint8_t controlIntent = 0; |
917 | uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; //default value |
918 | uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; |
919 | uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; |
920 | switch (type) { |
921 | case CAMERA3_TEMPLATE_PREVIEW: |
922 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; |
923 | break; |
924 | case CAMERA3_TEMPLATE_STILL_CAPTURE: |
925 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; |
926 | break; |
927 | case CAMERA3_TEMPLATE_VIDEO_RECORD: |
928 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; |
929 | break; |
930 | case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: |
931 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; |
932 | break; |
933 | case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: |
934 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; |
935 | break; |
936 | case CAMERA3_TEMPLATE_MANUAL: |
937 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL; |
938 | controlMode = ANDROID_CONTROL_MODE_OFF; |
939 | aeMode = ANDROID_CONTROL_AE_MODE_OFF; |
940 | awbMode = ANDROID_CONTROL_AWB_MODE_OFF; |
941 | break; |
942 | default: |
943 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; |
944 | break; |
945 | } |
946 | settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); |
947 | settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); |
948 | |
949 | static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; |
950 | settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); |
951 | |
952 | static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; |
953 | settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); |
954 | |
955 | settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); |
956 | |
957 | static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; |
958 | settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); |
959 | |
960 | static const uint8_t aePrecaptureTrigger = |
961 | ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; |
962 | settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1); |
963 | |
964 | static const int32_t mAfTriggerId = 0; |
965 | settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,&mAfTriggerId, 1); |
966 | static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; |
967 | settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1); |
968 | |
969 | static const int32_t controlRegions[5] = { |
970 | 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1], |
971 | 1000 |
972 | }; |
973 | // settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5); |
974 | |
975 | static const int32_t aeExpCompensation = 0; |
976 | settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); |
977 | |
978 | static const int32_t aeTargetFpsRange[2] = { |
979 | 30, 30 |
980 | }; |
981 | settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2); |
982 | |
983 | static const uint8_t aeAntibandingMode = |
984 | ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; |
985 | settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1); |
986 | |
987 | settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); |
988 | |
989 | static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; |
990 | settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); |
991 | |
992 | // settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5); |
993 | |
994 | uint8_t afMode = 0; |
995 | switch (type) { |
996 | case CAMERA3_TEMPLATE_PREVIEW: |
997 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
998 | break; |
999 | case CAMERA3_TEMPLATE_STILL_CAPTURE: |
1000 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1001 | break; |
1002 | case CAMERA3_TEMPLATE_VIDEO_RECORD: |
1003 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1004 | //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; |
1005 | break; |
1006 | case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: |
1007 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1008 | //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; |
1009 | break; |
1010 | case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: |
1011 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1012 | //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; |
1013 | break; |
1014 | case CAMERA3_TEMPLATE_MANUAL: |
1015 | afMode = ANDROID_CONTROL_AF_MODE_OFF; |
1016 | break; |
1017 | default: |
1018 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1019 | break; |
1020 | } |
1021 | settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1); |
1022 | |
1023 | static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE; |
1024 | settings.update(ANDROID_CONTROL_AF_STATE,&afstate,1); |
1025 | |
1026 | // settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5); |
1027 | |
1028 | static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED; |
1029 | settings.update(ANDROID_CONTROL_AE_STATE,&aestate,1); |
1030 | static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
1031 | settings.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1); |
1032 | static const uint8_t vstabMode = |
1033 | ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; |
1034 | settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1); |
1035 | |
1036 | // aeState, awbState, afState only in frame |
1037 | |
1038 | mDefaultTemplates[type] = settings.release(); |
1039 | |
1040 | return mDefaultTemplates[type]; |
1041 | } |
1042 | |
1043 | status_t EmulatedFakeCamera3::processCaptureRequest( |
1044 | camera3_capture_request *request) { |
1045 | |
1046 | Mutex::Autolock l(mLock); |
1047 | status_t res; |
1048 | |
1049 | /** Validation */ |
1050 | |
1051 | if (mStatus < STATUS_READY) { |
1052 | ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__, |
1053 | mStatus); |
1054 | return INVALID_OPERATION; |
1055 | } |
1056 | |
1057 | if (request == NULL) { |
1058 | ALOGE("%s: NULL request!", __FUNCTION__); |
1059 | return BAD_VALUE; |
1060 | } |
1061 | |
1062 | uint32_t frameNumber = request->frame_number; |
1063 | |
1064 | if (request->settings == NULL && mPrevSettings.isEmpty()) { |
1065 | ALOGE("%s: Request %d: NULL settings for first request after" |
1066 | "configureStreams()", __FUNCTION__, frameNumber); |
1067 | return BAD_VALUE; |
1068 | } |
1069 | |
1070 | if (request->input_buffer != NULL && |
1071 | request->input_buffer->stream != mInputStream) { |
1072 | DBG_LOGB("%s: Request %d: Input buffer not from input stream!", |
1073 | __FUNCTION__, frameNumber); |
1074 | DBG_LOGB("%s: Bad stream %p, expected: %p", |
1075 | __FUNCTION__, request->input_buffer->stream, |
1076 | mInputStream); |
1077 | DBG_LOGB("%s: Bad stream type %d, expected stream type %d", |
1078 | __FUNCTION__, request->input_buffer->stream->stream_type, |
1079 | mInputStream ? mInputStream->stream_type : -1); |
1080 | |
1081 | return BAD_VALUE; |
1082 | } |
1083 | |
1084 | if (request->num_output_buffers < 1 || request->output_buffers == NULL) { |
1085 | ALOGE("%s: Request %d: No output buffers provided!", |
1086 | __FUNCTION__, frameNumber); |
1087 | return BAD_VALUE; |
1088 | } |
1089 | |
1090 | // Validate all buffers, starting with input buffer if it's given |
1091 | |
1092 | ssize_t idx; |
1093 | const camera3_stream_buffer_t *b; |
1094 | if (request->input_buffer != NULL) { |
1095 | idx = -1; |
1096 | b = request->input_buffer; |
1097 | } else { |
1098 | idx = 0; |
1099 | b = request->output_buffers; |
1100 | } |
1101 | do { |
1102 | PrivateStreamInfo *priv = |
1103 | static_cast<PrivateStreamInfo*>(b->stream->priv); |
1104 | if (priv == NULL) { |
1105 | ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!", |
1106 | __FUNCTION__, frameNumber, idx); |
1107 | return BAD_VALUE; |
1108 | } |
1109 | #if 0 |
1110 | if (!priv->alive || !priv->registered) { |
1111 | ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream! alive=%d, registered=%d\n", |
1112 | __FUNCTION__, frameNumber, idx, |
1113 | priv->alive, priv->registered); |
1114 | //return BAD_VALUE; |
1115 | } |
1116 | #endif |
1117 | if (b->status != CAMERA3_BUFFER_STATUS_OK) { |
1118 | ALOGE("%s: Request %d: Buffer %zu: Status not OK!", |
1119 | __FUNCTION__, frameNumber, idx); |
1120 | return BAD_VALUE; |
1121 | } |
1122 | if (b->release_fence != -1) { |
1123 | ALOGE("%s: Request %d: Buffer %zu: Has a release fence!", |
1124 | __FUNCTION__, frameNumber, idx); |
1125 | return BAD_VALUE; |
1126 | } |
1127 | if (b->buffer == NULL) { |
1128 | ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!", |
1129 | __FUNCTION__, frameNumber, idx); |
1130 | return BAD_VALUE; |
1131 | } |
1132 | idx++; |
1133 | b = &(request->output_buffers[idx]); |
1134 | } while (idx < (ssize_t)request->num_output_buffers); |
1135 | |
1136 | // TODO: Validate settings parameters |
1137 | |
1138 | /** |
1139 | * Start processing this request |
1140 | */ |
1141 | |
1142 | mStatus = STATUS_ACTIVE; |
1143 | |
1144 | CameraMetadata settings; |
1145 | camera_metadata_entry e; |
1146 | |
1147 | if (request->settings == NULL) { |
1148 | settings.acquire(mPrevSettings); |
1149 | } else { |
1150 | settings = request->settings; |
1151 | |
1152 | uint8_t antiBanding = 0; |
1153 | uint8_t effectMode = 0; |
1154 | int exposureCmp = 0; |
1155 | |
1156 | e = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE); |
1157 | if (e.count == 0) { |
1158 | ALOGE("%s: No antibanding entry!", __FUNCTION__); |
1159 | return BAD_VALUE; |
1160 | } |
1161 | antiBanding = e.data.u8[0]; |
1162 | mSensor->setAntiBanding(antiBanding); |
1163 | |
1164 | e = settings.find(ANDROID_CONTROL_EFFECT_MODE); |
1165 | if (e.count == 0) { |
1166 | ALOGE("%s: No antibanding entry!", __FUNCTION__); |
1167 | return BAD_VALUE; |
1168 | } |
1169 | effectMode = e.data.u8[0]; |
1170 | mSensor->setEffect(effectMode); |
1171 | |
1172 | |
1173 | e = settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION); |
1174 | if (e.count == 0) { |
1175 | ALOGE("%s: No exposure entry!", __FUNCTION__); |
1176 | //return BAD_VALUE; |
1177 | } else { |
1178 | exposureCmp = e.data.i32[0]; |
1179 | DBG_LOGB("set expsore compensaton %d\n", exposureCmp); |
1180 | mSensor->setExposure(exposureCmp); |
1181 | } |
1182 | |
1183 | int32_t cropRegion[4]; |
1184 | int32_t cropWidth; |
1185 | int32_t outputWidth = request->output_buffers[0].stream->width; |
1186 | |
1187 | e = settings.find(ANDROID_SCALER_CROP_REGION); |
1188 | if (e.count == 0) { |
1189 | ALOGE("%s: No corp region entry!", __FUNCTION__); |
1190 | //return BAD_VALUE; |
1191 | } else { |
1192 | cropRegion[0] = e.data.i32[0]; |
1193 | cropRegion[1] = e.data.i32[1]; |
1194 | cropWidth = cropRegion[2] = e.data.i32[2]; |
1195 | cropRegion[3] = e.data.i32[3]; |
1196 | for (int i = mZoomMin; i <= mZoomMax; i += mZoomStep) { |
1197 | //if ( (float) i / mZoomMin >= (float) outputWidth / cropWidth) { |
1198 | if ( i * cropWidth >= outputWidth * mZoomMin ) { |
1199 | mSensor->setZoom(i); |
1200 | break; |
1201 | } |
1202 | } |
1203 | DBG_LOGB("cropRegion:%d, %d, %d, %d\n", cropRegion[0], cropRegion[1],cropRegion[2],cropRegion[3]); |
1204 | } |
1205 | } |
1206 | |
1207 | uint8_t len[] = {1}; |
1208 | settings.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1); |
1209 | |
1210 | uint8_t maxlen[] = {0}; |
1211 | settings.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1); |
1212 | |
1213 | res = process3A(settings); |
1214 | if (res != OK) { |
1215 | CAMHAL_LOGDB("%s: process3A failed!", __FUNCTION__); |
1216 | //return res; |
1217 | } |
1218 | |
1219 | // TODO: Handle reprocessing |
1220 | |
1221 | /** |
1222 | * Get ready for sensor config |
1223 | */ |
1224 | |
1225 | nsecs_t exposureTime; |
1226 | nsecs_t frameDuration; |
1227 | uint32_t sensitivity; |
1228 | bool needJpeg = false; |
1229 | struct ExifInfo info; |
1230 | ssize_t jpegbuffersize; |
1231 | uint32_t jpegpixelfmt; |
1232 | bool mHaveThumbnail = false; |
1233 | |
1234 | exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; |
1235 | frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; |
1236 | sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; |
1237 | |
1238 | Buffers *sensorBuffers = new Buffers(); |
1239 | HalBufferVector *buffers = new HalBufferVector(); |
1240 | |
1241 | sensorBuffers->setCapacity(request->num_output_buffers); |
1242 | buffers->setCapacity(request->num_output_buffers); |
1243 | |
1244 | // Process all the buffers we got for output, constructing internal buffer |
1245 | // structures for them, and lock them for writing. |
1246 | for (size_t i = 0; i < request->num_output_buffers; i++) { |
1247 | const camera3_stream_buffer &srcBuf = request->output_buffers[i]; |
1248 | const private_handle_t *privBuffer = |
1249 | (const private_handle_t*)(*srcBuf.buffer); |
1250 | StreamBuffer destBuf; |
1251 | destBuf.streamId = kGenericStreamId; |
1252 | destBuf.width = srcBuf.stream->width; |
1253 | destBuf.height = srcBuf.stream->height; |
1254 | destBuf.format = privBuffer->format; // Use real private format |
1255 | destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc |
1256 | destBuf.buffer = srcBuf.buffer; |
1257 | destBuf.share_fd = privBuffer->share_fd; |
1258 | |
1259 | if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) { |
1260 | needJpeg = true; |
1261 | memset(&info,0,sizeof(struct ExifInfo)); |
1262 | info.orientation = settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; |
1263 | jpegpixelfmt = mSensor->getOutputFormat(); |
1264 | if (!mSupportRotate) { |
1265 | info.mainwidth = srcBuf.stream->width; |
1266 | info.mainheight = srcBuf.stream->height; |
1267 | } else { |
1268 | if ((info.orientation == 90) || (info.orientation == 270)) { |
1269 | info.mainwidth = srcBuf.stream->height; |
1270 | info.mainheight = srcBuf.stream->width; |
1271 | } else { |
1272 | info.mainwidth = srcBuf.stream->width; |
1273 | info.mainheight = srcBuf.stream->height; |
1274 | } |
1275 | } |
1276 | if ((jpegpixelfmt == V4L2_PIX_FMT_MJPEG)||(jpegpixelfmt == V4L2_PIX_FMT_YUYV)) { |
1277 | mSensor->setOutputFormat(info.mainwidth,info.mainheight,jpegpixelfmt,1); |
1278 | } else { |
1279 | mSensor->setOutputFormat(info.mainwidth,info.mainheight,V4L2_PIX_FMT_RGB24,1); |
1280 | } |
1281 | } |
1282 | |
1283 | // Wait on fence |
1284 | sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence); |
1285 | res = bufferAcquireFence->wait(kFenceTimeoutMs); |
1286 | if (res == TIMED_OUT) { |
1287 | ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms", |
1288 | __FUNCTION__, frameNumber, i, kFenceTimeoutMs); |
1289 | } |
1290 | if (res == OK) { |
1291 | // Lock buffer for writing |
1292 | const Rect rect(destBuf.width, destBuf.height); |
1293 | if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) { |
1294 | if (privBuffer->format == HAL_PIXEL_FORMAT_YCbCr_420_888/*HAL_PIXEL_FORMAT_YCrCb_420_SP*/) { |
1295 | android_ycbcr ycbcr = android_ycbcr(); |
1296 | res = GraphicBufferMapper::get().lockYCbCr( |
1297 | *(destBuf.buffer), |
1298 | GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK |
1299 | , rect, |
1300 | &ycbcr); |
1301 | // This is only valid because we know that emulator's |
1302 | // YCbCr_420_888 is really contiguous NV21 under the hood |
1303 | destBuf.img = static_cast<uint8_t*>(ycbcr.y); |
1304 | } else { |
1305 | ALOGE("Unexpected private format for flexible YUV: 0x%x", |
1306 | privBuffer->format); |
1307 | res = INVALID_OPERATION; |
1308 | } |
1309 | } else { |
1310 | res = GraphicBufferMapper::get().lock(*(destBuf.buffer), |
1311 | GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK |
1312 | , rect, |
1313 | (void**)&(destBuf.img)); |
1314 | } |
1315 | if (res != OK) { |
1316 | ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer", |
1317 | __FUNCTION__, frameNumber, i); |
1318 | } |
1319 | } |
1320 | |
1321 | if (res != OK) { |
1322 | // Either waiting or locking failed. Unlock locked buffers and bail |
1323 | // out. |
1324 | for (size_t j = 0; j < i; j++) { |
1325 | GraphicBufferMapper::get().unlock( |
1326 | *(request->output_buffers[i].buffer)); |
1327 | } |
1328 | ALOGE("line:%d, format for this usage: %d x %d, usage %x, format=%x, returned\n", |
1329 | __LINE__, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format); |
1330 | return NO_INIT; |
1331 | } |
1332 | |
1333 | sensorBuffers->push_back(destBuf); |
1334 | buffers->push_back(srcBuf); |
1335 | } |
1336 | |
1337 | if (needJpeg) { |
1338 | if (!mSupportRotate) { |
1339 | info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; |
1340 | info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; |
1341 | } else { |
1342 | if ((info.orientation == 90) || (info.orientation == 270)) { |
1343 | info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; |
1344 | info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; |
1345 | } else { |
1346 | info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; |
1347 | info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; |
1348 | } |
1349 | } |
1350 | if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { |
1351 | info.latitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[0]; |
1352 | info.longitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[1]; |
1353 | info.altitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[2]; |
1354 | info.has_latitude = true; |
1355 | info.has_longitude = true; |
1356 | info.has_altitude = true; |
1357 | } else { |
1358 | info.has_latitude = false; |
1359 | info.has_longitude = false; |
1360 | info.has_altitude = false; |
1361 | } |
1362 | if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { |
1363 | info.gpsProcessingMethod = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8; |
1364 | info.has_gpsProcessingMethod = true; |
1365 | } else { |
1366 | info.has_gpsProcessingMethod = false; |
1367 | } |
1368 | if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { |
1369 | info.gpsTimestamp = settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; |
1370 | info.has_gpsTimestamp = true; |
1371 | } else { |
1372 | info.has_gpsTimestamp = false; |
1373 | } |
1374 | if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { |
1375 | info.focallen = settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; |
1376 | info.has_focallen = true; |
1377 | } else { |
1378 | info.has_focallen = false; |
1379 | } |
1380 | jpegbuffersize = getJpegBufferSize(info.mainwidth,info.mainheight); |
1381 | |
1382 | mJpegCompressor->SetMaxJpegBufferSize(jpegbuffersize); |
1383 | mJpegCompressor->SetExifInfo(info); |
1384 | mSensor->setPictureRotate(info.orientation); |
1385 | if ((info.thumbwidth > 0) && (info.thumbheight > 0)) { |
1386 | mHaveThumbnail = true; |
1387 | } |
1388 | DBG_LOGB("%s::thumbnailSize_width=%d,thumbnailSize_height=%d,mainsize_width=%d,mainsize_height=%d,jpegOrientation=%d",__FUNCTION__, |
1389 | info.thumbwidth,info.thumbheight,info.mainwidth,info.mainheight,info.orientation); |
1390 | } |
1391 | /** |
1392 | * Wait for JPEG compressor to not be busy, if needed |
1393 | */ |
1394 | #if 0 |
1395 | if (needJpeg) { |
1396 | bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs); |
1397 | if (!ready) { |
1398 | ALOGE("%s: Timeout waiting for JPEG compression to complete!", |
1399 | __FUNCTION__); |
1400 | return NO_INIT; |
1401 | } |
1402 | } |
1403 | #else |
1404 | while (needJpeg) { |
1405 | bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs); |
1406 | if (ready) { |
1407 | break; |
1408 | } |
1409 | } |
1410 | #endif |
1411 | /** |
1412 | * Wait until the in-flight queue has room |
1413 | */ |
1414 | res = mReadoutThread->waitForReadout(); |
1415 | if (res != OK) { |
1416 | ALOGE("%s: Timeout waiting for previous requests to complete!", |
1417 | __FUNCTION__); |
1418 | return NO_INIT; |
1419 | } |
1420 | |
1421 | /** |
1422 | * Wait until sensor's ready. This waits for lengthy amounts of time with |
1423 | * mLock held, but the interface spec is that no other calls may by done to |
1424 | * the HAL by the framework while process_capture_request is happening. |
1425 | */ |
1426 | int syncTimeoutCount = 0; |
1427 | while(!mSensor->waitForVSync(kSyncWaitTimeout)) { |
1428 | if (mStatus == STATUS_ERROR) { |
1429 | return NO_INIT; |
1430 | } |
1431 | if (syncTimeoutCount == kMaxSyncTimeoutCount) { |
1432 | ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms", |
1433 | __FUNCTION__, frameNumber, |
1434 | kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000); |
1435 | return NO_INIT; |
1436 | } |
1437 | syncTimeoutCount++; |
1438 | } |
1439 | |
1440 | /** |
1441 | * Configure sensor and queue up the request to the readout thread |
1442 | */ |
1443 | mSensor->setExposureTime(exposureTime); |
1444 | mSensor->setFrameDuration(frameDuration); |
1445 | mSensor->setSensitivity(sensitivity); |
1446 | mSensor->setDestinationBuffers(sensorBuffers); |
1447 | mSensor->setFrameNumber(request->frame_number); |
1448 | |
1449 | ReadoutThread::Request r; |
1450 | r.frameNumber = request->frame_number; |
1451 | r.settings = settings; |
1452 | r.sensorBuffers = sensorBuffers; |
1453 | r.buffers = buffers; |
1454 | r.havethumbnail = mHaveThumbnail; |
1455 | |
1456 | mReadoutThread->queueCaptureRequest(r); |
1457 | ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number); |
1458 | |
1459 | // Cache the settings for next time |
1460 | mPrevSettings.acquire(settings); |
1461 | |
1462 | return OK; |
1463 | } |
1464 | |
1465 | /** Debug methods */ |
1466 | |
1467 | void EmulatedFakeCamera3::dump(int fd) { |
1468 | |
1469 | String8 result; |
1470 | uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]); |
1471 | result = String8::format("%s, valid resolution\n", __FILE__); |
1472 | |
1473 | for (uint32_t f = 0; f < count; f+=2) { |
1474 | if (mAvailableJpegSize[f] == 0) |
1475 | break; |
1476 | result.appendFormat("width: %d , height =%d\n", |
1477 | mAvailableJpegSize[f], mAvailableJpegSize[f+1]); |
1478 | } |
1479 | result.appendFormat("\nmZoomMin: %d , mZoomMax =%d, mZoomStep=%d\n", |
1480 | mZoomMin, mZoomMax, mZoomStep); |
1481 | |
1482 | if (mZoomStep <= 0) { |
1483 | result.appendFormat("!!!!!!!!!camera apk may have no picture out\n"); |
1484 | } |
1485 | |
1486 | write(fd, result.string(), result.size()); |
1487 | |
1488 | if (mSensor.get() != NULL) { |
1489 | mSensor->dump(fd); |
1490 | } |
1491 | |
1492 | } |
//Flush all in-flight requests.
//TODO: return the buffers held by every in-flight request immediately,
//flagged with CAMERA3_BUFFER_STATUS_ERROR.
// Stub implementation: no in-flight requests are actually cancelled yet;
// the call is only logged and success is reported to the caller.
int EmulatedFakeCamera3::flush_all_requests() {
    DBG_LOGA("flush all request");
    return 0;
}
1500 | /** Tag query methods */ |
// No vendor-specific metadata sections are defined by this fake camera,
// so every lookup returns NULL.
const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) {
    return NULL;
}
1504 | |
// No vendor-specific tags are defined by this fake camera, so every
// lookup returns NULL.
const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) {
    return NULL;
}
1508 | |
// No vendor-specific tags are defined; always returns 0.
// NOTE(review): vendor tag APIs typically return -1 for an unknown tag —
// confirm 0 is the intended sentinel here.
int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
    return 0;
}
1512 | |
1513 | /** |
1514 | * Private methods |
1515 | */ |
1516 | |
1517 | camera_metadata_ro_entry_t EmulatedFakeCamera3::staticInfo(const CameraMetadata *info, uint32_t tag, |
1518 | size_t minCount, size_t maxCount, bool required) const { |
1519 | |
1520 | camera_metadata_ro_entry_t entry = info->find(tag); |
1521 | |
1522 | if (CC_UNLIKELY( entry.count == 0 ) && required) { |
1523 | const char* tagSection = get_camera_metadata_section_name(tag); |
1524 | if (tagSection == NULL) tagSection = "<unknown>"; |
1525 | const char* tagName = get_camera_metadata_tag_name(tag); |
1526 | if (tagName == NULL) tagName = "<unknown>"; |
1527 | |
1528 | ALOGE("Error finding static metadata entry '%s.%s' (%x)", |
1529 | tagSection, tagName, tag); |
1530 | } else if (CC_UNLIKELY( |
1531 | (minCount != 0 && entry.count < minCount) || |
1532 | (maxCount != 0 && entry.count > maxCount) ) ) { |
1533 | const char* tagSection = get_camera_metadata_section_name(tag); |
1534 | if (tagSection == NULL) tagSection = "<unknown>"; |
1535 | const char* tagName = get_camera_metadata_tag_name(tag); |
1536 | if (tagName == NULL) tagName = "<unknown>"; |
1537 | ALOGE("Malformed static metadata entry '%s.%s' (%x):" |
1538 | "Expected between %zu and %zu values, but got %zu values", |
1539 | tagSection, tagName, tag, minCount, maxCount, entry.count); |
1540 | } |
1541 | |
1542 | return entry; |
1543 | } |
1544 | |
1545 | //this is only for debug |
1546 | void EmulatedFakeCamera3::getStreamConfigurationp(CameraMetadata *info) { |
1547 | const int STREAM_CONFIGURATION_SIZE = 4; |
1548 | const int STREAM_FORMAT_OFFSET = 0; |
1549 | const int STREAM_WIDTH_OFFSET = 1; |
1550 | const int STREAM_HEIGHT_OFFSET = 2; |
1551 | const int STREAM_IS_INPUT_OFFSET = 3; |
1552 | |
1553 | camera_metadata_ro_entry_t availableStreamConfigs = |
1554 | staticInfo(info, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS); |
1555 | CAMHAL_LOGDB(" stream, availableStreamConfigs.count=%d\n", availableStreamConfigs.count); |
1556 | |
1557 | for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) { |
1558 | int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET]; |
1559 | int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET]; |
1560 | int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET]; |
1561 | int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET]; |
1562 | CAMHAL_LOGDB("f=%x, w*h=%dx%d, du=%d\n", format, width, height, isInput); |
1563 | } |
1564 | |
1565 | } |
1566 | |
1567 | //this is only for debug |
1568 | void EmulatedFakeCamera3::getStreamConfigurationDurations(CameraMetadata *info) { |
1569 | const int STREAM_CONFIGURATION_SIZE = 4; |
1570 | const int STREAM_FORMAT_OFFSET = 0; |
1571 | const int STREAM_WIDTH_OFFSET = 1; |
1572 | const int STREAM_HEIGHT_OFFSET = 2; |
1573 | const int STREAM_IS_INPUT_OFFSET = 3; |
1574 | |
1575 | camera_metadata_ro_entry_t availableStreamConfigs = |
1576 | staticInfo(info, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS); |
1577 | CAMHAL_LOGDB("availableStreamConfigs.count=%d\n", availableStreamConfigs.count); |
1578 | |
1579 | for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) { |
1580 | int64_t format = availableStreamConfigs.data.i64[i + STREAM_FORMAT_OFFSET]; |
1581 | int64_t width = availableStreamConfigs.data.i64[i + STREAM_WIDTH_OFFSET]; |
1582 | int64_t height = availableStreamConfigs.data.i64[i + STREAM_HEIGHT_OFFSET]; |
1583 | int64_t isInput = availableStreamConfigs.data.i64[i + STREAM_IS_INPUT_OFFSET]; |
1584 | CAMHAL_LOGDB("f=%llx, w*h=%lldx%lld, du=%lld\n", format, width, height, isInput); |
1585 | } |
1586 | } |
1587 | |
// Intentionally empty: placeholder hook for device-specific metadata
// adjustments. NOTE(review): 'info' is currently unused — confirm this
// hook is still needed.
void EmulatedFakeCamera3::updateCameraMetaData(CameraMetadata *info) {

}
1591 | |
1592 | status_t EmulatedFakeCamera3::constructStaticInfo() { |
1593 | |
1594 | status_t ret = OK; |
1595 | CameraMetadata info; |
1596 | uint32_t picSizes[64 * 8]; |
1597 | int64_t* duration = NULL; |
1598 | int count, duration_count, availablejpegsize; |
1599 | uint8_t maxCount = 10; |
1600 | char property[PROPERTY_VALUE_MAX]; |
1601 | unsigned int supportrotate; |
1602 | availablejpegsize = ARRAY_SIZE(mAvailableJpegSize); |
1603 | memset(mAvailableJpegSize,0,(sizeof(uint32_t))*availablejpegsize); |
1604 | sp<Sensor> s = new Sensor(); |
1605 | ret = s->startUp(mCameraID); |
1606 | if (ret != OK) { |
1607 | DBG_LOGA("sensor start up failed"); |
1608 | return ret; |
1609 | } |
1610 | |
1611 | mSensorType = s->getSensorType(); |
1612 | |
1613 | if ( mSensorType == SENSOR_USB) { |
1614 | char property[PROPERTY_VALUE_MAX]; |
1615 | property_get("rw.camera.usb.faceback", property, "false"); |
1616 | if (strstr(property, "true")) |
1617 | mFacingBack = 1; |
1618 | else |
1619 | mFacingBack = 0; |
1620 | ALOGI("Setting usb camera cameraID:%d to back camera:%s\n", |
1621 | mCameraID, property); |
1622 | } else { |
1623 | if (s->mSensorFace == SENSOR_FACE_FRONT) { |
1624 | mFacingBack = 0; |
1625 | } else if (s->mSensorFace == SENSOR_FACE_BACK) { |
1626 | mFacingBack = 1; |
1627 | } else if (s->mSensorFace == SENSOR_FACE_NONE) { |
1628 | if (gEmulatedCameraFactory.getEmulatedCameraNum() == 1) { |
1629 | mFacingBack = 1; |
1630 | } else if ( mCameraID == 0) { |
1631 | mFacingBack = 1; |
1632 | } else { |
1633 | mFacingBack = 0; |
1634 | } |
1635 | } |
1636 | |
1637 | ALOGI("Setting on board camera cameraID:%d to back camera:%d[0 false, 1 true]\n", |
1638 | mCameraID, mFacingBack); |
1639 | } |
1640 | |
1641 | mSupportCap = s->IoctlStateProbe(); |
1642 | if (mSupportCap & IOCTL_MASK_ROTATE) { |
1643 | supportrotate = true; |
1644 | } else { |
1645 | supportrotate = false; |
1646 | } |
1647 | // android.lens |
1648 | |
1649 | // 5 cm min focus distance for back camera, infinity (fixed focus) for front |
1650 | // TODO read this ioctl from camera driver |
1651 | DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo); |
1652 | const float minFocusDistance = 0.0; |
1653 | info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, |
1654 | &minFocusDistance, 1); |
1655 | |
1656 | // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front |
1657 | const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0; |
1658 | info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, |
1659 | &minFocusDistance, 1); |
1660 | |
1661 | static const float focalLength = 3.30f; // mm |
1662 | info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, |
1663 | &focalLength, 1); |
1664 | static const float aperture = 2.8f; |
1665 | info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, |
1666 | &aperture, 1); |
1667 | static const float filterDensity = 0; |
1668 | info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, |
1669 | &filterDensity, 1); |
1670 | static const uint8_t availableOpticalStabilization = |
1671 | ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; |
1672 | info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, |
1673 | &availableOpticalStabilization, 1); |
1674 | |
1675 | static const int32_t lensShadingMapSize[] = {1, 1}; |
1676 | info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize, |
1677 | sizeof(lensShadingMapSize)/sizeof(int32_t)); |
1678 | |
1679 | uint8_t lensFacing = mFacingBack ? |
1680 | ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; |
1681 | info.update(ANDROID_LENS_FACING, &lensFacing, 1); |
1682 | |
1683 | float lensPosition[3]; |
1684 | if (mFacingBack) { |
1685 | // Back-facing camera is center-top on device |
1686 | lensPosition[0] = 0; |
1687 | lensPosition[1] = 20; |
1688 | lensPosition[2] = -5; |
1689 | } else { |
1690 | // Front-facing camera is center-right on device |
1691 | lensPosition[0] = 20; |
1692 | lensPosition[1] = 20; |
1693 | lensPosition[2] = 0; |
1694 | } |
1695 | info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/ |
1696 | sizeof(float)); |
1697 | static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED; |
1698 | info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1); |
1699 | |
1700 | // android.sensor |
1701 | |
1702 | static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
1703 | info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1); |
1704 | static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
1705 | info.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1); |
1706 | info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, |
1707 | Sensor::kExposureTimeRange, 2); |
1708 | |
1709 | info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, |
1710 | &Sensor::kFrameDurationRange[1], 1); |
1711 | |
1712 | info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, |
1713 | Sensor::kSensitivityRange, |
1714 | sizeof(Sensor::kSensitivityRange) |
1715 | /sizeof(int32_t)); |
1716 | |
1717 | info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, |
1718 | &Sensor::kColorFilterArrangement, 1); |
1719 | |
1720 | static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm |
1721 | info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, |
1722 | sensorPhysicalSize, 2); |
1723 | |
1724 | info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, |
1725 | (int32_t*)Sensor::kResolution, 2); |
1726 | |
1727 | //(int32_t*)Sensor::kResolution, 2); |
1728 | |
1729 | info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, |
1730 | (int32_t*)&Sensor::kMaxRawValue, 1); |
1731 | |
1732 | static const int32_t blackLevelPattern[4] = { |
1733 | (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel, |
1734 | (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel |
1735 | }; |
1736 | info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, |
1737 | blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t)); |
1738 | |
1739 | static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN; |
1740 | info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, ×tampSource, 1); |
1741 | if (mSensorType == SENSOR_USB) { |
1742 | if (mFacingBack) { |
1743 | property_get("hw.camera.orientation.back", property, "0"); |
1744 | } else { |
1745 | property_get("hw.camera.orientation.front", property, "0"); |
1746 | } |
1747 | int32_t orientation = atoi(property); |
1748 | property_get("hw.camera.usb.orientation_offset", property, "0"); |
1749 | orientation += atoi(property); |
1750 | orientation %= 360; |
1751 | info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1); |
1752 | } else { |
1753 | if (mFacingBack) { |
1754 | property_get("hw.camera.orientation.back", property, "270"); |
1755 | const int32_t orientation = atoi(property); |
1756 | info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1); |
1757 | } else { |
1758 | property_get("hw.camera.orientation.front", property, "90"); |
1759 | const int32_t orientation = atoi(property); |
1760 | info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1); |
1761 | } |
1762 | } |
1763 | |
1764 | static const int64_t rollingShutterSkew = 0; |
1765 | info.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1); |
1766 | |
1767 | //TODO: sensor color calibration fields |
1768 | |
1769 | // android.flash |
1770 | static const uint8_t flashAvailable = 0; |
1771 | info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1); |
1772 | |
1773 | static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE; |
1774 | info.update(ANDROID_FLASH_STATE, &flashstate, 1); |
1775 | |
1776 | static const int64_t flashChargeDuration = 0; |
1777 | info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1); |
1778 | |
1779 | /** android.noise */ |
1780 | static const uint8_t availableNBModes = ANDROID_NOISE_REDUCTION_MODE_OFF; |
1781 | info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &availableNBModes, 1); |
1782 | |
1783 | // android.tonemap |
1784 | |
1785 | static const int32_t tonemapCurvePoints = 128; |
1786 | info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1); |
1787 | |
1788 | // android.scaler |
1789 | |
1790 | static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; |
1791 | info.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); |
1792 | |
1793 | info.update(ANDROID_SCALER_AVAILABLE_FORMATS, |
1794 | kAvailableFormats, |
1795 | sizeof(kAvailableFormats)/sizeof(int32_t)); |
1796 | |
1797 | info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, |
1798 | (int64_t*)kAvailableRawMinDurations, |
1799 | sizeof(kAvailableRawMinDurations)/sizeof(uint64_t)); |
1800 | |
1801 | //for version 3.2 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS |
1802 | count = sizeof(picSizes)/sizeof(picSizes[0]); |
1803 | count = s->getStreamConfigurations(picSizes, kAvailableFormats, count); |
1804 | |
1805 | info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, |
1806 | (int32_t*)picSizes, count); |
1807 | |
1808 | if (count < availablejpegsize) { |
1809 | availablejpegsize = count; |
1810 | } |
1811 | getValidJpegSize(picSizes,mAvailableJpegSize,availablejpegsize); |
1812 | |
1813 | maxJpegResolution = getMaxJpegResolution(picSizes,count); |
1814 | int32_t full_size[4]; |
1815 | if (mFacingBack) { |
1816 | full_size[0] = 0; |
1817 | full_size[1] = 0; |
1818 | full_size[2] = maxJpegResolution.width; |
1819 | full_size[3] = maxJpegResolution.height; |
1820 | } else { |
1821 | full_size[0] = 0; |
1822 | full_size[1] = 0; |
1823 | full_size[2] = maxJpegResolution.width; |
1824 | full_size[3] = maxJpegResolution.height; |
1825 | } |
1826 | info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, |
1827 | (int32_t*)full_size, |
1828 | sizeof(full_size)/sizeof(full_size[0])); |
1829 | duration = new int64_t[count]; |
1830 | if (duration == NULL) { |
1831 | DBG_LOGA("allocate memory for duration failed"); |
1832 | return NO_MEMORY; |
1833 | } else { |
1834 | memset(duration,0,sizeof(int64_t)*count); |
1835 | } |
1836 | duration_count = s->getStreamConfigurationDurations(picSizes, duration , count); |
1837 | |
1838 | info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, |
1839 | duration, duration_count); |
1840 | info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, |
1841 | duration, duration_count); |
1842 | |
1843 | info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, |
1844 | (int64_t*)kAvailableProcessedMinDurations, |
1845 | sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t)); |
1846 | |
1847 | info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, |
1848 | (int64_t*)kAvailableJpegMinDurations, |
1849 | sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t)); |
1850 | |
1851 | |
1852 | // android.jpeg |
1853 | |
1854 | static const int32_t jpegThumbnailSizes[] = { |
1855 | 0, 0, |
1856 | 160, 120, |
1857 | 320, 240 |
1858 | }; |
1859 | info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, |
1860 | jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t)); |
1861 | |
1862 | static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize; |
1863 | info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); |
1864 | |
1865 | // android.stats |
1866 | |
1867 | static const uint8_t availableFaceDetectModes[] = { |
1868 | ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, |
1869 | ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, |
1870 | ANDROID_STATISTICS_FACE_DETECT_MODE_FULL |
1871 | }; |
1872 | |
1873 | info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, |
1874 | availableFaceDetectModes, |
1875 | sizeof(availableFaceDetectModes)); |
1876 | |
1877 | static const int32_t maxFaceCount = 8; |
1878 | info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, |
1879 | &maxFaceCount, 1); |
1880 | |
1881 | static const int32_t histogramSize = 64; |
1882 | info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, |
1883 | &histogramSize, 1); |
1884 | |
1885 | static const int32_t maxHistogramCount = 1000; |
1886 | info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, |
1887 | &maxHistogramCount, 1); |
1888 | |
1889 | static const int32_t sharpnessMapSize[2] = {64, 64}; |
1890 | info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, |
1891 | sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t)); |
1892 | |
1893 | static const int32_t maxSharpnessMapValue = 1000; |
1894 | info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, |
1895 | &maxSharpnessMapValue, 1); |
1896 | static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; |
1897 | info.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1); |
1898 | |
1899 | static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; |
1900 | info.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1); |
1901 | static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; |
1902 | info.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1); |
1903 | // android.control |
1904 | |
1905 | static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; |
1906 | info.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); |
1907 | |
1908 | static const uint8_t availableSceneModes[] = { |
1909 | // ANDROID_CONTROL_SCENE_MODE_DISABLED, |
1910 | ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY |
1911 | }; |
1912 | info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, |
1913 | availableSceneModes, sizeof(availableSceneModes)); |
1914 | |
1915 | static const uint8_t availableEffects[] = { |
1916 | ANDROID_CONTROL_EFFECT_MODE_OFF |
1917 | }; |
1918 | info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, |
1919 | availableEffects, sizeof(availableEffects)); |
1920 | |
1921 | static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0}; |
1922 | info.update(ANDROID_CONTROL_MAX_REGIONS, |
1923 | max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0])); |
1924 | |
1925 | static const uint8_t availableAeModes[] = { |
1926 | ANDROID_CONTROL_AE_MODE_OFF, |
1927 | ANDROID_CONTROL_AE_MODE_ON |
1928 | }; |
1929 | info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, |
1930 | availableAeModes, sizeof(availableAeModes)); |
1931 | |
1932 | |
1933 | static const int32_t availableTargetFpsRanges[] = { |
1934 | 5, 15, 15, 15, 5, 30, 30, 30, |
1935 | }; |
1936 | info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, |
1937 | availableTargetFpsRanges, |
1938 | sizeof(availableTargetFpsRanges)/sizeof(int32_t)); |
1939 | |
1940 | uint8_t awbModes[maxCount]; |
1941 | count = s->getAWB(awbModes, maxCount); |
1942 | if (count < 0) { |
1943 | static const uint8_t availableAwbModes[] = { |
1944 | ANDROID_CONTROL_AWB_MODE_OFF, |
1945 | ANDROID_CONTROL_AWB_MODE_AUTO, |
1946 | ANDROID_CONTROL_AWB_MODE_INCANDESCENT, |
1947 | ANDROID_CONTROL_AWB_MODE_FLUORESCENT, |
1948 | ANDROID_CONTROL_AWB_MODE_DAYLIGHT, |
1949 | ANDROID_CONTROL_AWB_MODE_SHADE |
1950 | }; |
1951 | info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, |
1952 | availableAwbModes, sizeof(availableAwbModes)); |
1953 | } else { |
1954 | DBG_LOGB("getAWB %d ",count); |
1955 | info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, |
1956 | awbModes, count); |
1957 | } |
1958 | |
1959 | static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE; |
1960 | info.update(ANDROID_CONTROL_AF_STATE,&afstate,1); |
1961 | |
1962 | static const uint8_t availableAfModesFront[] = { |
1963 | ANDROID_CONTROL_AF_MODE_OFF |
1964 | }; |
1965 | |
1966 | if (mFacingBack) { |
1967 | uint8_t afMode[maxCount]; |
1968 | count = s->getAutoFocus(afMode, maxCount); |
1969 | if (count < 0) { |
1970 | static const uint8_t availableAfModesBack[] = { |
1971 | ANDROID_CONTROL_AF_MODE_OFF, |
1972 | //ANDROID_CONTROL_AF_MODE_AUTO, |
1973 | //ANDROID_CONTROL_AF_MODE_MACRO, |
1974 | //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, |
1975 | //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, |
1976 | }; |
1977 | |
1978 | info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, |
1979 | availableAfModesBack, sizeof(availableAfModesBack)); |
1980 | } else { |
1981 | info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, |
1982 | afMode, count); |
1983 | } |
1984 | } else { |
1985 | info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, |
1986 | availableAfModesFront, sizeof(availableAfModesFront)); |
1987 | } |
1988 | |
1989 | uint8_t antiBanding[maxCount]; |
1990 | count = s->getAntiBanding(antiBanding, maxCount); |
1991 | if (count < 0) { |
1992 | static const uint8_t availableAntibanding[] = { |
1993 | ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, |
1994 | ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, |
1995 | }; |
1996 | info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, |
1997 | availableAntibanding, sizeof(availableAntibanding)); |
1998 | } else { |
1999 | info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, |
2000 | antiBanding, count); |
2001 | } |
2002 | |
2003 | camera_metadata_rational step; |
2004 | int maxExp, minExp, def; |
2005 | ret = s->getExposure(&maxExp, &minExp, &def, &step); |
2006 | if (ret < 0) { |
2007 | static const int32_t aeExpCompensation = 0; |
2008 | info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); |
2009 | |
2010 | static const camera_metadata_rational exposureCompensationStep = { |
2011 | 1, 3 |
2012 | }; |
2013 | info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, |
2014 | &exposureCompensationStep, 1); |
2015 | |
2016 | int32_t exposureCompensationRange[] = {0, 0}; |
2017 | info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, |
2018 | exposureCompensationRange, |
2019 | sizeof(exposureCompensationRange)/sizeof(int32_t)); |
2020 | } else { |
2021 | DBG_LOGB("exposure compensation support:(%d, %d)\n", minExp, maxExp); |
2022 | int32_t exposureCompensationRange[] = {minExp, maxExp}; |
2023 | info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, |
2024 | exposureCompensationRange, |
2025 | sizeof(exposureCompensationRange)/sizeof(int32_t)); |
2026 | info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, |
2027 | &step, 1); |
2028 | info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &def, 1); |
2029 | } |
2030 | |
2031 | ret = s->getZoom(&mZoomMin, &mZoomMax, &mZoomStep); |
2032 | if (ret < 0) { |
2033 | float maxZoom = 1.0; |
2034 | info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, |
2035 | &maxZoom, 1); |
2036 | } else { |
2037 | float maxZoom = mZoomMax / mZoomMin; |
2038 | info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, |
2039 | &maxZoom, 1); |
2040 | } |
2041 | |
2042 | static const uint8_t availableVstabModes[] = { |
2043 | ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF |
2044 | }; |
2045 | info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, |
2046 | availableVstabModes, sizeof(availableVstabModes)); |
2047 | |
2048 | static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED; |
2049 | info.update(ANDROID_CONTROL_AE_STATE,&aestate,1); |
2050 | static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
2051 | info.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1); |
2052 | // android.info |
2053 | const uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; |
2054 | //mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL : |
2055 | // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; |
2056 | info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, |
2057 | &supportedHardwareLevel, |
2058 | /*count*/1); |
2059 | |
2060 | int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN; |
2061 | info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1); |
2062 | |
2063 | uint8_t len[] = {1}; |
2064 | info.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1); |
2065 | |
2066 | uint8_t maxlen[] = {2}; |
2067 | info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1); |
2068 | uint8_t cap[] = { |
2069 | ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE, |
2070 | }; |
2071 | info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, |
2072 | (uint8_t *)cap, sizeof(cap)/sizeof(cap[0])); |
2073 | |
2074 | |
2075 | int32_t partialResultCount = 1; |
2076 | info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,&partialResultCount,1); |
2077 | int32_t maxNumOutputStreams[3] = {0,2,1}; |
2078 | info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,maxNumOutputStreams,3); |
2079 | uint8_t aberrationMode[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF}; |
2080 | info.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, |
2081 | aberrationMode, 1); |
2082 | info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, |
2083 | aberrationMode, 1); |
2084 | |
2085 | getAvailableChKeys(&info, supportedHardwareLevel); |
2086 | |
2087 | if (mCameraInfo != NULL) { |
2088 | CAMHAL_LOGDA("mCameraInfo is not null, mem leak?"); |
2089 | } |
2090 | mCameraInfo = info.release(); |
2091 | DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo); |
2092 | |
2093 | if (duration != NULL) { |
2094 | delete [] duration; |
2095 | } |
2096 | |
2097 | s->shutDown(); |
2098 | s.clear(); |
2099 | mPlugged = true; |
2100 | |
2101 | return OK; |
2102 | } |
2103 | |
2104 | status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) { |
2105 | /** |
2106 | * Extract top-level 3A controls |
2107 | */ |
2108 | status_t res; |
2109 | |
2110 | bool facePriority = false; |
2111 | |
2112 | camera_metadata_entry e; |
2113 | |
2114 | e = settings.find(ANDROID_CONTROL_MODE); |
2115 | if (e.count == 0) { |
2116 | ALOGE("%s: No control mode entry!", __FUNCTION__); |
2117 | return BAD_VALUE; |
2118 | } |
2119 | uint8_t controlMode = e.data.u8[0]; |
2120 | |
2121 | e = settings.find(ANDROID_CONTROL_SCENE_MODE); |
2122 | if (e.count == 0) { |
2123 | ALOGE("%s: No scene mode entry!", __FUNCTION__); |
2124 | return BAD_VALUE; |
2125 | } |
2126 | uint8_t sceneMode = e.data.u8[0]; |
2127 | |
2128 | if (controlMode == ANDROID_CONTROL_MODE_OFF) { |
2129 | mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; |
2130 | mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; |
2131 | mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
2132 | update3A(settings); |
2133 | return OK; |
2134 | } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) { |
2135 | switch(sceneMode) { |
2136 | case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY: |
2137 | mFacePriority = true; |
2138 | break; |
2139 | default: |
2140 | ALOGE("%s: Emulator doesn't support scene mode %d", |
2141 | __FUNCTION__, sceneMode); |
2142 | return BAD_VALUE; |
2143 | } |
2144 | } else { |
2145 | mFacePriority = false; |
2146 | } |
2147 | |
2148 | // controlMode == AUTO or sceneMode = FACE_PRIORITY |
2149 | // Process individual 3A controls |
2150 | |
2151 | res = doFakeAE(settings); |
2152 | if (res != OK) return res; |
2153 | |
2154 | res = doFakeAF(settings); |
2155 | if (res != OK) return res; |
2156 | |
2157 | res = doFakeAWB(settings); |
2158 | if (res != OK) return res; |
2159 | |
2160 | update3A(settings); |
2161 | return OK; |
2162 | } |
2163 | |
2164 | status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) { |
2165 | camera_metadata_entry e; |
2166 | |
2167 | e = settings.find(ANDROID_CONTROL_AE_MODE); |
2168 | if (e.count == 0) { |
2169 | ALOGE("%s: No AE mode entry!", __FUNCTION__); |
2170 | return BAD_VALUE; |
2171 | } |
2172 | uint8_t aeMode = e.data.u8[0]; |
2173 | |
2174 | switch (aeMode) { |
2175 | case ANDROID_CONTROL_AE_MODE_OFF: |
2176 | // AE is OFF |
2177 | mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; |
2178 | return OK; |
2179 | case ANDROID_CONTROL_AE_MODE_ON: |
2180 | // OK for AUTO modes |
2181 | break; |
2182 | default: |
2183 | ALOGE("%s: Emulator doesn't support AE mode %d", |
2184 | __FUNCTION__, aeMode); |
2185 | return BAD_VALUE; |
2186 | } |
2187 | |
2188 | e = settings.find(ANDROID_CONTROL_AE_LOCK); |
2189 | if (e.count == 0) { |
2190 | ALOGE("%s: No AE lock entry!", __FUNCTION__); |
2191 | return BAD_VALUE; |
2192 | } |
2193 | bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON); |
2194 | |
2195 | e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER); |
2196 | bool precaptureTrigger = false; |
2197 | if (e.count != 0) { |
2198 | precaptureTrigger = |
2199 | (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START); |
2200 | } |
2201 | |
2202 | if (precaptureTrigger) { |
2203 | ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger); |
2204 | } else if (e.count > 0) { |
2205 | ALOGV("%s: Pre capture trigger was present? %zu", |
2206 | __FUNCTION__, |
2207 | e.count); |
2208 | } |
2209 | |
2210 | if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) { |
2211 | // Run precapture sequence |
2212 | if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) { |
2213 | mAeCounter = 0; |
2214 | } |
2215 | |
2216 | if (mFacePriority) { |
2217 | mAeTargetExposureTime = kFacePriorityExposureTime; |
2218 | } else { |
2219 | mAeTargetExposureTime = kNormalExposureTime; |
2220 | } |
2221 | |
2222 | if (mAeCounter > kPrecaptureMinFrames && |
2223 | (mAeTargetExposureTime - mAeCurrentExposureTime) < |
2224 | mAeTargetExposureTime / 10) { |
2225 | // Done with precapture |
2226 | mAeCounter = 0; |
2227 | mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED : |
2228 | ANDROID_CONTROL_AE_STATE_CONVERGED; |
2229 | } else { |
2230 | // Converge some more |
2231 | mAeCurrentExposureTime += |
2232 | (mAeTargetExposureTime - mAeCurrentExposureTime) * |
2233 | kExposureTrackRate; |
2234 | mAeCounter++; |
2235 | mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE; |
2236 | } |
2237 | |
2238 | } else if (!aeLocked) { |
2239 | // Run standard occasional AE scan |
2240 | switch (mAeState) { |
2241 | case ANDROID_CONTROL_AE_STATE_CONVERGED: |
2242 | case ANDROID_CONTROL_AE_STATE_INACTIVE: |
2243 | mAeCounter++; |
2244 | if (mAeCounter > kStableAeMaxFrames) { |
2245 | mAeTargetExposureTime = |
2246 | mFacePriority ? kFacePriorityExposureTime : |
2247 | kNormalExposureTime; |
2248 | float exposureStep = ((double)rand() / RAND_MAX) * |
2249 | (kExposureWanderMax - kExposureWanderMin) + |
2250 | kExposureWanderMin; |
2251 | mAeTargetExposureTime *= std::pow(2, exposureStep); |
2252 | mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING; |
2253 | } |
2254 | break; |
2255 | case ANDROID_CONTROL_AE_STATE_SEARCHING: |
2256 | mAeCurrentExposureTime += |
2257 | (mAeTargetExposureTime - mAeCurrentExposureTime) * |
2258 | kExposureTrackRate; |
2259 | if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) < |
2260 | mAeTargetExposureTime / 10) { |
2261 | // Close enough |
2262 | mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED; |
2263 | mAeCounter = 0; |
2264 | } |
2265 | break; |
2266 | case ANDROID_CONTROL_AE_STATE_LOCKED: |
2267 | mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED; |
2268 | mAeCounter = 0; |
2269 | break; |
2270 | default: |
2271 | ALOGE("%s: Emulator in unexpected AE state %d", |
2272 | __FUNCTION__, mAeState); |
2273 | return INVALID_OPERATION; |
2274 | } |
2275 | } else { |
2276 | // AE is locked |
2277 | mAeState = ANDROID_CONTROL_AE_STATE_LOCKED; |
2278 | } |
2279 | |
2280 | return OK; |
2281 | } |
2282 | |
status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
    /**
     * Simulated auto-focus processing for one request.
     *
     * Reads the AF mode and AF trigger from @p settings, forwards the AF
     * mode to the sensor for back-facing cameras, and then advances the
     * fake AF state machine by at most one transition per frame. Focus
     * outcomes are randomized: a trigger or active scan "succeeds"
     * (FOCUSED_LOCKED) about 2/3 of the time.
     *
     * @param settings request metadata; AF controls are read from it.
     * @return OK, or BAD_VALUE on missing/inconsistent/unsupported entries.
     */
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AF_MODE);
    if (e.count == 0) {
        ALOGE("%s: No AF mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t afMode = e.data.u8[0];

    e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
    typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
    af_trigger_t afTrigger;
    // If we have an afTrigger, afTriggerId should be set too
    if (e.count != 0) {
        afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);

        e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);

        if (e.count == 0) {
            ALOGE("%s: When android.control.afTrigger is set "
                    " in the request, afTriggerId needs to be set as well",
                    __FUNCTION__);
            return BAD_VALUE;
        }

        // Remember the trigger ID so update3A() can report it back.
        mAfTriggerId = e.data.i32[0];

        ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
        ALOGV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
        ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
    } else {
        afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    }
    // Front-facing camera does not support autofocus here; force AF off
    // regardless of what the request asked for.
    if (!mFacingBack) {
        afMode = ANDROID_CONTROL_AF_MODE_OFF;
    }

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_OFF:
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_MACRO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            if (!mFacingBack) {
                ALOGE("%s: Front camera doesn't support AF mode %d",
                        __FUNCTION__, afMode);
                return BAD_VALUE;
            }
            // Pass the requested AF mode down to the sensor simulation.
            // (Note: "setAutoFocuas" is the existing project API name.)
            mSensor->setAutoFocuas(afMode);
            // OK, handle transitions lower on
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }
#if 0
    e = settings.find(ANDROID_CONTROL_AF_REGIONS);
    if (e.count == 0) {
        ALOGE("%s:Get ANDROID_CONTROL_AF_REGIONS failed\n", __FUNCTION__);
        return BAD_VALUE;
    }
    int32_t x0 = e.data.i32[0];
    int32_t y0 = e.data.i32[1];
    int32_t x1 = e.data.i32[2];
    int32_t y1 = e.data.i32[3];
    mSensor->setFocuasArea(x0, y0, x1, y1);
    DBG_LOGB(" x0:%d, y0:%d,x1:%d,y1:%d,\n", x0, y0, x1, y1);
#endif


    // A mode change restarts the state machine conservatively (see the
    // INACTIVE handling below, which stays put for one frame after a change).
    bool afModeChanged = mAfMode != afMode;
    mAfMode = afMode;

    /**
     * Simulate AF triggers. Transition at most 1 state per frame.
     * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
     */

    bool afTriggerStart = false;
    bool afTriggerCancel = false;
    switch (afTrigger) {
        case ANDROID_CONTROL_AF_TRIGGER_IDLE:
            break;
        case ANDROID_CONTROL_AF_TRIGGER_START:
            afTriggerStart = true;
            break;
        case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
            afTriggerCancel = true;
            // Cancel trigger always transitions into INACTIVE
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

            ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);

            // Stay in 'inactive' until at least next frame
            return OK;
        default:
            ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
            return BAD_VALUE;
    }

    // If we get down here, we're either in an autofocus mode
    // or in a continuous focus mode (and no other modes)

    int oldAfState = mAfState;
    switch (mAfState) {
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                        break;
                }
            } else {
                // At least one frame stays in INACTIVE
                if (!afModeChanged) {
                    switch (afMode) {
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                            // fall-through
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                            mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
                            break;
                    }
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
            /**
             * When the AF trigger is activated, the algorithm should finish
             * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
             * or AF_NOT_FOCUSED as appropriate
             */
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            /**
             * When the AF trigger is not involved, the AF algorithm should
             * start in INACTIVE state, and then transition into PASSIVE_SCAN
             * and PASSIVE_FOCUSED states
             */
            else if (!afTriggerCancel) {
                // Randomly transition to passive focus
                if (rand() % 3 == 0) {
                    mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
            }

            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            // TODO: initiate passive scan (PASSIVE_SCAN)
            break;
        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
            // Simulate AF sweep completing instantaneously

            // Randomly transition to focused or not focused
            if (rand() % 3) {
                mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
            } else {
                mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
            }
            break;
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        default:
            ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
    }

    // Log the state transition (verbose builds only).
    {
        char afStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                oldAfState,
                afStateString,
                sizeof(afStateString));

        char afNewStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                mAfState,
                afNewStateString,
                sizeof(afNewStateString));
        ALOGVV("%s: AF state transitioned from %s to %s",
              __FUNCTION__, afStateString, afNewStateString);
    }


    return OK;
}
2522 | |
2523 | status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) { |
2524 | camera_metadata_entry e; |
2525 | |
2526 | e = settings.find(ANDROID_CONTROL_AWB_MODE); |
2527 | if (e.count == 0) { |
2528 | ALOGE("%s: No AWB mode entry!", __FUNCTION__); |
2529 | return BAD_VALUE; |
2530 | } |
2531 | uint8_t awbMode = e.data.u8[0]; |
2532 | //DBG_LOGB(" awbMode%d\n", awbMode); |
2533 | |
2534 | // TODO: Add white balance simulation |
2535 | |
2536 | switch (awbMode) { |
2537 | case ANDROID_CONTROL_AWB_MODE_OFF: |
2538 | mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
2539 | return OK; |
2540 | case ANDROID_CONTROL_AWB_MODE_AUTO: |
2541 | case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: |
2542 | case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: |
2543 | case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: |
2544 | case ANDROID_CONTROL_AWB_MODE_SHADE: |
2545 | mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; //add for cts |
2546 | return mSensor->setAWB(awbMode); |
2547 | // OK |
2548 | break; |
2549 | default: |
2550 | ALOGE("%s: Emulator doesn't support AWB mode %d", |
2551 | __FUNCTION__, awbMode); |
2552 | return BAD_VALUE; |
2553 | } |
2554 | |
2555 | return OK; |
2556 | } |
2557 | |
2558 | |
2559 | void EmulatedFakeCamera3::update3A(CameraMetadata &settings) { |
2560 | if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) { |
2561 | settings.update(ANDROID_SENSOR_EXPOSURE_TIME, |
2562 | &mAeCurrentExposureTime, 1); |
2563 | settings.update(ANDROID_SENSOR_SENSITIVITY, |
2564 | &mAeCurrentSensitivity, 1); |
2565 | } |
2566 | |
2567 | settings.update(ANDROID_CONTROL_AE_STATE, |
2568 | &mAeState, 1); |
2569 | settings.update(ANDROID_CONTROL_AF_STATE, |
2570 | &mAfState, 1); |
2571 | settings.update(ANDROID_CONTROL_AWB_STATE, |
2572 | &mAwbState, 1); |
2573 | /** |
2574 | * TODO: Trigger IDs need a think-through |
2575 | */ |
2576 | settings.update(ANDROID_CONTROL_AF_TRIGGER_ID, |
2577 | &mAfTriggerId, 1); |
2578 | } |
2579 | |
2580 | void EmulatedFakeCamera3::signalReadoutIdle() { |
2581 | Mutex::Autolock l(mLock); |
2582 | // Need to chek isIdle again because waiting on mLock may have allowed |
2583 | // something to be placed in the in-flight queue. |
2584 | if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) { |
2585 | ALOGV("Now idle"); |
2586 | mStatus = STATUS_READY; |
2587 | } |
2588 | } |
2589 | |
2590 | void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e, |
2591 | nsecs_t timestamp) { |
2592 | switch(e) { |
2593 | case Sensor::SensorListener::EXPOSURE_START: { |
2594 | ALOGVV("%s: Frame %d: Sensor started exposure at %lld", |
2595 | __FUNCTION__, frameNumber, timestamp); |
2596 | // Trigger shutter notify to framework |
2597 | camera3_notify_msg_t msg; |
2598 | msg.type = CAMERA3_MSG_SHUTTER; |
2599 | msg.message.shutter.frame_number = frameNumber; |
2600 | msg.message.shutter.timestamp = timestamp; |
2601 | sendNotify(&msg); |
2602 | break; |
2603 | } |
2604 | default: |
2605 | ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__, |
2606 | e, timestamp); |
2607 | break; |
2608 | } |
2609 | } |
2610 | |
2611 | EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) : |
2612 | mParent(parent), mJpegWaiting(false) { |
2613 | } |
2614 | |
2615 | EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() { |
2616 | for (List<Request>::iterator i = mInFlightQueue.begin(); |
2617 | i != mInFlightQueue.end(); i++) { |
2618 | delete i->buffers; |
2619 | delete i->sensorBuffers; |
2620 | } |
2621 | } |
2622 | |
2623 | void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) { |
2624 | Mutex::Autolock l(mLock); |
2625 | |
2626 | mInFlightQueue.push_back(r); |
2627 | mInFlightSignal.signal(); |
2628 | } |
2629 | |
2630 | bool EmulatedFakeCamera3::ReadoutThread::isIdle() { |
2631 | Mutex::Autolock l(mLock); |
2632 | return mInFlightQueue.empty() && !mThreadActive; |
2633 | } |
2634 | |
2635 | status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() { |
2636 | status_t res; |
2637 | Mutex::Autolock l(mLock); |
2638 | int loopCount = 0; |
2639 | while (mInFlightQueue.size() >= kMaxQueueSize) { |
2640 | res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop); |
2641 | if (res != OK && res != TIMED_OUT) { |
2642 | ALOGE("%s: Error waiting for in-flight queue to shrink", |
2643 | __FUNCTION__); |
2644 | return INVALID_OPERATION; |
2645 | } |
2646 | if (loopCount == kMaxWaitLoops) { |
2647 | ALOGE("%s: Timed out waiting for in-flight queue to shrink", |
2648 | __FUNCTION__); |
2649 | return TIMED_OUT; |
2650 | } |
2651 | loopCount++; |
2652 | } |
2653 | return OK; |
2654 | } |
2655 | |
2656 | status_t EmulatedFakeCamera3::ReadoutThread::setJpegCompressorListener(EmulatedFakeCamera3 *parent) { |
2657 | status_t res; |
2658 | res = mParent->mJpegCompressor->setlistener(this); |
2659 | if (res != NO_ERROR) { |
2660 | ALOGE("%s: set JpegCompressor Listner failed",__FUNCTION__); |
2661 | } |
2662 | return res; |
2663 | } |
2664 | |
2665 | status_t EmulatedFakeCamera3::ReadoutThread::startJpegCompressor(EmulatedFakeCamera3 *parent) { |
2666 | status_t res; |
2667 | res = mParent->mJpegCompressor->start(); |
2668 | if (res != NO_ERROR) { |
2669 | ALOGE("%s: JpegCompressor start failed",__FUNCTION__); |
2670 | } |
2671 | return res; |
2672 | } |
2673 | |
2674 | status_t EmulatedFakeCamera3::ReadoutThread::shutdownJpegCompressor(EmulatedFakeCamera3 *parent) { |
2675 | status_t res; |
2676 | res = mParent->mJpegCompressor->cancel(); |
2677 | if (res != OK) { |
2678 | ALOGE("%s: JpegCompressor cancel failed",__FUNCTION__); |
2679 | } |
2680 | return res; |
2681 | } |
2682 | |
/*
 * Main readout loop: pops one capture request from the in-flight queue,
 * waits for the sensor to deliver the matching frame, hands any BLOB
 * (JPEG) buffers to the async compressor, and sends all remaining buffers
 * plus the result metadata to the framework.  Returns true to keep the
 * thread running; false only on a fatal wait error.
 */
bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
    status_t res;
    ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);

    // First wait for a request from the in-flight queue.
    // mCurrentRequest.settings is cleared only after a request is fully
    // handled, so a non-empty settings object means we are still working
    // on the previous frame and must skip the dequeue.
    if (mCurrentRequest.settings.isEmpty()) {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
            if (res == TIMED_OUT) {
                // No work yet; loop again so the thread stays responsive
                ALOGVV("%s: ReadoutThread: Timed out waiting for request",
                        __FUNCTION__);
                return true;
            } else if (res != NO_ERROR) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                return false;
            }
        }
        // NOTE(review): assumes a non-timeout wakeup implies the queue is
        // non-empty; a spurious wakeup would dereference begin() of an
        // empty list -- confirm Condition/signal usage guarantees this.
        // Pop the oldest request; acquire() moves the metadata without copy
        mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
        mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
        mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
        mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
        mCurrentRequest.havethumbnail = mInFlightQueue.begin()->havethumbnail;
        mInFlightQueue.erase(mInFlightQueue.begin());
        // Wake anyone blocked in waitForReadout() on a full queue
        mInFlightSignal.signal();
        mThreadActive = true;
        ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
                mCurrentRequest.frameNumber);
    }

    // Then wait for it to be delivered from the sensor
    ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
            __FUNCTION__);

    nsecs_t captureTime;
    bool gotFrame =
            mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
    if (!gotFrame) {
        // Not fatal: keep the current request and re-poll the sensor on
        // the next loop iteration.
        ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
                __FUNCTION__);
        return true;
    }

    ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
            mCurrentRequest.frameNumber, captureTime);

    // Check if we need to JPEG encode a buffer, and send it for async
    // compression if so. Otherwise prepare the buffer for return.
    bool needJpeg = false;
    HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
    while (buf != mCurrentRequest.buffers->end()) {
        // goodBuffer is never cleared here: all non-BLOB buffers are
        // reported back with CAMERA3_BUFFER_STATUS_OK.
        bool goodBuffer = true;
        if ( buf->stream->format ==
                HAL_PIXEL_FORMAT_BLOB) {
            Mutex::Autolock jl(mJpegLock);
            needJpeg = true;
            CaptureRequest currentcapture;
            currentcapture.frameNumber = mCurrentRequest.frameNumber;
            currentcapture.sensorBuffers = mCurrentRequest.sensorBuffers;
            currentcapture.buf = buf;
            currentcapture.mNeedThumbnail = mCurrentRequest.havethumbnail;
            mParent->mJpegCompressor->queueRequest(currentcapture);
            // Ownership of sensorBuffers transfers to the JPEG compressor,
            // which deletes them when compression completes.
            mCurrentRequest.sensorBuffers = NULL;
            // The BLOB buffer is returned later via onJpegDone(); remove it
            // from the set returned with this result.
            buf = mCurrentRequest.buffers->erase(buf);
            continue;
        }
        // Done writing into the gralloc buffer; release the CPU mapping
        GraphicBufferMapper::get().unlock(*(buf->buffer));

        buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
                CAMERA3_BUFFER_STATUS_ERROR;
        // -1 fences: buffers are ready for immediate consumption
        buf->acquire_fence = -1;
        buf->release_fence = -1;

        ++buf;
    } // end while

    // Construct result for all completed buffers and results

    camera3_capture_result result;

    mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
            &captureTime, 1);

    memset(&result, 0, sizeof(result));
    result.frame_number = mCurrentRequest.frameNumber;
    result.result = mCurrentRequest.settings.getAndLock();
    result.num_output_buffers = mCurrentRequest.buffers->size();
    result.output_buffers = mCurrentRequest.buffers->array();
    // Single metadata packet per request (no partial results)
    result.partial_result = 1;

    // Go idle if queue is empty, before sending result
    bool signalIdle = false;
    {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            mThreadActive = false;
            signalIdle = true;
        }
    }
    // Call the parent outside our own lock to avoid lock-order issues
    if (signalIdle) mParent->signalReadoutIdle();

    // Send it off to the framework
    ALOGVV("%s: ReadoutThread: Send result to framework",
            __FUNCTION__);
    mParent->sendCaptureResult(&result);

    // Clean up
    mCurrentRequest.settings.unlock(result.result);

    delete mCurrentRequest.buffers;
    mCurrentRequest.buffers = NULL;
    if (!needJpeg) {
        // Only delete sensorBuffers when ownership was NOT handed to the
        // JPEG compressor above.
        delete mCurrentRequest.sensorBuffers;
        mCurrentRequest.sensorBuffers = NULL;
    }
    // Clearing settings marks the request complete (see dequeue check above)
    mCurrentRequest.settings.clear();

    return true;
}
2805 | |
2806 | void EmulatedFakeCamera3::ReadoutThread::onJpegDone( |
2807 | const StreamBuffer &jpegBuffer, bool success , CaptureRequest &r) { |
2808 | Mutex::Autolock jl(mJpegLock); |
2809 | GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer)); |
2810 | |
2811 | mJpegHalBuffer = *(r.buf); |
2812 | mJpegHalBuffer.status = success ? |
2813 | CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR; |
2814 | mJpegHalBuffer.acquire_fence = -1; |
2815 | mJpegHalBuffer.release_fence = -1; |
2816 | mJpegWaiting = false; |
2817 | |
2818 | camera3_capture_result result; |
2819 | result.frame_number = r.frameNumber; |
2820 | result.result = NULL; |
2821 | result.num_output_buffers = 1; |
2822 | result.output_buffers = &mJpegHalBuffer; |
2823 | result.partial_result = 1; |
2824 | |
2825 | if (!success) { |
2826 | ALOGE("%s: Compression failure, returning error state buffer to" |
2827 | " framework", __FUNCTION__); |
2828 | } else { |
2829 | DBG_LOGB("%s: Compression complete, returning buffer to framework", |
2830 | __FUNCTION__); |
2831 | } |
2832 | |
2833 | mParent->sendCaptureResult(&result); |
2834 | |
2835 | } |
2836 | |
2837 | void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone( |
2838 | const StreamBuffer &inputBuffer) { |
2839 | // Should never get here, since the input buffer has to be returned |
2840 | // by end of processCaptureRequest |
2841 | ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__); |
2842 | } |
2843 | |
2844 | |
2845 | }; // namespace android |
2846 |