blob: a23fa1dbc47dde5d5fb4a00ba8a6e71102580956
1 | /* |
2 | * Copyright (C) 2013 The Android Open Source Project |
3 | * |
4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
5 | * you may not use this file except in compliance with the License. |
6 | * You may obtain a copy of the License at |
7 | * |
8 | * http://www.apache.org/licenses/LICENSE-2.0 |
9 | * |
10 | * Unless required by applicable law or agreed to in writing, software |
11 | * distributed under the License is distributed on an "AS IS" BASIS, |
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
13 | * See the License for the specific language governing permissions and |
14 | * limitations under the License. |
15 | */ |
16 | |
17 | /* |
18 | * Contains implementation of a class EmulatedFakeCamera3 that encapsulates |
19 | * functionality of an advanced fake camera. |
20 | */ |
21 | |
22 | #include <inttypes.h> |
23 | |
24 | //#define LOG_NDEBUG 0 |
25 | //#define LOG_NNDEBUG 0 |
26 | #define LOG_TAG "EmulatedCamera_FakeCamera3" |
27 | #include <utils/Log.h> |
28 | |
29 | #include "EmulatedFakeCamera3.h" |
30 | #include "EmulatedCameraFactory.h" |
31 | #include <ui/Fence.h> |
32 | #include <ui/Rect.h> |
33 | #include <ui/GraphicBufferMapper.h> |
34 | #include <sys/types.h> |
35 | |
36 | #include <cutils/properties.h> |
37 | #include "fake-pipeline2/Sensor.h" |
38 | #include "fake-pipeline2/JpegCompressor.h" |
39 | #include <cmath> |
40 | #include <gralloc_priv.h> |
41 | #include <binder/IPCThreadState.h> |
42 | |
43 | #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0 |
44 | #define ALOGVV ALOGV |
45 | #else |
46 | #define ALOGVV(...) ((void)0) |
47 | #endif |
48 | |
49 | namespace android { |
50 | |
/**
 * Constants for camera capabilities
 */

// Time-unit multipliers in nanoseconds (the base unit of nsecs_t used
// throughout this HAL): 1 microsecond, 1 millisecond, 1 second.
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;


// Gralloc pixel formats this fake camera advertises; configureStreams()
// rejects any stream whose format is not in this table.
const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
        //HAL_PIXEL_FORMAT_RAW_SENSOR,
        HAL_PIXEL_FORMAT_BLOB,
        //HAL_PIXEL_FORMAT_RGBA_8888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
        // These are handled by YCbCr_420_888
        HAL_PIXEL_FORMAT_YV12,
        HAL_PIXEL_FORMAT_YCrCb_420_SP,
        //HAL_PIXEL_FORMAT_YCbCr_422_I,
        HAL_PIXEL_FORMAT_YCbCr_420_888
};

// Raw-capture size, as a single (width, height) pair.
const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
    640, 480
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum frame duration for raw output, taken from the fake sensor.
const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// Processed (YUV) output sizes for the back camera, as (width, height) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[6] = {
    640, 480, 320, 240,// 1280, 720
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Processed (YUV) output sizes for the front camera, as (width, height) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
    640, 480, 320, 240
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum frame duration for processed output, taken from the fake sensor.
const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// JPEG (BLOB) output sizes for the back camera, as (width, height) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
    1280,720
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// JPEG (BLOB) output sizes for the front camera, as (width, height) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
    640, 480
    // Sensor::kResolution[0], Sensor::kResolution[1]
};


// Minimum frame duration for JPEG output, taken from the fake sensor.
const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int EmulatedFakeCamera3::kNormalSensitivity = 100;
const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
// Fraction of the exposure error corrected per frame by the fake AE loop.
const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
// Bounds (in EV-like units) for the simulated exposure wander.
const float EmulatedFakeCamera3::kExposureWanderMin = -2;
const float EmulatedFakeCamera3::kExposureWanderMax = 1;

/**
 * Camera device lifecycle methods
 */
// Floor for computed JPEG buffer sizes: 256 KiB payload plus the
// camera3_jpeg_blob trailer appended at the end of the buffer.
static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
129 | jpegsize EmulatedFakeCamera3::getMaxJpegResolution(uint32_t picSizes[],int count) { |
130 | uint32_t maxJpegWidth = 0, maxJpegHeight = 0; |
131 | jpegsize maxJpegResolution; |
132 | for (int i=0; i < count; i+= 4) { |
133 | uint32_t width = picSizes[i+1]; |
134 | uint32_t height = picSizes[i+2]; |
135 | if (picSizes[i+0] == HAL_PIXEL_FORMAT_BLOB && |
136 | (width * height > maxJpegWidth * maxJpegHeight)) { |
137 | maxJpegWidth = width; |
138 | maxJpegHeight = height; |
139 | } |
140 | } |
141 | maxJpegResolution.width = maxJpegWidth; |
142 | maxJpegResolution.height = maxJpegHeight; |
143 | return maxJpegResolution; |
144 | } |
145 | ssize_t EmulatedFakeCamera3::getJpegBufferSize(int width, int height) { |
146 | if (maxJpegResolution.width == 0) { |
147 | return BAD_VALUE; |
148 | } |
149 | ssize_t maxJpegBufferSize = JpegCompressor::kMaxJpegSize; |
150 | |
151 | // Calculate final jpeg buffer size for the given resolution. |
152 | float scaleFactor = ((float) (width * height)) / |
153 | (maxJpegResolution.width * maxJpegResolution.height); |
154 | ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize; |
155 | // Bound the buffer size to [MIN_JPEG_BUFFER_SIZE, maxJpegBufferSize]. |
156 | if (jpegBufferSize > maxJpegBufferSize) { |
157 | jpegBufferSize = maxJpegBufferSize; |
158 | } else if (jpegBufferSize < kMinJpegBufferSize) { |
159 | jpegBufferSize = kMinJpegBufferSize; |
160 | } |
161 | return jpegBufferSize; |
162 | } |
163 | |
164 | EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, struct hw_module_t* module) : |
165 | EmulatedCamera3(cameraId, module) { |
166 | ALOGI("Constructing emulated fake camera 3 cameraID:%d", mCameraID); |
167 | |
168 | for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) { |
169 | mDefaultTemplates[i] = NULL; |
170 | } |
171 | |
172 | /** |
173 | * Front cameras = limited mode |
174 | * Back cameras = full mode |
175 | */ |
176 | //TODO limited or full mode, read this from camera driver |
177 | //mFullMode = facingBack; |
178 | mCameraStatus = CAMERA_INIT; |
179 | mSupportCap = 0; |
180 | mSupportRotate = 0; |
181 | mFullMode = 0; |
182 | |
183 | gLoadXml.parseXMLFile(); |
184 | } |
185 | |
186 | EmulatedFakeCamera3::~EmulatedFakeCamera3() { |
187 | for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) { |
188 | if (mDefaultTemplates[i] != NULL) { |
189 | free_camera_metadata(mDefaultTemplates[i]); |
190 | } |
191 | } |
192 | |
193 | if (mCameraInfo != NULL) { |
194 | CAMHAL_LOGIA("free mCameraInfo"); |
195 | free_camera_metadata(mCameraInfo); |
196 | mCameraInfo = NULL; |
197 | } |
198 | } |
199 | |
/**
 * One-time device initialization: builds the static metadata and then
 * delegates to the base class. Only legal from the STATUS_ERROR
 * (pre-initialized) state; returns INVALID_OPERATION otherwise.
 */
status_t EmulatedFakeCamera3::Initialize() {
    DBG_LOGB("mCameraID=%d,mStatus=%d,ddd\n", mCameraID, mStatus);
    status_t res;

#ifdef HAVE_VERSION_INFO
    // Build-provenance banner, only compiled in when the build system
    // defines HAVE_VERSION_INFO and the CAMHAL_* macros.
    CAMHAL_LOGIB("\n--------------------------------\n"
                  "author:aml.sh multi-media team\n"
                  "branch name: %s\n"
                  "git version: %s \n"
                  "last changed: %s\n"
                  "build-time: %s\n"
                  "build-name: %s\n"
                  "uncommitted-file-num:%d\n"
                  "ssh user@%s, cd %s\n"
                  "hostname %s\n"
                  "--------------------------------\n",
                  CAMHAL_BRANCH_NAME,
                  CAMHAL_GIT_VERSION,
                  CAMHAL_LAST_CHANGED,
                  CAMHAL_BUILD_TIME,
                  CAMHAL_BUILD_NAME,
                  CAMHAL_GIT_UNCOMMIT_FILE_NUM,
                  CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
                  );
#endif


    // STATUS_ERROR is the initial state here; anything else means
    // Initialize() already ran.
    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    // Populate mCameraInfo with the static capability metadata.
    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return EmulatedCamera3::Initialize();
}
241 | |
/**
 * Opens the device: brings up the fake sensor, the readout thread and the
 * JPEG compressor, then seeds the fake 3A state machine with converged
 * defaults. Setup order matters — the sensor must be running before the
 * readout thread starts consuming frames.
 *
 * @param device  out-parameter filled by the base class on success.
 * @return NO_ERROR on success; INVALID_OPERATION if not closed/plugged,
 *         or the first failing sub-step's error code.
 */
status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");
    Mutex::Autolock l(mLock);
    status_t res;

    // Only a closed, physically-present camera can be connected.
    if ((mStatus != STATUS_CLOSED) || !mPlugged) {
        ALOGE("%s: Can't connect in state %d, mPlugged=%d",
                __FUNCTION__, mStatus, mPlugged);
        return INVALID_OPERATION;
    }

    mSensor = new Sensor();
    mSensor->setSensorListener(this);

    res = mSensor->startUp(mCameraID);
    DBG_LOGB("mSensor startUp, mCameraID=%d\n", mCameraID);
    if (res != NO_ERROR) return res;

    // Probe driver capabilities; remember whether hardware rotate is usable.
    mSupportCap = mSensor->IoctlStateProbe();
    if (mSupportCap & IOCTL_MASK_ROTATE) {
        mSupportRotate = true;
    }

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor();

    // NOTE(review): sub-step failures below return without tearing down the
    // already-started sensor — the caller is expected to close the device.
    res = mReadoutThread->setJpegCompressorListener(this);
    if (res != NO_ERROR) {
        return res;
    }
    res = mReadoutThread->startJpegCompressor(this);
    if (res != NO_ERROR) {
        return res;
    }

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A

    mControlMode  = ANDROID_CONTROL_MODE_AUTO;
    mFacePriority = false;
    mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
    // AE starts pre-converged so captures work immediately on this fake HAL.
    mAeState      = ANDROID_CONTROL_AE_STATE_CONVERGED;//ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAfTriggerId  = 0;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity  = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}
297 | |
298 | status_t EmulatedFakeCamera3::plugCamera() { |
299 | { |
300 | Mutex::Autolock l(mLock); |
301 | |
302 | if (!mPlugged) { |
303 | CAMHAL_LOGIB("%s: Plugged back in", __FUNCTION__); |
304 | mPlugged = true; |
305 | } |
306 | } |
307 | |
308 | return NO_ERROR; |
309 | } |
310 | |
311 | status_t EmulatedFakeCamera3::unplugCamera() { |
312 | { |
313 | Mutex::Autolock l(mLock); |
314 | |
315 | if (mPlugged) { |
316 | CAMHAL_LOGIB("%s: Unplugged camera", __FUNCTION__); |
317 | mPlugged = false; |
318 | } |
319 | } |
320 | return true; |
321 | } |
322 | |
323 | camera_device_status_t EmulatedFakeCamera3::getHotplugStatus() { |
324 | Mutex::Autolock l(mLock); |
325 | return mPlugged ? |
326 | CAMERA_DEVICE_STATUS_PRESENT : |
327 | CAMERA_DEVICE_STATUS_NOT_PRESENT; |
328 | } |
329 | |
330 | bool EmulatedFakeCamera3::getCameraStatus() |
331 | { |
332 | CAMHAL_LOGVB("%s, mCameraStatus = %d",__FUNCTION__,mCameraStatus); |
333 | bool ret = false; |
334 | if (mStatus == STATUS_CLOSED) { |
335 | ret = true; |
336 | } else { |
337 | ret = false; |
338 | } |
339 | return ret; |
340 | } |
341 | |
/**
 * Shuts the device down: stops the sensor, the JPEG compressor and the
 * readout thread, then frees per-stream private state. The lock is
 * deliberately dropped around the blocking sensor shutdown and thread
 * join to avoid deadlocking against those threads' own lock use.
 */
status_t EmulatedFakeCamera3::closeCamera() {
    DBG_LOGB("%s, %d\n", __FUNCTION__, __LINE__);

    status_t res;
    {
        Mutex::Autolock l(mLock);
        // Already closed — nothing to do.
        if (mStatus == STATUS_CLOSED) return OK;
    }
    // Sensor shutdown happens outside the lock (it blocks on its thread).
    mSensor->sendExitSingalToSensor();
    res = mSensor->shutDown();
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
        return res;
    }
    mSensor.clear();

    {
        Mutex::Autolock l(mLock);
        // Ask the compressor and readout thread to stop...
        res = mReadoutThread->shutdownJpegCompressor(this);
        if (res != OK) {
            ALOGE("%s: Unable to shut down JpegCompressor: %d", __FUNCTION__, res);
            return res;
        }
        mReadoutThread->sendExitReadoutThreadSignal();
        mReadoutThread->requestExit();
    }
    // ...and join it with the lock released so it can finish cleanly.
    mReadoutThread->join();
    DBG_LOGA("Sucess exit ReadOutThread");
    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = NULL;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }

    return EmulatedCamera3::closeCamera();
}
385 | |
386 | status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) { |
387 | char property[PROPERTY_VALUE_MAX]; |
388 | char* tempApkName = gLoadXml.getApkPackageName(IPCThreadState::self()->getCallingPid()); |
389 | List_Or * temp=new List_Or(); |
390 | info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT; |
391 | if (mSensorType == SENSOR_USB) { |
392 | if (mFacingBack) { |
393 | property_get("hw.camera.orientation.back", property, "0"); |
394 | } else { |
395 | property_get("hw.camera.orientation.front", property, "0"); |
396 | } |
397 | int32_t orientation = atoi(property); |
398 | |
399 | if (gLoadXml.findApkCp(tempApkName, temp)) { |
400 | orientation = atoi(temp->pro); |
401 | } |
402 | if (temp != NULL) { |
403 | delete temp; |
404 | temp = NULL; |
405 | } |
406 | |
407 | property_get("hw.camera.usb.orientation_offset", property, "0"); |
408 | orientation += atoi(property); |
409 | orientation %= 360; |
410 | info->orientation = orientation ; |
411 | } else { |
412 | if (mFacingBack) { |
413 | property_get("hw.camera.orientation.back", property, "270"); |
414 | } else { |
415 | property_get("hw.camera.orientation.front", property, "90"); |
416 | } |
417 | info->orientation = atoi(property); |
418 | } |
419 | return EmulatedCamera3::getCameraInfo(info); |
420 | } |
421 | |
422 | /** |
423 | * Camera3 interface methods |
424 | */ |
425 | |
426 | void EmulatedFakeCamera3::getValidJpegSize(uint32_t picSizes[], uint32_t availablejpegsize[], int count) { |
427 | int i,j,k; |
428 | bool valid = true; |
429 | for (i=0,j=0; i < count; i+= 4) { |
430 | for (k= 0; k<=j ;k+=2) { |
431 | if ((availablejpegsize[k]*availablejpegsize[k+1]) == (picSizes[i+1]*picSizes[i+2])) { |
432 | |
433 | valid = false; |
434 | } |
435 | } |
436 | if (valid) { |
437 | availablejpegsize[j] = picSizes[i+1]; |
438 | availablejpegsize[j+1] = picSizes[i+2]; |
439 | j+=2; |
440 | } |
441 | valid = true; |
442 | } |
443 | } |
444 | |
445 | status_t EmulatedFakeCamera3::checkValidJpegSize(uint32_t width, uint32_t height) { |
446 | |
447 | int validsizecount = 0; |
448 | uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]); |
449 | for (uint32_t f = 0; f < count; f+=2) { |
450 | if (mAvailableJpegSize[f] != 0) { |
451 | if ((mAvailableJpegSize[f] == width)&&(mAvailableJpegSize[f+1] == height)) { |
452 | validsizecount++; |
453 | } |
454 | } else { |
455 | break; |
456 | } |
457 | } |
458 | if (validsizecount == 0) |
459 | return BAD_VALUE; |
460 | return OK; |
461 | } |
462 | |
463 | status_t EmulatedFakeCamera3::configureStreams( |
464 | camera3_stream_configuration *streamList) { |
465 | Mutex::Autolock l(mLock); |
466 | uint32_t width, height, pixelfmt; |
467 | bool isRestart = false; |
468 | DBG_LOGB("%s: %d streams", __FUNCTION__, streamList->num_streams); |
469 | |
470 | if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) { |
471 | ALOGE("%s: Cannot configure streams in state %d", |
472 | __FUNCTION__, mStatus); |
473 | return NO_INIT; |
474 | } |
475 | |
476 | /** |
477 | * Sanity-check input list. |
478 | */ |
479 | if (streamList == NULL) { |
480 | ALOGE("%s: NULL stream configuration", __FUNCTION__); |
481 | return BAD_VALUE; |
482 | } |
483 | |
484 | if (streamList->streams == NULL) { |
485 | ALOGE("%s: NULL stream list", __FUNCTION__); |
486 | return BAD_VALUE; |
487 | } |
488 | |
489 | if (streamList->num_streams < 1) { |
490 | ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__, |
491 | streamList->num_streams); |
492 | return BAD_VALUE; |
493 | } |
494 | |
495 | camera3_stream_t *inputStream = NULL; |
496 | for (size_t i = 0; i < streamList->num_streams; i++) { |
497 | camera3_stream_t *newStream = streamList->streams[i]; |
498 | |
499 | if (newStream == NULL) { |
500 | ALOGE("%s: Stream index %zu was NULL", |
501 | __FUNCTION__, i); |
502 | return BAD_VALUE; |
503 | } |
504 | |
505 | if (newStream->max_buffers <= 0) { |
506 | isRestart = true;//mSensor->isNeedRestart(newStream->width, newStream->height, newStream->format); |
507 | DBG_LOGB("format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d, isRestart=%d\n", |
508 | newStream->format, newStream->width, newStream->height, |
509 | newStream->stream_type, newStream->max_buffers, |
510 | isRestart); |
511 | } |
512 | ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x", |
513 | __FUNCTION__, newStream, i, newStream->stream_type, |
514 | newStream->usage, |
515 | newStream->format); |
516 | |
517 | if (newStream->stream_type == CAMERA3_STREAM_INPUT || |
518 | newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { |
519 | if (inputStream != NULL) { |
520 | |
521 | ALOGE("%s: Multiple input streams requested!", __FUNCTION__); |
522 | return BAD_VALUE; |
523 | } |
524 | inputStream = newStream; |
525 | } |
526 | |
527 | bool validFormat = false; |
528 | for (size_t f = 0; |
529 | f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]); |
530 | f++) { |
531 | if (newStream->format == kAvailableFormats[f]) { |
532 | validFormat = true; |
533 | //HAL_PIXEL_FORMAT_YCrCb_420_SP, |
534 | if (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) |
535 | newStream->format = HAL_PIXEL_FORMAT_YCrCb_420_SP; |
536 | |
537 | break; |
538 | } |
539 | DBG_LOGB("stream_type=%d\n", newStream->stream_type); |
540 | } |
541 | if (!validFormat) { |
542 | ALOGE("%s: Unsupported stream format 0x%x requested", |
543 | __FUNCTION__, newStream->format); |
544 | return BAD_VALUE; |
545 | } |
546 | |
547 | status_t ret = checkValidJpegSize(newStream->width, newStream->height); |
548 | if (ret != OK) { |
549 | return BAD_VALUE; |
550 | } |
551 | |
552 | } |
553 | mInputStream = inputStream; |
554 | width = 0; |
555 | height = 0; |
556 | for (size_t i = 0; i < streamList->num_streams; i++) { |
557 | camera3_stream_t *newStream = streamList->streams[i]; |
558 | DBG_LOGB("find propert width and height, format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d\n", |
559 | newStream->format, newStream->width, newStream->height, newStream->stream_type, newStream->max_buffers); |
560 | if ((HAL_PIXEL_FORMAT_BLOB != newStream->format) && |
561 | (CAMERA3_STREAM_OUTPUT == newStream->stream_type)) { |
562 | |
563 | if (width < newStream->width) |
564 | width = newStream->width; |
565 | |
566 | if (height < newStream->height) |
567 | height = newStream->height; |
568 | |
569 | pixelfmt = (uint32_t)newStream->format; |
570 | if (HAL_PIXEL_FORMAT_YCbCr_420_888 == pixelfmt) |
571 | pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP; |
572 | } |
573 | |
574 | } |
575 | |
576 | //TODO modify this ugly code |
577 | if (isRestart) { |
578 | isRestart = mSensor->isNeedRestart(width, height, pixelfmt); |
579 | } |
580 | |
581 | if (isRestart) { |
582 | mSensor->streamOff(); |
583 | pixelfmt = mSensor->halFormatToSensorFormat(pixelfmt); |
584 | mSensor->setOutputFormat(width, height, pixelfmt, 0); |
585 | mSensor->streamOn(); |
586 | DBG_LOGB("width=%d, height=%d, pixelfmt=%.4s\n", |
587 | width, height, (char*)&pixelfmt); |
588 | } |
589 | |
590 | /** |
591 | * Initially mark all existing streams as not alive |
592 | */ |
593 | for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) { |
594 | PrivateStreamInfo *privStream = |
595 | static_cast<PrivateStreamInfo*>((*s)->priv); |
596 | privStream->alive = false; |
597 | } |
598 | |
599 | /** |
600 | * Find new streams and mark still-alive ones |
601 | */ |
602 | for (size_t i = 0; i < streamList->num_streams; i++) { |
603 | camera3_stream_t *newStream = streamList->streams[i]; |
604 | if (newStream->priv == NULL) { |
605 | // New stream, construct info |
606 | PrivateStreamInfo *privStream = new PrivateStreamInfo(); |
607 | privStream->alive = true; |
608 | privStream->registered = false; |
609 | |
610 | newStream->usage = |
611 | mSensor->getStreamUsage(newStream->stream_type); |
612 | |
613 | DBG_LOGB("stream_type=%d\n", newStream->stream_type); |
614 | newStream->max_buffers = kMaxBufferCount; |
615 | newStream->priv = privStream; |
616 | mStreams.push_back(newStream); |
617 | } else { |
618 | // Existing stream, mark as still alive. |
619 | PrivateStreamInfo *privStream = |
620 | static_cast<PrivateStreamInfo*>(newStream->priv); |
621 | CAMHAL_LOGDA("Existing stream ?"); |
622 | privStream->alive = true; |
623 | } |
624 | DBG_LOGB("%d, newStream=%p, stream_type=%d, usage=%x, priv=%p, w*h=%dx%d\n", |
625 | i, newStream, newStream->stream_type, newStream->usage, newStream->priv, newStream->width, newStream->height); |
626 | } |
627 | |
628 | /** |
629 | * Reap the dead streams |
630 | */ |
631 | for (StreamIterator s = mStreams.begin(); s != mStreams.end();) { |
632 | PrivateStreamInfo *privStream = |
633 | static_cast<PrivateStreamInfo*>((*s)->priv); |
634 | if (!privStream->alive) { |
635 | DBG_LOGA("delete not alive streams"); |
636 | (*s)->priv = NULL; |
637 | delete privStream; |
638 | s = mStreams.erase(s); |
639 | } else { |
640 | ++s; |
641 | } |
642 | } |
643 | |
644 | /** |
645 | * Can't reuse settings across configure call |
646 | */ |
647 | mPrevSettings.clear(); |
648 | |
649 | return OK; |
650 | } |
651 | |
652 | status_t EmulatedFakeCamera3::registerStreamBuffers( |
653 | const camera3_stream_buffer_set *bufferSet) { |
654 | DBG_LOGB("%s: E", __FUNCTION__); |
655 | Mutex::Autolock l(mLock); |
656 | |
657 | /** |
658 | * Sanity checks |
659 | */ |
660 | DBG_LOGA("==========sanity checks\n"); |
661 | |
662 | // OK: register streams at any time during configure |
663 | // (but only once per stream) |
664 | if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) { |
665 | ALOGE("%s: Cannot register buffers in state %d", |
666 | __FUNCTION__, mStatus); |
667 | return NO_INIT; |
668 | } |
669 | |
670 | if (bufferSet == NULL) { |
671 | ALOGE("%s: NULL buffer set!", __FUNCTION__); |
672 | return BAD_VALUE; |
673 | } |
674 | |
675 | StreamIterator s = mStreams.begin(); |
676 | for (; s != mStreams.end(); ++s) { |
677 | if (bufferSet->stream == *s) break; |
678 | } |
679 | if (s == mStreams.end()) { |
680 | ALOGE("%s: Trying to register buffers for a non-configured stream!", |
681 | __FUNCTION__); |
682 | return BAD_VALUE; |
683 | } |
684 | |
685 | /** |
686 | * Register the buffers. This doesn't mean anything to the emulator besides |
687 | * marking them off as registered. |
688 | */ |
689 | |
690 | PrivateStreamInfo *privStream = |
691 | static_cast<PrivateStreamInfo*>((*s)->priv); |
692 | |
693 | #if 0 |
694 | if (privStream->registered) { |
695 | ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__); |
696 | return BAD_VALUE; |
697 | } |
698 | #endif |
699 | |
700 | privStream->registered = true; |
701 | |
702 | return OK; |
703 | } |
704 | |
705 | const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings( |
706 | int type) { |
707 | DBG_LOGB("%s: E", __FUNCTION__); |
708 | Mutex::Autolock l(mLock); |
709 | |
710 | if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) { |
711 | ALOGE("%s: Unknown request settings template: %d", |
712 | __FUNCTION__, type); |
713 | return NULL; |
714 | } |
715 | |
716 | /** |
717 | * Cache is not just an optimization - pointer returned has to live at |
718 | * least as long as the camera device instance does. |
719 | */ |
720 | if (mDefaultTemplates[type] != NULL) { |
721 | return mDefaultTemplates[type]; |
722 | } |
723 | |
724 | CameraMetadata settings; |
725 | |
726 | /** android.request */ |
727 | static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; |
728 | settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); |
729 | |
730 | static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; |
731 | settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1); |
732 | |
733 | static const int32_t id = 0; |
734 | settings.update(ANDROID_REQUEST_ID, &id, 1); |
735 | |
736 | static const int32_t frameCount = 0; |
737 | settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1); |
738 | |
739 | /** android.lens */ |
740 | |
741 | static const float focusDistance = 0; |
742 | settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1); |
743 | |
744 | static const float aperture = 2.8f; |
745 | settings.update(ANDROID_LENS_APERTURE, &aperture, 1); |
746 | |
747 | // static const float focalLength = 5.0f; |
748 | static const float focalLength = 3.299999952316284f; |
749 | settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1); |
750 | |
751 | static const float filterDensity = 0; |
752 | settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1); |
753 | |
754 | static const uint8_t opticalStabilizationMode = |
755 | ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; |
756 | settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, |
757 | &opticalStabilizationMode, 1); |
758 | |
759 | // FOCUS_RANGE set only in frame |
760 | |
761 | /** android.sensor */ |
762 | |
763 | static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
764 | settings.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1); |
765 | static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
766 | settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1); |
767 | static const int64_t exposureTime = 10 * MSEC; |
768 | settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1); |
769 | |
770 | int64_t frameDuration = mSensor->getMinFrameDuration(); |
771 | settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1); |
772 | |
773 | static const int32_t sensitivity = 100; |
774 | settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1); |
775 | |
776 | static const int64_t rollingShutterSkew = 0; |
777 | settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1); |
778 | // TIMESTAMP set only in frame |
779 | |
780 | /** android.flash */ |
781 | |
782 | static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE; |
783 | settings.update(ANDROID_FLASH_STATE, &flashstate, 1); |
784 | |
785 | static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; |
786 | settings.update(ANDROID_FLASH_MODE, &flashMode, 1); |
787 | |
788 | static const uint8_t flashPower = 10; |
789 | settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1); |
790 | |
791 | static const int64_t firingTime = 0; |
792 | settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1); |
793 | |
794 | /** Processing block modes */ |
795 | uint8_t hotPixelMode = 0; |
796 | uint8_t demosaicMode = 0; |
797 | uint8_t noiseMode = 0; |
798 | uint8_t shadingMode = 0; |
799 | uint8_t colorMode = 0; |
800 | uint8_t tonemapMode = 0; |
801 | uint8_t edgeMode = 0; |
802 | switch (type) { |
803 | |
804 | case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: |
805 | case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: |
806 | noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY; |
807 | // fall-through |
808 | case CAMERA3_TEMPLATE_STILL_CAPTURE: |
809 | hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY; |
810 | demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY; |
811 | shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY; |
812 | colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY; |
813 | tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY; |
814 | edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY; |
815 | break; |
816 | case CAMERA3_TEMPLATE_PREVIEW: |
817 | // fall-through |
818 | case CAMERA3_TEMPLATE_VIDEO_RECORD: |
819 | // fall-through |
820 | case CAMERA3_TEMPLATE_MANUAL: |
821 | // fall-through |
822 | default: |
823 | hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST; |
824 | demosaicMode = ANDROID_DEMOSAIC_MODE_FAST; |
825 | noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST; |
826 | shadingMode = ANDROID_SHADING_MODE_FAST; |
827 | colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST; |
828 | tonemapMode = ANDROID_TONEMAP_MODE_FAST; |
829 | edgeMode = ANDROID_EDGE_MODE_FAST; |
830 | break; |
831 | } |
832 | settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1); |
833 | settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1); |
834 | settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1); |
835 | settings.update(ANDROID_SHADING_MODE, &shadingMode, 1); |
836 | settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1); |
837 | settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1); |
838 | settings.update(ANDROID_EDGE_MODE, &edgeMode, 1); |
839 | |
840 | /** android.noise */ |
841 | static const uint8_t noiseStrength = 5; |
842 | settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1); |
843 | static uint8_t availableNBModes[] = { |
844 | ANDROID_NOISE_REDUCTION_MODE_OFF, |
845 | ANDROID_NOISE_REDUCTION_MODE_FAST, |
846 | ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY, |
847 | }; |
848 | settings.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, |
849 | availableNBModes, sizeof(availableNBModes)/sizeof(availableNBModes)); |
850 | |
851 | |
852 | /** android.color */ |
853 | static const float colorTransform[9] = { |
854 | 1.0f, 0.f, 0.f, |
855 | 0.f, 1.f, 0.f, |
856 | 0.f, 0.f, 1.f |
857 | }; |
858 | settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9); |
859 | |
860 | /** android.tonemap */ |
861 | static const float tonemapCurve[4] = { |
862 | 0.f, 0.f, |
863 | 1.f, 1.f |
864 | }; |
865 | settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4); |
866 | settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4); |
867 | settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4); |
868 | |
869 | /** android.edge */ |
870 | static const uint8_t edgeStrength = 5; |
871 | settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1); |
872 | |
873 | /** android.scaler */ |
874 | static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; |
875 | settings.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); |
876 | |
877 | static const int32_t cropRegion[] = { |
878 | 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1], |
879 | }; |
880 | settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4); |
881 | |
882 | /** android.jpeg */ |
883 | static const uint8_t jpegQuality = 80; |
884 | settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1); |
885 | |
886 | static const int32_t thumbnailSize[2] = { |
887 | 160, 120 |
888 | }; |
889 | settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); |
890 | |
891 | static const uint8_t thumbnailQuality = 80; |
892 | settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1); |
893 | |
894 | static const double gpsCoordinates[3] = { |
895 | 0, 0, 0 |
896 | }; |
897 | settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); //default 2 value |
898 | |
899 | static const uint8_t gpsProcessingMethod[32] = "None"; |
900 | settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32); |
901 | |
902 | static const int64_t gpsTimestamp = 0; |
903 | settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1); |
904 | |
905 | static const int32_t jpegOrientation = 0; |
906 | settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); |
907 | |
908 | /** android.stats */ |
909 | |
910 | static const uint8_t faceDetectMode = |
911 | ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; |
912 | settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1); |
913 | |
914 | static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF; |
915 | settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1); |
916 | |
917 | static const uint8_t sharpnessMapMode = |
918 | ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF; |
919 | settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1); |
920 | |
921 | static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; |
922 | settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1); |
923 | static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; |
924 | settings.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1); |
925 | static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; |
926 | settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1); |
927 | // faceRectangles, faceScores, faceLandmarks, faceIds, histogram, |
928 | // sharpnessMap only in frames |
929 | |
930 | /** android.control */ |
931 | |
932 | uint8_t controlIntent = 0; |
933 | uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; //default value |
934 | uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; |
935 | uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; |
936 | switch (type) { |
937 | case CAMERA3_TEMPLATE_PREVIEW: |
938 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; |
939 | break; |
940 | case CAMERA3_TEMPLATE_STILL_CAPTURE: |
941 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; |
942 | break; |
943 | case CAMERA3_TEMPLATE_VIDEO_RECORD: |
944 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; |
945 | break; |
946 | case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: |
947 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; |
948 | break; |
949 | case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: |
950 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; |
951 | break; |
952 | case CAMERA3_TEMPLATE_MANUAL: |
953 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL; |
954 | controlMode = ANDROID_CONTROL_MODE_OFF; |
955 | aeMode = ANDROID_CONTROL_AE_MODE_OFF; |
956 | awbMode = ANDROID_CONTROL_AWB_MODE_OFF; |
957 | break; |
958 | default: |
959 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; |
960 | break; |
961 | } |
962 | settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); |
963 | settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); |
964 | |
965 | static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; |
966 | settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); |
967 | |
968 | static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; |
969 | settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); |
970 | |
971 | settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); |
972 | |
973 | static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; |
974 | settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); |
975 | |
976 | static const uint8_t aePrecaptureTrigger = |
977 | ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; |
978 | settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1); |
979 | |
980 | static const int32_t mAfTriggerId = 0; |
981 | settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,&mAfTriggerId, 1); |
982 | static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; |
983 | settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1); |
984 | |
985 | static const int32_t controlRegions[5] = { |
986 | 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1], |
987 | 1000 |
988 | }; |
989 | // settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5); |
990 | |
991 | static const int32_t aeExpCompensation = 0; |
992 | settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); |
993 | |
994 | static const int32_t aeTargetFpsRange[2] = { |
995 | 30, 30 |
996 | }; |
997 | settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2); |
998 | |
999 | static const uint8_t aeAntibandingMode = |
1000 | ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; |
1001 | settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1); |
1002 | |
1003 | settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); |
1004 | |
1005 | static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; |
1006 | settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); |
1007 | |
1008 | // settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5); |
1009 | |
1010 | uint8_t afMode = 0; |
1011 | switch (type) { |
1012 | case CAMERA3_TEMPLATE_PREVIEW: |
1013 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1014 | break; |
1015 | case CAMERA3_TEMPLATE_STILL_CAPTURE: |
1016 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1017 | break; |
1018 | case CAMERA3_TEMPLATE_VIDEO_RECORD: |
1019 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1020 | //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; |
1021 | break; |
1022 | case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: |
1023 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1024 | //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; |
1025 | break; |
1026 | case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: |
1027 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1028 | //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; |
1029 | break; |
1030 | case CAMERA3_TEMPLATE_MANUAL: |
1031 | afMode = ANDROID_CONTROL_AF_MODE_OFF; |
1032 | break; |
1033 | default: |
1034 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1035 | break; |
1036 | } |
1037 | settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1); |
1038 | |
1039 | static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE; |
1040 | settings.update(ANDROID_CONTROL_AF_STATE,&afstate,1); |
1041 | |
1042 | // settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5); |
1043 | |
1044 | static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED; |
1045 | settings.update(ANDROID_CONTROL_AE_STATE,&aestate,1); |
1046 | static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
1047 | settings.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1); |
1048 | static const uint8_t vstabMode = |
1049 | ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; |
1050 | settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1); |
1051 | |
1052 | // aeState, awbState, afState only in frame |
1053 | |
1054 | mDefaultTemplates[type] = settings.release(); |
1055 | |
1056 | return mDefaultTemplates[type]; |
1057 | } |
1058 | |
1059 | status_t EmulatedFakeCamera3::processCaptureRequest( |
1060 | camera3_capture_request *request) { |
1061 | status_t res; |
1062 | nsecs_t exposureTime; |
1063 | nsecs_t frameDuration; |
1064 | uint32_t sensitivity; |
1065 | uint32_t frameNumber; |
1066 | bool mHaveThumbnail = false; |
1067 | CameraMetadata settings; |
1068 | Buffers *sensorBuffers = NULL; |
1069 | HalBufferVector *buffers = NULL; |
1070 | { |
1071 | Mutex::Autolock l(mLock); |
1072 | |
1073 | /** Validation */ |
1074 | |
1075 | if (mStatus < STATUS_READY) { |
1076 | ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__, |
1077 | mStatus); |
1078 | return INVALID_OPERATION; |
1079 | } |
1080 | |
1081 | if (request == NULL) { |
1082 | ALOGE("%s: NULL request!", __FUNCTION__); |
1083 | return BAD_VALUE; |
1084 | } |
1085 | |
1086 | frameNumber = request->frame_number; |
1087 | |
1088 | if (request->settings == NULL && mPrevSettings.isEmpty()) { |
1089 | ALOGE("%s: Request %d: NULL settings for first request after" |
1090 | "configureStreams()", __FUNCTION__, frameNumber); |
1091 | return BAD_VALUE; |
1092 | } |
1093 | |
1094 | if (request->input_buffer != NULL && |
1095 | request->input_buffer->stream != mInputStream) { |
1096 | DBG_LOGB("%s: Request %d: Input buffer not from input stream!", |
1097 | __FUNCTION__, frameNumber); |
1098 | DBG_LOGB("%s: Bad stream %p, expected: %p", |
1099 | __FUNCTION__, request->input_buffer->stream, |
1100 | mInputStream); |
1101 | DBG_LOGB("%s: Bad stream type %d, expected stream type %d", |
1102 | __FUNCTION__, request->input_buffer->stream->stream_type, |
1103 | mInputStream ? mInputStream->stream_type : -1); |
1104 | |
1105 | return BAD_VALUE; |
1106 | } |
1107 | |
1108 | if (request->num_output_buffers < 1 || request->output_buffers == NULL) { |
1109 | ALOGE("%s: Request %d: No output buffers provided!", |
1110 | __FUNCTION__, frameNumber); |
1111 | return BAD_VALUE; |
1112 | } |
1113 | |
1114 | // Validate all buffers, starting with input buffer if it's given |
1115 | |
1116 | ssize_t idx; |
1117 | const camera3_stream_buffer_t *b; |
1118 | if (request->input_buffer != NULL) { |
1119 | idx = -1; |
1120 | b = request->input_buffer; |
1121 | } else { |
1122 | idx = 0; |
1123 | b = request->output_buffers; |
1124 | } |
1125 | do { |
1126 | PrivateStreamInfo *priv = |
1127 | static_cast<PrivateStreamInfo*>(b->stream->priv); |
1128 | if (priv == NULL) { |
1129 | ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!", |
1130 | __FUNCTION__, frameNumber, idx); |
1131 | return BAD_VALUE; |
1132 | } |
1133 | #if 0 |
1134 | if (!priv->alive || !priv->registered) { |
1135 | ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream! alive=%d, registered=%d\n", |
1136 | __FUNCTION__, frameNumber, idx, |
1137 | priv->alive, priv->registered); |
1138 | //return BAD_VALUE; |
1139 | } |
1140 | #endif |
1141 | if (b->status != CAMERA3_BUFFER_STATUS_OK) { |
1142 | ALOGE("%s: Request %d: Buffer %zu: Status not OK!", |
1143 | __FUNCTION__, frameNumber, idx); |
1144 | return BAD_VALUE; |
1145 | } |
1146 | if (b->release_fence != -1) { |
1147 | ALOGE("%s: Request %d: Buffer %zu: Has a release fence!", |
1148 | __FUNCTION__, frameNumber, idx); |
1149 | return BAD_VALUE; |
1150 | } |
1151 | if (b->buffer == NULL) { |
1152 | ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!", |
1153 | __FUNCTION__, frameNumber, idx); |
1154 | return BAD_VALUE; |
1155 | } |
1156 | idx++; |
1157 | b = &(request->output_buffers[idx]); |
1158 | } while (idx < (ssize_t)request->num_output_buffers); |
1159 | |
1160 | // TODO: Validate settings parameters |
1161 | |
1162 | /** |
1163 | * Start processing this request |
1164 | */ |
1165 | mStatus = STATUS_ACTIVE; |
1166 | |
1167 | camera_metadata_entry e; |
1168 | |
1169 | if (request->settings == NULL) { |
1170 | settings.acquire(mPrevSettings); |
1171 | } else { |
1172 | settings = request->settings; |
1173 | |
1174 | uint8_t antiBanding = 0; |
1175 | uint8_t effectMode = 0; |
1176 | int exposureCmp = 0; |
1177 | |
1178 | e = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE); |
1179 | if (e.count == 0) { |
1180 | ALOGE("%s: No antibanding entry!", __FUNCTION__); |
1181 | return BAD_VALUE; |
1182 | } |
1183 | antiBanding = e.data.u8[0]; |
1184 | mSensor->setAntiBanding(antiBanding); |
1185 | |
1186 | e = settings.find(ANDROID_CONTROL_EFFECT_MODE); |
1187 | if (e.count == 0) { |
1188 | ALOGE("%s: No antibanding entry!", __FUNCTION__); |
1189 | return BAD_VALUE; |
1190 | } |
1191 | effectMode = e.data.u8[0]; |
1192 | mSensor->setEffect(effectMode); |
1193 | |
1194 | e = settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION); |
1195 | if (e.count == 0) { |
1196 | ALOGE("%s: No exposure entry!", __FUNCTION__); |
1197 | //return BAD_VALUE; |
1198 | } else { |
1199 | exposureCmp = e.data.i32[0]; |
1200 | DBG_LOGB("set expsore compensaton %d\n", exposureCmp); |
1201 | mSensor->setExposure(exposureCmp); |
1202 | } |
1203 | |
1204 | int32_t cropRegion[4]; |
1205 | int32_t cropWidth; |
1206 | int32_t outputWidth = request->output_buffers[0].stream->width; |
1207 | |
1208 | e = settings.find(ANDROID_SCALER_CROP_REGION); |
1209 | if (e.count == 0) { |
1210 | ALOGE("%s: No corp region entry!", __FUNCTION__); |
1211 | //return BAD_VALUE; |
1212 | } else { |
1213 | cropRegion[0] = e.data.i32[0]; |
1214 | cropRegion[1] = e.data.i32[1]; |
1215 | cropWidth = cropRegion[2] = e.data.i32[2]; |
1216 | cropRegion[3] = e.data.i32[3]; |
1217 | for (int i = mZoomMin; i <= mZoomMax; i += mZoomStep) { |
1218 | //if ( (float) i / mZoomMin >= (float) outputWidth / cropWidth) { |
1219 | if ( i * cropWidth >= outputWidth * mZoomMin ) { |
1220 | mSensor->setZoom(i); |
1221 | break; |
1222 | } |
1223 | } |
1224 | DBG_LOGB("cropRegion:%d, %d, %d, %d\n", cropRegion[0], cropRegion[1],cropRegion[2],cropRegion[3]); |
1225 | } |
1226 | } |
1227 | |
1228 | uint8_t len[] = {1}; |
1229 | settings.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1); |
1230 | |
1231 | uint8_t maxlen[] = {0}; |
1232 | settings.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1); |
1233 | |
1234 | res = process3A(settings); |
1235 | if (res != OK) { |
1236 | CAMHAL_LOGDB("%s: process3A failed!", __FUNCTION__); |
1237 | //return res; |
1238 | } |
1239 | |
1240 | // TODO: Handle reprocessing |
1241 | |
1242 | /** |
1243 | * Get ready for sensor config |
1244 | */ |
1245 | |
1246 | bool needJpeg = false; |
1247 | ssize_t jpegbuffersize; |
1248 | uint32_t jpegpixelfmt; |
1249 | |
1250 | exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; |
1251 | frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; |
1252 | sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; |
1253 | |
1254 | sensorBuffers = new Buffers(); |
1255 | buffers = new HalBufferVector(); |
1256 | |
1257 | sensorBuffers->setCapacity(request->num_output_buffers); |
1258 | buffers->setCapacity(request->num_output_buffers); |
1259 | |
1260 | // Process all the buffers we got for output, constructing internal buffer |
1261 | // structures for them, and lock them for writing. |
1262 | for (size_t i = 0; i < request->num_output_buffers; i++) { |
1263 | const camera3_stream_buffer &srcBuf = request->output_buffers[i]; |
1264 | const private_handle_t *privBuffer = |
1265 | (const private_handle_t*)(*srcBuf.buffer); |
1266 | StreamBuffer destBuf; |
1267 | destBuf.streamId = kGenericStreamId; |
1268 | destBuf.width = srcBuf.stream->width; |
1269 | destBuf.height = srcBuf.stream->height; |
1270 | destBuf.format = privBuffer->format; // Use real private format |
1271 | destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc |
1272 | destBuf.buffer = srcBuf.buffer; |
1273 | destBuf.share_fd = privBuffer->share_fd; |
1274 | |
1275 | if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) { |
1276 | needJpeg = true; |
1277 | memset(&info,0,sizeof(struct ExifInfo)); |
1278 | info.orientation = settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; |
1279 | jpegpixelfmt = mSensor->getOutputFormat(); |
1280 | if (!mSupportRotate) { |
1281 | info.mainwidth = srcBuf.stream->width; |
1282 | info.mainheight = srcBuf.stream->height; |
1283 | } else { |
1284 | if ((info.orientation == 90) || (info.orientation == 270)) { |
1285 | info.mainwidth = srcBuf.stream->height; |
1286 | info.mainheight = srcBuf.stream->width; |
1287 | } else { |
1288 | info.mainwidth = srcBuf.stream->width; |
1289 | info.mainheight = srcBuf.stream->height; |
1290 | } |
1291 | } |
1292 | if ((jpegpixelfmt == V4L2_PIX_FMT_MJPEG) || (jpegpixelfmt == V4L2_PIX_FMT_YUYV)) { |
1293 | mSensor->setOutputFormat(info.mainwidth,info.mainheight,jpegpixelfmt,1); |
1294 | } else { |
1295 | mSensor->setOutputFormat(info.mainwidth,info.mainheight,V4L2_PIX_FMT_RGB24,1); |
1296 | } |
1297 | } |
1298 | |
1299 | // Wait on fence |
1300 | sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence); |
1301 | res = bufferAcquireFence->wait(kFenceTimeoutMs); |
1302 | if (res == TIMED_OUT) { |
1303 | ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms", |
1304 | __FUNCTION__, frameNumber, i, kFenceTimeoutMs); |
1305 | } |
1306 | if (res == OK) { |
1307 | // Lock buffer for writing |
1308 | const Rect rect(destBuf.width, destBuf.height); |
1309 | if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) { |
1310 | if (privBuffer->format == HAL_PIXEL_FORMAT_YCbCr_420_888/*HAL_PIXEL_FORMAT_YCrCb_420_SP*/) { |
1311 | android_ycbcr ycbcr = android_ycbcr(); |
1312 | res = GraphicBufferMapper::get().lockYCbCr( |
1313 | *(destBuf.buffer), |
1314 | GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK, |
1315 | rect, |
1316 | &ycbcr); |
1317 | // This is only valid because we know that emulator's |
1318 | // YCbCr_420_888 is really contiguous NV21 under the hood |
1319 | destBuf.img = static_cast<uint8_t*>(ycbcr.y); |
1320 | } else { |
1321 | ALOGE("Unexpected private format for flexible YUV: 0x%x", |
1322 | privBuffer->format); |
1323 | res = INVALID_OPERATION; |
1324 | } |
1325 | } else { |
1326 | res = GraphicBufferMapper::get().lock(*(destBuf.buffer), |
1327 | GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK, |
1328 | rect, |
1329 | (void**)&(destBuf.img)); |
1330 | } |
1331 | if (res != OK) { |
1332 | ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer", |
1333 | __FUNCTION__, frameNumber, i); |
1334 | } |
1335 | } |
1336 | |
1337 | if (res != OK) { |
1338 | // Either waiting or locking failed. Unlock locked buffers and bail |
1339 | // out. |
1340 | for (size_t j = 0; j < i; j++) { |
1341 | GraphicBufferMapper::get().unlock( |
1342 | *(request->output_buffers[i].buffer)); |
1343 | } |
1344 | ALOGE("line:%d, format for this usage: %d x %d, usage %x, format=%x, returned\n", |
1345 | __LINE__, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format); |
1346 | return NO_INIT; |
1347 | } |
1348 | sensorBuffers->push_back(destBuf); |
1349 | buffers->push_back(srcBuf); |
1350 | } |
1351 | |
1352 | if (needJpeg) { |
1353 | if (!mSupportRotate) { |
1354 | info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; |
1355 | info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; |
1356 | } else { |
1357 | if ((info.orientation == 90) || (info.orientation == 270)) { |
1358 | info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; |
1359 | info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; |
1360 | } else { |
1361 | info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; |
1362 | info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; |
1363 | } |
1364 | } |
1365 | if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { |
1366 | info.latitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[0]; |
1367 | info.longitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[1]; |
1368 | info.altitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[2]; |
1369 | info.has_latitude = true; |
1370 | info.has_longitude = true; |
1371 | info.has_altitude = true; |
1372 | } else { |
1373 | info.has_latitude = false; |
1374 | info.has_longitude = false; |
1375 | info.has_altitude = false; |
1376 | } |
1377 | if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { |
1378 | uint8_t * gpsString = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8; |
1379 | memcpy(info.gpsProcessingMethod, gpsString , sizeof(info.gpsProcessingMethod)-1); |
1380 | info.has_gpsProcessingMethod = true; |
1381 | } else { |
1382 | info.has_gpsProcessingMethod = false; |
1383 | } |
1384 | if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { |
1385 | info.gpsTimestamp = settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; |
1386 | info.has_gpsTimestamp = true; |
1387 | } else { |
1388 | info.has_gpsTimestamp = false; |
1389 | } |
1390 | if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { |
1391 | info.focallen = settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; |
1392 | info.has_focallen = true; |
1393 | } else { |
1394 | info.has_focallen = false; |
1395 | } |
1396 | jpegbuffersize = getJpegBufferSize(info.mainwidth,info.mainheight); |
1397 | |
1398 | mJpegCompressor->SetMaxJpegBufferSize(jpegbuffersize); |
1399 | mJpegCompressor->SetExifInfo(info); |
1400 | mSensor->setPictureRotate(info.orientation); |
1401 | if ((info.thumbwidth > 0) && (info.thumbheight > 0)) { |
1402 | mHaveThumbnail = true; |
1403 | } |
1404 | DBG_LOGB("%s::thumbnailSize_width=%d,thumbnailSize_height=%d,mainsize_width=%d,mainsize_height=%d,jpegOrientation=%d",__FUNCTION__, |
1405 | info.thumbwidth,info.thumbheight,info.mainwidth,info.mainheight,info.orientation); |
1406 | } |
1407 | /** |
1408 | * Wait for JPEG compressor to not be busy, if needed |
1409 | */ |
1410 | #if 0 |
1411 | if (needJpeg) { |
1412 | bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs); |
1413 | if (!ready) { |
1414 | ALOGE("%s: Timeout waiting for JPEG compression to complete!", |
1415 | __FUNCTION__); |
1416 | return NO_INIT; |
1417 | } |
1418 | } |
1419 | #else |
1420 | while (needJpeg) { |
1421 | bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs); |
1422 | if (ready) { |
1423 | break; |
1424 | } |
1425 | } |
1426 | #endif |
1427 | } |
1428 | /** |
1429 | * Wait until the in-flight queue has room |
1430 | */ |
1431 | res = mReadoutThread->waitForReadout(); |
1432 | if (res != OK) { |
1433 | ALOGE("%s: Timeout waiting for previous requests to complete!", |
1434 | __FUNCTION__); |
1435 | return NO_INIT; |
1436 | } |
1437 | |
1438 | /** |
1439 | * Wait until sensor's ready. This waits for lengthy amounts of time with |
1440 | * mLock held, but the interface spec is that no other calls may by done to |
1441 | * the HAL by the framework while process_capture_request is happening. |
1442 | */ |
1443 | { |
1444 | Mutex::Autolock l(mLock); |
1445 | int syncTimeoutCount = 0; |
1446 | while (!mSensor->waitForVSync(kSyncWaitTimeout)) { |
1447 | if (mStatus == STATUS_ERROR) { |
1448 | return NO_INIT; |
1449 | } |
1450 | if (syncTimeoutCount == kMaxSyncTimeoutCount) { |
1451 | ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms", |
1452 | __FUNCTION__, frameNumber, |
1453 | kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000); |
1454 | return NO_INIT; |
1455 | } |
1456 | syncTimeoutCount++; |
1457 | } |
1458 | |
1459 | /** |
1460 | * Configure sensor and queue up the request to the readout thread |
1461 | */ |
1462 | mSensor->setExposureTime(exposureTime); |
1463 | mSensor->setFrameDuration(frameDuration); |
1464 | mSensor->setSensitivity(sensitivity); |
1465 | mSensor->setDestinationBuffers(sensorBuffers); |
1466 | mSensor->setFrameNumber(request->frame_number); |
1467 | |
1468 | ReadoutThread::Request r; |
1469 | r.frameNumber = request->frame_number; |
1470 | r.settings = settings; |
1471 | r.sensorBuffers = sensorBuffers; |
1472 | r.buffers = buffers; |
1473 | r.havethumbnail = mHaveThumbnail; |
1474 | |
1475 | mReadoutThread->queueCaptureRequest(r); |
1476 | ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number); |
1477 | |
1478 | // Cache the settings for next time |
1479 | mPrevSettings.acquire(settings); |
1480 | } |
1481 | CAMHAL_LOGDB("%s , X" , __FUNCTION__); |
1482 | return OK; |
1483 | } |
1484 | |
1485 | /** Debug methods */ |
1486 | |
1487 | void EmulatedFakeCamera3::dump(int fd) { |
1488 | |
1489 | String8 result; |
1490 | uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]); |
1491 | result = String8::format("%s, valid resolution\n", __FILE__); |
1492 | |
1493 | for (uint32_t f = 0; f < count; f+=2) { |
1494 | if (mAvailableJpegSize[f] == 0) |
1495 | break; |
1496 | result.appendFormat("width: %d , height =%d\n", |
1497 | mAvailableJpegSize[f], mAvailableJpegSize[f+1]); |
1498 | } |
1499 | result.appendFormat("\nmZoomMin: %d , mZoomMax =%d, mZoomStep=%d\n", |
1500 | mZoomMin, mZoomMax, mZoomStep); |
1501 | |
1502 | if (mZoomStep <= 0) { |
1503 | result.appendFormat("!!!!!!!!!camera apk may have no picture out\n"); |
1504 | } |
1505 | |
1506 | write(fd, result.string(), result.size()); |
1507 | |
1508 | if (mSensor.get() != NULL) { |
1509 | mSensor->dump(fd); |
1510 | } |
1511 | |
1512 | } |
1513 | //flush all request |
1514 | //TODO returned buffers every request held immediately with |
1515 | //CAMERA3_BUFFER_STATUS_ERROR flag. |
1516 | int EmulatedFakeCamera3::flush_all_requests() { |
1517 | DBG_LOGA("flush all request"); |
1518 | return 0; |
1519 | } |
1520 | /** Tag query methods */ |
1521 | const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) { |
1522 | return NULL; |
1523 | } |
1524 | |
1525 | const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) { |
1526 | return NULL; |
1527 | } |
1528 | |
1529 | int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) { |
1530 | return 0; |
1531 | } |
1532 | |
1533 | /** |
1534 | * Private methods |
1535 | */ |
1536 | |
1537 | camera_metadata_ro_entry_t EmulatedFakeCamera3::staticInfo(const CameraMetadata *info, uint32_t tag, |
1538 | size_t minCount, size_t maxCount, bool required) const { |
1539 | |
1540 | camera_metadata_ro_entry_t entry = info->find(tag); |
1541 | |
1542 | if (CC_UNLIKELY( entry.count == 0 ) && required) { |
1543 | const char* tagSection = get_camera_metadata_section_name(tag); |
1544 | if (tagSection == NULL) tagSection = "<unknown>"; |
1545 | const char* tagName = get_camera_metadata_tag_name(tag); |
1546 | if (tagName == NULL) tagName = "<unknown>"; |
1547 | |
1548 | ALOGE("Error finding static metadata entry '%s.%s' (%x)", |
1549 | tagSection, tagName, tag); |
1550 | } else if (CC_UNLIKELY( |
1551 | (minCount != 0 && entry.count < minCount) || |
1552 | (maxCount != 0 && entry.count > maxCount) ) ) { |
1553 | const char* tagSection = get_camera_metadata_section_name(tag); |
1554 | if (tagSection == NULL) tagSection = "<unknown>"; |
1555 | const char* tagName = get_camera_metadata_tag_name(tag); |
1556 | if (tagName == NULL) tagName = "<unknown>"; |
1557 | ALOGE("Malformed static metadata entry '%s.%s' (%x):" |
1558 | "Expected between %zu and %zu values, but got %zu values", |
1559 | tagSection, tagName, tag, minCount, maxCount, entry.count); |
1560 | } |
1561 | |
1562 | return entry; |
1563 | } |
1564 | |
1565 | //this is only for debug |
1566 | void EmulatedFakeCamera3::getStreamConfigurationp(CameraMetadata *info) { |
1567 | const int STREAM_CONFIGURATION_SIZE = 4; |
1568 | const int STREAM_FORMAT_OFFSET = 0; |
1569 | const int STREAM_WIDTH_OFFSET = 1; |
1570 | const int STREAM_HEIGHT_OFFSET = 2; |
1571 | const int STREAM_IS_INPUT_OFFSET = 3; |
1572 | |
1573 | camera_metadata_ro_entry_t availableStreamConfigs = |
1574 | staticInfo(info, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS); |
1575 | CAMHAL_LOGDB(" stream, availableStreamConfigs.count=%d\n", availableStreamConfigs.count); |
1576 | |
1577 | for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) { |
1578 | int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET]; |
1579 | int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET]; |
1580 | int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET]; |
1581 | int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET]; |
1582 | CAMHAL_LOGDB("f=%x, w*h=%dx%d, du=%d\n", format, width, height, isInput); |
1583 | } |
1584 | |
1585 | } |
1586 | |
1587 | //this is only for debug |
1588 | void EmulatedFakeCamera3::getStreamConfigurationDurations(CameraMetadata *info) { |
1589 | const int STREAM_CONFIGURATION_SIZE = 4; |
1590 | const int STREAM_FORMAT_OFFSET = 0; |
1591 | const int STREAM_WIDTH_OFFSET = 1; |
1592 | const int STREAM_HEIGHT_OFFSET = 2; |
1593 | const int STREAM_IS_INPUT_OFFSET = 3; |
1594 | |
1595 | camera_metadata_ro_entry_t availableStreamConfigs = |
1596 | staticInfo(info, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS); |
1597 | CAMHAL_LOGDB("availableStreamConfigs.count=%d\n", availableStreamConfigs.count); |
1598 | |
1599 | for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) { |
1600 | int64_t format = availableStreamConfigs.data.i64[i + STREAM_FORMAT_OFFSET]; |
1601 | int64_t width = availableStreamConfigs.data.i64[i + STREAM_WIDTH_OFFSET]; |
1602 | int64_t height = availableStreamConfigs.data.i64[i + STREAM_HEIGHT_OFFSET]; |
1603 | int64_t isInput = availableStreamConfigs.data.i64[i + STREAM_IS_INPUT_OFFSET]; |
1604 | CAMHAL_LOGDB("f=%llx, w*h=%lldx%lld, du=%lld\n", format, width, height, isInput); |
1605 | } |
1606 | } |
1607 | |
// Intentionally empty: hook for device-specific adjustments to the camera
// metadata. No overrides are applied for the fake camera.
// NOTE(review): presumably kept for interface symmetry with real HALs —
// confirm before removing.
void EmulatedFakeCamera3::updateCameraMetaData(CameraMetadata *info) {

}
1611 | |
1612 | status_t EmulatedFakeCamera3::constructStaticInfo() { |
1613 | |
1614 | status_t ret = OK; |
1615 | CameraMetadata info; |
1616 | uint32_t picSizes[64 * 8]; |
1617 | int64_t* duration = NULL; |
1618 | int count, duration_count, availablejpegsize; |
1619 | uint8_t maxCount = 10; |
1620 | char property[PROPERTY_VALUE_MAX]; |
1621 | unsigned int supportrotate; |
1622 | availablejpegsize = ARRAY_SIZE(mAvailableJpegSize); |
1623 | memset(mAvailableJpegSize,0,(sizeof(uint32_t))*availablejpegsize); |
1624 | sp<Sensor> s = new Sensor(); |
1625 | ret = s->startUp(mCameraID); |
1626 | if (ret != OK) { |
1627 | DBG_LOGA("sensor start up failed"); |
1628 | return ret; |
1629 | } |
1630 | |
1631 | mSensorType = s->getSensorType(); |
1632 | |
1633 | if ( mSensorType == SENSOR_USB) { |
1634 | char property[PROPERTY_VALUE_MAX]; |
1635 | property_get("rw.camera.usb.faceback", property, "false"); |
1636 | if (strstr(property, "true")) |
1637 | mFacingBack = 1; |
1638 | else |
1639 | mFacingBack = 0; |
1640 | ALOGI("Setting usb camera cameraID:%d to back camera:%s\n", |
1641 | mCameraID, property); |
1642 | } else { |
1643 | if (s->mSensorFace == SENSOR_FACE_FRONT) { |
1644 | mFacingBack = 0; |
1645 | } else if (s->mSensorFace == SENSOR_FACE_BACK) { |
1646 | mFacingBack = 1; |
1647 | } else if (s->mSensorFace == SENSOR_FACE_NONE) { |
1648 | if (gEmulatedCameraFactory.getEmulatedCameraNum() == 1) { |
1649 | mFacingBack = 1; |
1650 | } else if ( mCameraID == 0) { |
1651 | mFacingBack = 1; |
1652 | } else { |
1653 | mFacingBack = 0; |
1654 | } |
1655 | } |
1656 | |
1657 | ALOGI("Setting on board camera cameraID:%d to back camera:%d[0 false, 1 true]\n", |
1658 | mCameraID, mFacingBack); |
1659 | } |
1660 | |
1661 | mSupportCap = s->IoctlStateProbe(); |
1662 | if (mSupportCap & IOCTL_MASK_ROTATE) { |
1663 | supportrotate = true; |
1664 | } else { |
1665 | supportrotate = false; |
1666 | } |
1667 | // android.lens |
1668 | |
1669 | // 5 cm min focus distance for back camera, infinity (fixed focus) for front |
1670 | // TODO read this ioctl from camera driver |
1671 | DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo); |
1672 | const float minFocusDistance = 0.0; |
1673 | info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, |
1674 | &minFocusDistance, 1); |
1675 | |
1676 | // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front |
1677 | const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0; |
1678 | info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, |
1679 | &minFocusDistance, 1); |
1680 | |
1681 | static const float focalLength = 3.30f; // mm |
1682 | info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, |
1683 | &focalLength, 1); |
1684 | static const float aperture = 2.8f; |
1685 | info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, |
1686 | &aperture, 1); |
1687 | static const float filterDensity = 0; |
1688 | info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, |
1689 | &filterDensity, 1); |
1690 | static const uint8_t availableOpticalStabilization = |
1691 | ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; |
1692 | info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, |
1693 | &availableOpticalStabilization, 1); |
1694 | |
1695 | static const int32_t lensShadingMapSize[] = {1, 1}; |
1696 | info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize, |
1697 | sizeof(lensShadingMapSize)/sizeof(int32_t)); |
1698 | |
1699 | uint8_t lensFacing = mFacingBack ? |
1700 | ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; |
1701 | info.update(ANDROID_LENS_FACING, &lensFacing, 1); |
1702 | |
1703 | float lensPosition[3]; |
1704 | if (mFacingBack) { |
1705 | // Back-facing camera is center-top on device |
1706 | lensPosition[0] = 0; |
1707 | lensPosition[1] = 20; |
1708 | lensPosition[2] = -5; |
1709 | } else { |
1710 | // Front-facing camera is center-right on device |
1711 | lensPosition[0] = 20; |
1712 | lensPosition[1] = 20; |
1713 | lensPosition[2] = 0; |
1714 | } |
1715 | info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/ |
1716 | sizeof(float)); |
1717 | static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED; |
1718 | info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1); |
1719 | |
1720 | // android.sensor |
1721 | |
1722 | static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
1723 | info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1); |
1724 | static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
1725 | info.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1); |
1726 | info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, |
1727 | Sensor::kExposureTimeRange, 2); |
1728 | |
1729 | info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, |
1730 | &Sensor::kFrameDurationRange[1], 1); |
1731 | |
1732 | info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, |
1733 | Sensor::kSensitivityRange, |
1734 | sizeof(Sensor::kSensitivityRange) |
1735 | /sizeof(int32_t)); |
1736 | |
1737 | info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, |
1738 | &Sensor::kColorFilterArrangement, 1); |
1739 | |
1740 | static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm |
1741 | info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, |
1742 | sensorPhysicalSize, 2); |
1743 | |
1744 | info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, |
1745 | (int32_t*)Sensor::kResolution, 2); |
1746 | |
1747 | //(int32_t*)Sensor::kResolution, 2); |
1748 | |
1749 | info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, |
1750 | (int32_t*)&Sensor::kMaxRawValue, 1); |
1751 | |
1752 | static const int32_t blackLevelPattern[4] = { |
1753 | (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel, |
1754 | (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel |
1755 | }; |
1756 | info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, |
1757 | blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t)); |
1758 | |
1759 | static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN; |
1760 | info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, ×tampSource, 1); |
1761 | if (mSensorType == SENSOR_USB) { |
1762 | if (mFacingBack) { |
1763 | property_get("hw.camera.orientation.back", property, "0"); |
1764 | } else { |
1765 | property_get("hw.camera.orientation.front", property, "0"); |
1766 | } |
1767 | int32_t orientation = atoi(property); |
1768 | property_get("hw.camera.usb.orientation_offset", property, "0"); |
1769 | orientation += atoi(property); |
1770 | orientation %= 360; |
1771 | info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1); |
1772 | } else { |
1773 | if (mFacingBack) { |
1774 | property_get("hw.camera.orientation.back", property, "270"); |
1775 | const int32_t orientation = atoi(property); |
1776 | info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1); |
1777 | } else { |
1778 | property_get("hw.camera.orientation.front", property, "90"); |
1779 | const int32_t orientation = atoi(property); |
1780 | info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1); |
1781 | } |
1782 | } |
1783 | |
1784 | static const int64_t rollingShutterSkew = 0; |
1785 | info.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1); |
1786 | |
1787 | //TODO: sensor color calibration fields |
1788 | |
1789 | // android.flash |
1790 | static const uint8_t flashAvailable = 0; |
1791 | info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1); |
1792 | |
1793 | static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE; |
1794 | info.update(ANDROID_FLASH_STATE, &flashstate, 1); |
1795 | |
1796 | static const int64_t flashChargeDuration = 0; |
1797 | info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1); |
1798 | |
1799 | /** android.noise */ |
1800 | static const uint8_t availableNBModes = ANDROID_NOISE_REDUCTION_MODE_OFF; |
1801 | info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &availableNBModes, 1); |
1802 | |
1803 | // android.tonemap |
1804 | |
1805 | static const int32_t tonemapCurvePoints = 128; |
1806 | info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1); |
1807 | |
1808 | // android.scaler |
1809 | |
1810 | static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; |
1811 | info.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); |
1812 | |
1813 | info.update(ANDROID_SCALER_AVAILABLE_FORMATS, |
1814 | kAvailableFormats, |
1815 | sizeof(kAvailableFormats)/sizeof(int32_t)); |
1816 | |
1817 | info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, |
1818 | (int64_t*)kAvailableRawMinDurations, |
1819 | sizeof(kAvailableRawMinDurations)/sizeof(uint64_t)); |
1820 | |
1821 | //for version 3.2 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS |
1822 | count = sizeof(picSizes)/sizeof(picSizes[0]); |
1823 | count = s->getStreamConfigurations(picSizes, kAvailableFormats, count); |
1824 | |
1825 | info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, |
1826 | (int32_t*)picSizes, count); |
1827 | |
1828 | if (count < availablejpegsize) { |
1829 | availablejpegsize = count; |
1830 | } |
1831 | getValidJpegSize(picSizes,mAvailableJpegSize,availablejpegsize); |
1832 | |
1833 | maxJpegResolution = getMaxJpegResolution(picSizes,count); |
1834 | int32_t full_size[4]; |
1835 | if (mFacingBack) { |
1836 | full_size[0] = 0; |
1837 | full_size[1] = 0; |
1838 | full_size[2] = maxJpegResolution.width; |
1839 | full_size[3] = maxJpegResolution.height; |
1840 | } else { |
1841 | full_size[0] = 0; |
1842 | full_size[1] = 0; |
1843 | full_size[2] = maxJpegResolution.width; |
1844 | full_size[3] = maxJpegResolution.height; |
1845 | } |
1846 | info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, |
1847 | (int32_t*)full_size, |
1848 | sizeof(full_size)/sizeof(full_size[0])); |
1849 | duration = new int64_t[count]; |
1850 | if (duration == NULL) { |
1851 | DBG_LOGA("allocate memory for duration failed"); |
1852 | return NO_MEMORY; |
1853 | } else { |
1854 | memset(duration,0,sizeof(int64_t)*count); |
1855 | } |
1856 | duration_count = s->getStreamConfigurationDurations(picSizes, duration , count); |
1857 | |
1858 | info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, |
1859 | duration, duration_count); |
1860 | info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, |
1861 | duration, duration_count); |
1862 | |
1863 | info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, |
1864 | (int64_t*)kAvailableProcessedMinDurations, |
1865 | sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t)); |
1866 | |
1867 | info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, |
1868 | (int64_t*)kAvailableJpegMinDurations, |
1869 | sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t)); |
1870 | |
1871 | |
1872 | // android.jpeg |
1873 | |
1874 | static const int32_t jpegThumbnailSizes[] = { |
1875 | 0, 0, |
1876 | 160, 120, |
1877 | 320, 240 |
1878 | }; |
1879 | info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, |
1880 | jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t)); |
1881 | |
1882 | static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize; |
1883 | info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); |
1884 | |
1885 | // android.stats |
1886 | |
1887 | static const uint8_t availableFaceDetectModes[] = { |
1888 | ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, |
1889 | ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, |
1890 | ANDROID_STATISTICS_FACE_DETECT_MODE_FULL |
1891 | }; |
1892 | |
1893 | info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, |
1894 | availableFaceDetectModes, |
1895 | sizeof(availableFaceDetectModes)); |
1896 | |
1897 | static const int32_t maxFaceCount = 8; |
1898 | info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, |
1899 | &maxFaceCount, 1); |
1900 | |
1901 | static const int32_t histogramSize = 64; |
1902 | info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, |
1903 | &histogramSize, 1); |
1904 | |
1905 | static const int32_t maxHistogramCount = 1000; |
1906 | info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, |
1907 | &maxHistogramCount, 1); |
1908 | |
1909 | static const int32_t sharpnessMapSize[2] = {64, 64}; |
1910 | info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, |
1911 | sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t)); |
1912 | |
1913 | static const int32_t maxSharpnessMapValue = 1000; |
1914 | info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, |
1915 | &maxSharpnessMapValue, 1); |
1916 | static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; |
1917 | info.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1); |
1918 | |
1919 | static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; |
1920 | info.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1); |
1921 | static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; |
1922 | info.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1); |
1923 | // android.control |
1924 | |
1925 | static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; |
1926 | info.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); |
1927 | |
1928 | static const uint8_t availableSceneModes[] = { |
1929 | // ANDROID_CONTROL_SCENE_MODE_DISABLED, |
1930 | ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY |
1931 | }; |
1932 | info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, |
1933 | availableSceneModes, sizeof(availableSceneModes)); |
1934 | |
1935 | static const uint8_t availableEffects[] = { |
1936 | ANDROID_CONTROL_EFFECT_MODE_OFF |
1937 | }; |
1938 | info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, |
1939 | availableEffects, sizeof(availableEffects)); |
1940 | |
1941 | static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0}; |
1942 | info.update(ANDROID_CONTROL_MAX_REGIONS, |
1943 | max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0])); |
1944 | |
1945 | static const uint8_t availableAeModes[] = { |
1946 | ANDROID_CONTROL_AE_MODE_OFF, |
1947 | ANDROID_CONTROL_AE_MODE_ON |
1948 | }; |
1949 | info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, |
1950 | availableAeModes, sizeof(availableAeModes)); |
1951 | |
1952 | |
1953 | static const int32_t availableTargetFpsRanges[] = { |
1954 | 5, 15, 15, 15, 5, 25, 25, 25, 5, 30, 30, 30, |
1955 | }; |
1956 | info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, |
1957 | availableTargetFpsRanges, |
1958 | sizeof(availableTargetFpsRanges)/sizeof(int32_t)); |
1959 | |
1960 | uint8_t awbModes[maxCount]; |
1961 | count = s->getAWB(awbModes, maxCount); |
1962 | if (count < 0) { |
1963 | static const uint8_t availableAwbModes[] = { |
1964 | ANDROID_CONTROL_AWB_MODE_OFF, |
1965 | ANDROID_CONTROL_AWB_MODE_AUTO, |
1966 | ANDROID_CONTROL_AWB_MODE_INCANDESCENT, |
1967 | ANDROID_CONTROL_AWB_MODE_FLUORESCENT, |
1968 | ANDROID_CONTROL_AWB_MODE_DAYLIGHT, |
1969 | ANDROID_CONTROL_AWB_MODE_SHADE |
1970 | }; |
1971 | info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, |
1972 | availableAwbModes, sizeof(availableAwbModes)); |
1973 | } else { |
1974 | DBG_LOGB("getAWB %d ",count); |
1975 | info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, |
1976 | awbModes, count); |
1977 | } |
1978 | |
1979 | static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE; |
1980 | info.update(ANDROID_CONTROL_AF_STATE,&afstate,1); |
1981 | |
1982 | static const uint8_t availableAfModesFront[] = { |
1983 | ANDROID_CONTROL_AF_MODE_OFF |
1984 | }; |
1985 | |
1986 | if (mFacingBack) { |
1987 | uint8_t afMode[maxCount]; |
1988 | count = s->getAutoFocus(afMode, maxCount); |
1989 | if (count < 0) { |
1990 | static const uint8_t availableAfModesBack[] = { |
1991 | ANDROID_CONTROL_AF_MODE_OFF, |
1992 | //ANDROID_CONTROL_AF_MODE_AUTO, |
1993 | //ANDROID_CONTROL_AF_MODE_MACRO, |
1994 | //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, |
1995 | //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, |
1996 | }; |
1997 | |
1998 | info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, |
1999 | availableAfModesBack, sizeof(availableAfModesBack)); |
2000 | } else { |
2001 | info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, |
2002 | afMode, count); |
2003 | } |
2004 | } else { |
2005 | info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, |
2006 | availableAfModesFront, sizeof(availableAfModesFront)); |
2007 | } |
2008 | |
2009 | uint8_t antiBanding[maxCount]; |
2010 | count = s->getAntiBanding(antiBanding, maxCount); |
2011 | if (count < 0) { |
2012 | static const uint8_t availableAntibanding[] = { |
2013 | ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, |
2014 | ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, |
2015 | }; |
2016 | info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, |
2017 | availableAntibanding, sizeof(availableAntibanding)); |
2018 | } else { |
2019 | info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, |
2020 | antiBanding, count); |
2021 | } |
2022 | |
2023 | camera_metadata_rational step; |
2024 | int maxExp, minExp, def; |
2025 | ret = s->getExposure(&maxExp, &minExp, &def, &step); |
2026 | if (ret < 0) { |
2027 | static const int32_t aeExpCompensation = 0; |
2028 | info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); |
2029 | |
2030 | static const camera_metadata_rational exposureCompensationStep = { |
2031 | 1, 3 |
2032 | }; |
2033 | info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, |
2034 | &exposureCompensationStep, 1); |
2035 | |
2036 | int32_t exposureCompensationRange[] = {0, 0}; |
2037 | info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, |
2038 | exposureCompensationRange, |
2039 | sizeof(exposureCompensationRange)/sizeof(int32_t)); |
2040 | } else { |
2041 | DBG_LOGB("exposure compensation support:(%d, %d)\n", minExp, maxExp); |
2042 | int32_t exposureCompensationRange[] = {minExp, maxExp}; |
2043 | info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, |
2044 | exposureCompensationRange, |
2045 | sizeof(exposureCompensationRange)/sizeof(int32_t)); |
2046 | info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, |
2047 | &step, 1); |
2048 | info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &def, 1); |
2049 | } |
2050 | |
2051 | ret = s->getZoom(&mZoomMin, &mZoomMax, &mZoomStep); |
2052 | if (ret < 0) { |
2053 | float maxZoom = 1.0; |
2054 | info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, |
2055 | &maxZoom, 1); |
2056 | } else { |
2057 | float maxZoom = mZoomMax / mZoomMin; |
2058 | info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, |
2059 | &maxZoom, 1); |
2060 | } |
2061 | |
2062 | static const uint8_t availableVstabModes[] = { |
2063 | ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF |
2064 | }; |
2065 | info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, |
2066 | availableVstabModes, sizeof(availableVstabModes)); |
2067 | |
2068 | static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED; |
2069 | info.update(ANDROID_CONTROL_AE_STATE,&aestate,1); |
2070 | static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
2071 | info.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1); |
2072 | // android.info |
2073 | const uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; |
2074 | //mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL : |
2075 | // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; |
2076 | info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, |
2077 | &supportedHardwareLevel, |
2078 | /*count*/1); |
2079 | |
2080 | int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN; |
2081 | info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1); |
2082 | |
2083 | uint8_t len[] = {1}; |
2084 | info.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1); |
2085 | |
2086 | uint8_t maxlen[] = {2}; |
2087 | info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1); |
2088 | uint8_t cap[] = { |
2089 | ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE, |
2090 | }; |
2091 | info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, |
2092 | (uint8_t *)cap, sizeof(cap)/sizeof(cap[0])); |
2093 | |
2094 | |
2095 | int32_t partialResultCount = 1; |
2096 | info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,&partialResultCount,1); |
2097 | int32_t maxNumOutputStreams[3] = {0,2,1}; |
2098 | info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,maxNumOutputStreams,3); |
2099 | uint8_t aberrationMode[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF}; |
2100 | info.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, |
2101 | aberrationMode, 1); |
2102 | info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, |
2103 | aberrationMode, 1); |
2104 | |
2105 | getAvailableChKeys(&info, supportedHardwareLevel); |
2106 | |
2107 | if (mCameraInfo != NULL) { |
2108 | CAMHAL_LOGDA("mCameraInfo is not null, mem leak?"); |
2109 | } |
2110 | mCameraInfo = info.release(); |
2111 | DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo); |
2112 | |
2113 | if (duration != NULL) { |
2114 | delete [] duration; |
2115 | } |
2116 | |
2117 | s->shutDown(); |
2118 | s.clear(); |
2119 | mPlugged = true; |
2120 | |
2121 | return OK; |
2122 | } |
2123 | |
2124 | status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) { |
2125 | /** |
2126 | * Extract top-level 3A controls |
2127 | */ |
2128 | status_t res; |
2129 | |
2130 | bool facePriority = false; |
2131 | |
2132 | camera_metadata_entry e; |
2133 | |
2134 | e = settings.find(ANDROID_CONTROL_MODE); |
2135 | if (e.count == 0) { |
2136 | ALOGE("%s: No control mode entry!", __FUNCTION__); |
2137 | return BAD_VALUE; |
2138 | } |
2139 | uint8_t controlMode = e.data.u8[0]; |
2140 | |
2141 | e = settings.find(ANDROID_CONTROL_SCENE_MODE); |
2142 | if (e.count == 0) { |
2143 | ALOGE("%s: No scene mode entry!", __FUNCTION__); |
2144 | return BAD_VALUE; |
2145 | } |
2146 | uint8_t sceneMode = e.data.u8[0]; |
2147 | |
2148 | if (controlMode == ANDROID_CONTROL_MODE_OFF) { |
2149 | mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; |
2150 | mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; |
2151 | mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
2152 | update3A(settings); |
2153 | return OK; |
2154 | } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) { |
2155 | switch(sceneMode) { |
2156 | case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY: |
2157 | mFacePriority = true; |
2158 | break; |
2159 | default: |
2160 | ALOGE("%s: Emulator doesn't support scene mode %d", |
2161 | __FUNCTION__, sceneMode); |
2162 | return BAD_VALUE; |
2163 | } |
2164 | } else { |
2165 | mFacePriority = false; |
2166 | } |
2167 | |
2168 | // controlMode == AUTO or sceneMode = FACE_PRIORITY |
2169 | // Process individual 3A controls |
2170 | |
2171 | res = doFakeAE(settings); |
2172 | if (res != OK) return res; |
2173 | |
2174 | res = doFakeAF(settings); |
2175 | if (res != OK) return res; |
2176 | |
2177 | res = doFakeAWB(settings); |
2178 | if (res != OK) return res; |
2179 | |
2180 | update3A(settings); |
2181 | return OK; |
2182 | } |
2183 | |
2184 | status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) { |
2185 | camera_metadata_entry e; |
2186 | |
2187 | e = settings.find(ANDROID_CONTROL_AE_MODE); |
2188 | if (e.count == 0) { |
2189 | ALOGE("%s: No AE mode entry!", __FUNCTION__); |
2190 | return BAD_VALUE; |
2191 | } |
2192 | uint8_t aeMode = e.data.u8[0]; |
2193 | |
2194 | switch (aeMode) { |
2195 | case ANDROID_CONTROL_AE_MODE_OFF: |
2196 | // AE is OFF |
2197 | mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; |
2198 | return OK; |
2199 | case ANDROID_CONTROL_AE_MODE_ON: |
2200 | // OK for AUTO modes |
2201 | break; |
2202 | default: |
2203 | ALOGE("%s: Emulator doesn't support AE mode %d", |
2204 | __FUNCTION__, aeMode); |
2205 | return BAD_VALUE; |
2206 | } |
2207 | |
2208 | e = settings.find(ANDROID_CONTROL_AE_LOCK); |
2209 | if (e.count == 0) { |
2210 | ALOGE("%s: No AE lock entry!", __FUNCTION__); |
2211 | return BAD_VALUE; |
2212 | } |
2213 | bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON); |
2214 | |
2215 | e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER); |
2216 | bool precaptureTrigger = false; |
2217 | if (e.count != 0) { |
2218 | precaptureTrigger = |
2219 | (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START); |
2220 | } |
2221 | |
2222 | if (precaptureTrigger) { |
2223 | ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger); |
2224 | } else if (e.count > 0) { |
2225 | ALOGV("%s: Pre capture trigger was present? %zu", |
2226 | __FUNCTION__, |
2227 | e.count); |
2228 | } |
2229 | |
2230 | if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) { |
2231 | // Run precapture sequence |
2232 | if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) { |
2233 | mAeCounter = 0; |
2234 | } |
2235 | |
2236 | if (mFacePriority) { |
2237 | mAeTargetExposureTime = kFacePriorityExposureTime; |
2238 | } else { |
2239 | mAeTargetExposureTime = kNormalExposureTime; |
2240 | } |
2241 | |
2242 | if (mAeCounter > kPrecaptureMinFrames && |
2243 | (mAeTargetExposureTime - mAeCurrentExposureTime) < |
2244 | mAeTargetExposureTime / 10) { |
2245 | // Done with precapture |
2246 | mAeCounter = 0; |
2247 | mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED : |
2248 | ANDROID_CONTROL_AE_STATE_CONVERGED; |
2249 | } else { |
2250 | // Converge some more |
2251 | mAeCurrentExposureTime += |
2252 | (mAeTargetExposureTime - mAeCurrentExposureTime) * |
2253 | kExposureTrackRate; |
2254 | mAeCounter++; |
2255 | mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE; |
2256 | } |
2257 | |
2258 | } else if (!aeLocked) { |
2259 | // Run standard occasional AE scan |
2260 | switch (mAeState) { |
2261 | case ANDROID_CONTROL_AE_STATE_CONVERGED: |
2262 | case ANDROID_CONTROL_AE_STATE_INACTIVE: |
2263 | mAeCounter++; |
2264 | if (mAeCounter > kStableAeMaxFrames) { |
2265 | mAeTargetExposureTime = |
2266 | mFacePriority ? kFacePriorityExposureTime : |
2267 | kNormalExposureTime; |
2268 | float exposureStep = ((double)rand() / RAND_MAX) * |
2269 | (kExposureWanderMax - kExposureWanderMin) + |
2270 | kExposureWanderMin; |
2271 | mAeTargetExposureTime *= std::pow(2, exposureStep); |
2272 | mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING; |
2273 | } |
2274 | break; |
2275 | case ANDROID_CONTROL_AE_STATE_SEARCHING: |
2276 | mAeCurrentExposureTime += |
2277 | (mAeTargetExposureTime - mAeCurrentExposureTime) * |
2278 | kExposureTrackRate; |
2279 | if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) < |
2280 | mAeTargetExposureTime / 10) { |
2281 | // Close enough |
2282 | mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED; |
2283 | mAeCounter = 0; |
2284 | } |
2285 | break; |
2286 | case ANDROID_CONTROL_AE_STATE_LOCKED: |
2287 | mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED; |
2288 | mAeCounter = 0; |
2289 | break; |
2290 | default: |
2291 | ALOGE("%s: Emulator in unexpected AE state %d", |
2292 | __FUNCTION__, mAeState); |
2293 | return INVALID_OPERATION; |
2294 | } |
2295 | } else { |
2296 | // AE is locked |
2297 | mAeState = ANDROID_CONTROL_AE_STATE_LOCKED; |
2298 | } |
2299 | |
2300 | return OK; |
2301 | } |
2302 | |
/**
 * Fake autofocus processing for a capture request.
 *
 * Validates the AF mode and trigger entries in |settings|, forwards
 * supported AF modes to the sensor, then advances the simulated AF state
 * machine (mAfState) by at most one transition per frame. Focus results
 * are randomized: sweeps "succeed" about 2/3 of the time.
 *
 * @param settings Capture request metadata (read for AF mode/trigger; the
 *        resulting state is published later by update3A()).
 * @return OK on success; BAD_VALUE for missing or unsupported entries.
 */
status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AF_MODE);
    if (e.count == 0) {
        ALOGE("%s: No AF mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t afMode = e.data.u8[0];

    e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
    typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
    af_trigger_t afTrigger;
    // If we have an afTrigger, afTriggerId should be set too
    if (e.count != 0) {
        afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);

        e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);

        if (e.count == 0) {
            ALOGE("%s: When android.control.afTrigger is set "
                    " in the request, afTriggerId needs to be set as well",
                    __FUNCTION__);
            return BAD_VALUE;
        }

        // Remember the trigger ID so update3A() can echo it back in results.
        mAfTriggerId = e.data.i32[0];

        ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
        ALOGV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
        ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
    } else {
        afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    }
    // Front camera is treated as fixed-focus: force AF off regardless of
    // what the request asked for.
    if (!mFacingBack) {
        afMode = ANDROID_CONTROL_AF_MODE_OFF;
    }

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_OFF:
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_MACRO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            if (!mFacingBack) {
                // NOTE(review): unreachable in practice — afMode was forced
                // to OFF above for the front camera. Kept as a safety net.
                ALOGE("%s: Front camera doesn't support AF mode %d",
                        __FUNCTION__, afMode);
                return BAD_VALUE;
            }
            // setAutoFocuas: sensor-layer API name (spelling is the
            // project's, not fixable here).
            mSensor->setAutoFocuas(afMode);
            // OK, handle transitions lower on
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }
#if 0
    e = settings.find(ANDROID_CONTROL_AF_REGIONS);
    if (e.count == 0) {
        ALOGE("%s:Get ANDROID_CONTROL_AF_REGIONS failed\n", __FUNCTION__);
        return BAD_VALUE;
    }
    int32_t x0 = e.data.i32[0];
    int32_t y0 = e.data.i32[1];
    int32_t x1 = e.data.i32[2];
    int32_t y1 = e.data.i32[3];
    mSensor->setFocuasArea(x0, y0, x1, y1);
    DBG_LOGB(" x0:%d, y0:%d,x1:%d,y1:%d,\n", x0, y0, x1, y1);
#endif


    // A mode change resets the "stay one frame in INACTIVE" behavior below.
    bool afModeChanged = mAfMode != afMode;
    mAfMode = afMode;

    /**
     * Simulate AF triggers. Transition at most 1 state per frame.
     * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
     */

    bool afTriggerStart = false;
    bool afTriggerCancel = false;
    switch (afTrigger) {
        case ANDROID_CONTROL_AF_TRIGGER_IDLE:
            break;
        case ANDROID_CONTROL_AF_TRIGGER_START:
            afTriggerStart = true;
            break;
        case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
            afTriggerCancel = true;
            // Cancel trigger always transitions into INACTIVE
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

            ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);

            // Stay in 'inactive' until at least next frame
            return OK;
        default:
            ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
            return BAD_VALUE;
    }

    // If we get down here, we're either in an autofocus mode
    // or in a continuous focus mode (and no other modes)

    // Keep the previous state only for the transition log at the bottom.
    int oldAfState = mAfState;
    switch (mAfState) {
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                        break;
                }
            } else {
                // At least one frame stays in INACTIVE
                if (!afModeChanged) {
                    switch (afMode) {
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                            // fall-through
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                            mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
                            break;
                    }
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
            /**
             * When the AF trigger is activated, the algorithm should finish
             * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
             * or AF_NOT_FOCUSED as appropriate
             */
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            /**
             * When the AF trigger is not involved, the AF algorithm should
             * start in INACTIVE state, and then transition into PASSIVE_SCAN
             * and PASSIVE_FOCUSED states
             */
            else if (!afTriggerCancel) {
                // Randomly transition to passive focus
                if (rand() % 3 == 0) {
                    mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
            }

            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            // TODO: initiate passive scan (PASSIVE_SCAN)
            break;
        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
            // Simulate AF sweep completing instantaneously

            // Randomly transition to focused or not focused
            if (rand() % 3) {
                mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
            } else {
                mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
            }
            break;
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        default:
            ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
    }

    // Log the state transition (verbose builds only, via ALOGVV).
    {
        char afStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                oldAfState,
                afStateString,
                sizeof(afStateString));

        char afNewStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                mAfState,
                afNewStateString,
                sizeof(afNewStateString));
        ALOGVV("%s: AF state transitioned from %s to %s",
              __FUNCTION__, afStateString, afNewStateString);
    }


    return OK;
}
2542 | |
2543 | status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) { |
2544 | camera_metadata_entry e; |
2545 | |
2546 | e = settings.find(ANDROID_CONTROL_AWB_MODE); |
2547 | if (e.count == 0) { |
2548 | ALOGE("%s: No AWB mode entry!", __FUNCTION__); |
2549 | return BAD_VALUE; |
2550 | } |
2551 | uint8_t awbMode = e.data.u8[0]; |
2552 | //DBG_LOGB(" awbMode%d\n", awbMode); |
2553 | |
2554 | // TODO: Add white balance simulation |
2555 | |
2556 | switch (awbMode) { |
2557 | case ANDROID_CONTROL_AWB_MODE_OFF: |
2558 | mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
2559 | return OK; |
2560 | case ANDROID_CONTROL_AWB_MODE_AUTO: |
2561 | case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: |
2562 | case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: |
2563 | case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: |
2564 | case ANDROID_CONTROL_AWB_MODE_SHADE: |
2565 | mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; //add for cts |
2566 | return mSensor->setAWB(awbMode); |
2567 | // OK |
2568 | break; |
2569 | default: |
2570 | ALOGE("%s: Emulator doesn't support AWB mode %d", |
2571 | __FUNCTION__, awbMode); |
2572 | return BAD_VALUE; |
2573 | } |
2574 | |
2575 | return OK; |
2576 | } |
2577 | |
2578 | |
2579 | void EmulatedFakeCamera3::update3A(CameraMetadata &settings) { |
2580 | if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) { |
2581 | settings.update(ANDROID_SENSOR_EXPOSURE_TIME, |
2582 | &mAeCurrentExposureTime, 1); |
2583 | settings.update(ANDROID_SENSOR_SENSITIVITY, |
2584 | &mAeCurrentSensitivity, 1); |
2585 | } |
2586 | |
2587 | settings.update(ANDROID_CONTROL_AE_STATE, |
2588 | &mAeState, 1); |
2589 | settings.update(ANDROID_CONTROL_AF_STATE, |
2590 | &mAfState, 1); |
2591 | settings.update(ANDROID_CONTROL_AWB_STATE, |
2592 | &mAwbState, 1); |
2593 | /** |
2594 | * TODO: Trigger IDs need a think-through |
2595 | */ |
2596 | settings.update(ANDROID_CONTROL_AF_TRIGGER_ID, |
2597 | &mAfTriggerId, 1); |
2598 | } |
2599 | |
2600 | void EmulatedFakeCamera3::signalReadoutIdle() { |
2601 | Mutex::Autolock l(mLock); |
2602 | CAMHAL_LOGDB("%s , E" , __FUNCTION__); |
2603 | // Need to chek isIdle again because waiting on mLock may have allowed |
2604 | // something to be placed in the in-flight queue. |
2605 | if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) { |
2606 | ALOGV("Now idle"); |
2607 | mStatus = STATUS_READY; |
2608 | } |
2609 | CAMHAL_LOGDB("%s , X , mStatus = %d " , __FUNCTION__, mStatus); |
2610 | } |
2611 | |
2612 | void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e, |
2613 | nsecs_t timestamp) { |
2614 | switch(e) { |
2615 | case Sensor::SensorListener::EXPOSURE_START: { |
2616 | ALOGVV("%s: Frame %d: Sensor started exposure at %lld", |
2617 | __FUNCTION__, frameNumber, timestamp); |
2618 | // Trigger shutter notify to framework |
2619 | camera3_notify_msg_t msg; |
2620 | msg.type = CAMERA3_MSG_SHUTTER; |
2621 | msg.message.shutter.frame_number = frameNumber; |
2622 | msg.message.shutter.timestamp = timestamp; |
2623 | sendNotify(&msg); |
2624 | break; |
2625 | } |
2626 | case Sensor::SensorListener::ERROR_CAMERA_DEVICE: { |
2627 | camera3_notify_msg_t msg; |
2628 | msg.type = CAMERA3_MSG_ERROR; |
2629 | msg.message.error.frame_number = frameNumber; |
2630 | msg.message.error.error_stream = NULL; |
2631 | msg.message.error.error_code = 1; |
2632 | sendNotify(&msg); |
2633 | break; |
2634 | } |
2635 | default: |
2636 | ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__, |
2637 | e, timestamp); |
2638 | break; |
2639 | } |
2640 | } |
2641 | |
2642 | EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) : |
2643 | mParent(parent), mJpegWaiting(false) { |
2644 | mExitReadoutThread = false; |
2645 | } |
2646 | |
2647 | EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() { |
2648 | for (List<Request>::iterator i = mInFlightQueue.begin(); |
2649 | i != mInFlightQueue.end(); i++) { |
2650 | delete i->buffers; |
2651 | delete i->sensorBuffers; |
2652 | } |
2653 | } |
2654 | |
2655 | void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) { |
2656 | Mutex::Autolock l(mLock); |
2657 | |
2658 | mInFlightQueue.push_back(r); |
2659 | mInFlightSignal.signal(); |
2660 | } |
2661 | |
2662 | bool EmulatedFakeCamera3::ReadoutThread::isIdle() { |
2663 | Mutex::Autolock l(mLock); |
2664 | return mInFlightQueue.empty() && !mThreadActive; |
2665 | } |
2666 | |
2667 | status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() { |
2668 | status_t res; |
2669 | Mutex::Autolock l(mLock); |
2670 | CAMHAL_LOGDB("%s , E" , __FUNCTION__); |
2671 | int loopCount = 0; |
2672 | while (mInFlightQueue.size() >= kMaxQueueSize) { |
2673 | res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop); |
2674 | if (res != OK && res != TIMED_OUT) { |
2675 | ALOGE("%s: Error waiting for in-flight queue to shrink", |
2676 | __FUNCTION__); |
2677 | return INVALID_OPERATION; |
2678 | } |
2679 | if (loopCount == kMaxWaitLoops) { |
2680 | ALOGE("%s: Timed out waiting for in-flight queue to shrink", |
2681 | __FUNCTION__); |
2682 | return TIMED_OUT; |
2683 | } |
2684 | loopCount++; |
2685 | } |
2686 | return OK; |
2687 | } |
2688 | |
2689 | status_t EmulatedFakeCamera3::ReadoutThread::setJpegCompressorListener(EmulatedFakeCamera3 *parent) { |
2690 | status_t res; |
2691 | res = mParent->mJpegCompressor->setlistener(this); |
2692 | if (res != NO_ERROR) { |
2693 | ALOGE("%s: set JpegCompressor Listner failed",__FUNCTION__); |
2694 | } |
2695 | return res; |
2696 | } |
2697 | |
2698 | status_t EmulatedFakeCamera3::ReadoutThread::startJpegCompressor(EmulatedFakeCamera3 *parent) { |
2699 | status_t res; |
2700 | res = mParent->mJpegCompressor->start(); |
2701 | if (res != NO_ERROR) { |
2702 | ALOGE("%s: JpegCompressor start failed",__FUNCTION__); |
2703 | } |
2704 | return res; |
2705 | } |
2706 | |
2707 | status_t EmulatedFakeCamera3::ReadoutThread::shutdownJpegCompressor(EmulatedFakeCamera3 *parent) { |
2708 | status_t res; |
2709 | res = mParent->mJpegCompressor->cancel(); |
2710 | if (res != OK) { |
2711 | ALOGE("%s: JpegCompressor cancel failed",__FUNCTION__); |
2712 | } |
2713 | return res; |
2714 | } |
2715 | |
2716 | void EmulatedFakeCamera3::ReadoutThread::sendExitReadoutThreadSignal(void) { |
2717 | mExitReadoutThread = true; |
2718 | mInFlightSignal.signal(); |
2719 | } |
2720 | |
/**
 * Main readout loop: dequeues one capture request, waits for the sensor to
 * deliver the corresponding frame, hands BLOB (JPEG) buffers to the async
 * compressor, and returns all remaining buffers plus result metadata to
 * the framework.
 *
 * Return value follows Thread::threadLoop() convention: true to run the
 * loop again, false to exit the thread.
 */
bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
    status_t res;
    ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);

    // First wait for a request from the in-flight queue
    if (mExitReadoutThread) {
        return false;
    }

    // An empty mCurrentRequest.settings means the previous iteration
    // finished (or timed out before acquiring a request), so pull the next
    // request off the in-flight queue.
    if (mCurrentRequest.settings.isEmpty()) {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
            if (res == TIMED_OUT) {
                // No request yet; loop again (also re-checks the exit flag).
                ALOGVV("%s: ReadoutThread: Timed out waiting for request",
                        __FUNCTION__);
                return true;
            } else if (res != NO_ERROR) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                return false;
            }
        }

        // The wait may have been ended by sendExitReadoutThreadSignal().
        if (mExitReadoutThread) {
            return false;
        }

        // Take ownership of the head request. settings.acquire() moves the
        // metadata out of the queued entry; buffers/sensorBuffers pointers
        // are transferred (freed below or by the JPEG compressor).
        mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
        mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
        mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
        mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
        mCurrentRequest.havethumbnail = mInFlightQueue.begin()->havethumbnail;
        mInFlightQueue.erase(mInFlightQueue.begin());
        // Wake waitForReadout(), which may be blocked on a full queue.
        mInFlightSignal.signal();
        mThreadActive = true;
        ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
                mCurrentRequest.frameNumber);
    }

    // Then wait for it to be delivered from the sensor
    ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
            __FUNCTION__);

    nsecs_t captureTime;
    // waitForNewFrame returns 0 on timeout (retry, keeping mCurrentRequest)
    // and -1 when the sensor thread has exited (shut this thread down too).
    status_t gotFrame =
            mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
    if (gotFrame == 0) {
        ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
                __FUNCTION__);
        return true;
    }

    if (gotFrame == -1) {
        DBG_LOGA("Sensor thread had exited , here should exit ReadoutThread Loop");
        return false;
    }

    CAMHAL_LOGDB("Sensor done with readout for frame %d, captured at %lld ",
            mCurrentRequest.frameNumber, captureTime);

    // Check if we need to JPEG encode a buffer, and send it for async
    // compression if so. Otherwise prepare the buffer for return.
    bool needJpeg = false;
    HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
    while (buf != mCurrentRequest.buffers->end()) {
        // NOTE: goodBuffer is always true here; kept for the status ternary
        // below.
        bool goodBuffer = true;
        if ( buf->stream->format ==
                HAL_PIXEL_FORMAT_BLOB) {
            // BLOB buffers go to the async JPEG compressor and are erased
            // from this request's buffer list; the compressor returns them
            // via onJpegDone() in a separate capture result.
            Mutex::Autolock jl(mJpegLock);
            needJpeg = true;
            CaptureRequest currentcapture;
            currentcapture.frameNumber = mCurrentRequest.frameNumber;
            currentcapture.sensorBuffers = mCurrentRequest.sensorBuffers;
            currentcapture.buf = buf;
            currentcapture.mNeedThumbnail = mCurrentRequest.havethumbnail;
            mParent->mJpegCompressor->queueRequest(currentcapture);
            //this sensorBuffers delete in the jpegcompress;
            mCurrentRequest.sensorBuffers = NULL;
            buf = mCurrentRequest.buffers->erase(buf);
            continue;
        }
        // Non-BLOB buffer: release the CPU mapping and mark it ready to
        // hand back to the framework.
        GraphicBufferMapper::get().unlock(*(buf->buffer));

        buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
                CAMERA3_BUFFER_STATUS_ERROR;
        buf->acquire_fence = -1;
        buf->release_fence = -1;

        ++buf;
    } // end while

    // Construct result for all completed buffers and results

    camera3_capture_result result;

    // Stamp the result metadata with the sensor capture time.
    mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
            &captureTime, 1);

    memset(&result, 0, sizeof(result));
    result.frame_number = mCurrentRequest.frameNumber;
    // getAndLock() pins the metadata buffer until unlock() below.
    result.result = mCurrentRequest.settings.getAndLock();
    result.num_output_buffers = mCurrentRequest.buffers->size();
    result.output_buffers = mCurrentRequest.buffers->array();
    result.partial_result = 1;

    // Go idle if queue is empty, before sending result

    bool signalIdle = false;
    {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            mThreadActive = false;
            signalIdle = true;
        }
    }

    // signalReadoutIdle() takes the parent's lock, so call it outside our
    // own lock scope.
    if (signalIdle) mParent->signalReadoutIdle();

    // Send it off to the framework
    ALOGVV("%s: ReadoutThread: Send result to framework",
            __FUNCTION__);
    mParent->sendCaptureResult(&result);

    // Clean up
    mCurrentRequest.settings.unlock(result.result);

    delete mCurrentRequest.buffers;
    mCurrentRequest.buffers = NULL;
    // When a JPEG was queued, ownership of sensorBuffers moved to the
    // compressor (and the pointer was already nulled above).
    if (!needJpeg) {
        delete mCurrentRequest.sensorBuffers;
        mCurrentRequest.sensorBuffers = NULL;
    }
    // Clearing settings signals the next iteration to dequeue a new request.
    mCurrentRequest.settings.clear();
    CAMHAL_LOGDB("%s , X " , __FUNCTION__);
    return true;
}
2858 | |
2859 | void EmulatedFakeCamera3::ReadoutThread::onJpegDone( |
2860 | const StreamBuffer &jpegBuffer, bool success , CaptureRequest &r) { |
2861 | Mutex::Autolock jl(mJpegLock); |
2862 | GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer)); |
2863 | |
2864 | mJpegHalBuffer = *(r.buf); |
2865 | mJpegHalBuffer.status = success ? |
2866 | CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR; |
2867 | mJpegHalBuffer.acquire_fence = -1; |
2868 | mJpegHalBuffer.release_fence = -1; |
2869 | mJpegWaiting = false; |
2870 | |
2871 | camera3_capture_result result; |
2872 | result.frame_number = r.frameNumber; |
2873 | result.result = NULL; |
2874 | result.num_output_buffers = 1; |
2875 | result.output_buffers = &mJpegHalBuffer; |
2876 | result.partial_result = 1; |
2877 | |
2878 | if (!success) { |
2879 | ALOGE("%s: Compression failure, returning error state buffer to" |
2880 | " framework", __FUNCTION__); |
2881 | } else { |
2882 | DBG_LOGB("%s: Compression complete, returning buffer to framework", |
2883 | __FUNCTION__); |
2884 | } |
2885 | |
2886 | mParent->sendCaptureResult(&result); |
2887 | |
2888 | } |
2889 | |
2890 | void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone( |
2891 | const StreamBuffer &inputBuffer) { |
2892 | // Should never get here, since the input buffer has to be returned |
2893 | // by end of processCaptureRequest |
2894 | ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__); |
2895 | } |
2896 | |
2897 | |
2898 | }; // namespace android |
2899 |