blob: 54ff21d9bf8e968fd298c9480099ae89030d7abb
1 | /* |
2 | * Copyright (C) 2013 The Android Open Source Project |
3 | * |
4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
5 | * you may not use this file except in compliance with the License. |
6 | * You may obtain a copy of the License at |
7 | * |
8 | * http://www.apache.org/licenses/LICENSE-2.0 |
9 | * |
10 | * Unless required by applicable law or agreed to in writing, software |
11 | * distributed under the License is distributed on an "AS IS" BASIS, |
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
13 | * See the License for the specific language governing permissions and |
14 | * limitations under the License. |
15 | */ |
16 | |
17 | /* |
18 | * Contains implementation of a class EmulatedFakeCamera3 that encapsulates |
19 | * functionality of an advanced fake camera. |
20 | */ |
21 | |
22 | #include <inttypes.h> |
23 | |
24 | #define LOG_NDEBUG 0 |
25 | //#define LOG_NNDEBUG 0 |
26 | #define LOG_TAG "EmulatedCamera_FakeCamera3" |
27 | #include <utils/Log.h> |
28 | |
29 | #include "EmulatedFakeCamera3.h" |
30 | #include "EmulatedCameraFactory.h" |
31 | #include <ui/Fence.h> |
32 | #include <ui/Rect.h> |
33 | #include <ui/GraphicBufferMapper.h> |
34 | #include <sys/types.h> |
35 | |
36 | #include <cutils/properties.h> |
37 | #include "fake-pipeline2/Sensor.h" |
38 | #include "fake-pipeline2/JpegCompressor.h" |
39 | #include <cmath> |
40 | #include <gralloc_priv.h> |
41 | #include <binder/IPCThreadState.h> |
42 | |
43 | #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0 |
44 | #define ALOGVV ALOGV |
45 | #else |
46 | #define ALOGVV(...) ((void)0) |
47 | #endif |
48 | |
49 | namespace android { |
50 | |
51 | /** |
52 | * Constants for camera capabilities |
53 | */ |
54 | |
// Time-unit helpers expressed in nanoseconds (the base unit of nsecs_t):
// USEC = 1,000 ns, MSEC = 1,000,000 ns, SEC = 1,000,000,000 ns.
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;


// Pixel formats accepted in stream configurations (see configureStreams()).
const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
        //HAL_PIXEL_FORMAT_RAW_SENSOR,
        HAL_PIXEL_FORMAT_BLOB,
        //HAL_PIXEL_FORMAT_RGBA_8888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
        // These are handled by YCbCr_420_888
        HAL_PIXEL_FORMAT_YV12,
        HAL_PIXEL_FORMAT_YCrCb_420_SP,
        //HAL_PIXEL_FORMAT_YCbCr_422_I,
        HAL_PIXEL_FORMAT_YCbCr_420_888
};

// Raw capture size as a single (width, height) pair.
const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
    640, 480
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// Processed (YUV) sizes as (width, height) pairs.
// NOTE(review): declared with 6 slots but only 4 initialized — the last two
// entries are zero-initialized; confirm consumers treat 0 as a terminator.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[6] = {
    640, 480, 320, 240,// 1280, 720
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
    640, 480, 320, 240
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// Maximum JPEG capture sizes per facing, as (width, height) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
    1280,720
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
    640, 480
    // Sensor::kResolution[0], Sensor::kResolution[1]
};


const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};
109 | |
/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int EmulatedFakeCamera3::kNormalSensitivity = 100;
const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
// Fraction of the remaining exposure error the fake AE corrects per frame.
const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
// Bounds for the simulated exposure wander used by the fake AE.
const float EmulatedFakeCamera3::kExposureWanderMin = -2;
const float EmulatedFakeCamera3::kExposureWanderMax = 1;

/**
 * Camera device lifecycle methods
 */
// Floor for computed JPEG buffer sizes: 256 KiB of compressed data plus
// space for the camera3_jpeg_blob descriptor appended to the buffer.
static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
129 | jpegsize EmulatedFakeCamera3::getMaxJpegResolution(uint32_t picSizes[],int count) { |
130 | uint32_t maxJpegWidth = 0, maxJpegHeight = 0; |
131 | jpegsize maxJpegResolution; |
132 | for (int i=0; i < count; i+= 4) { |
133 | uint32_t width = picSizes[i+1]; |
134 | uint32_t height = picSizes[i+2]; |
135 | if (picSizes[i+0] == HAL_PIXEL_FORMAT_BLOB && |
136 | (width * height > maxJpegWidth * maxJpegHeight)) { |
137 | maxJpegWidth = width; |
138 | maxJpegHeight = height; |
139 | } |
140 | } |
141 | maxJpegResolution.width = maxJpegWidth; |
142 | maxJpegResolution.height = maxJpegHeight; |
143 | return maxJpegResolution; |
144 | } |
145 | ssize_t EmulatedFakeCamera3::getJpegBufferSize(int width, int height) { |
146 | if (maxJpegResolution.width == 0) { |
147 | return BAD_VALUE; |
148 | } |
149 | ssize_t maxJpegBufferSize = JpegCompressor::kMaxJpegSize; |
150 | |
151 | // Calculate final jpeg buffer size for the given resolution. |
152 | float scaleFactor = ((float) (width * height)) / |
153 | (maxJpegResolution.width * maxJpegResolution.height); |
154 | ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize; |
155 | // Bound the buffer size to [MIN_JPEG_BUFFER_SIZE, maxJpegBufferSize]. |
156 | if (jpegBufferSize > maxJpegBufferSize) { |
157 | jpegBufferSize = maxJpegBufferSize; |
158 | } else if (jpegBufferSize < kMinJpegBufferSize) { |
159 | jpegBufferSize = kMinJpegBufferSize; |
160 | } |
161 | return jpegBufferSize; |
162 | } |
163 | |
/*
 * Constructs the fake camera: clears the cached request templates, resets
 * capability/hotplug state, and parses the per-app orientation XML config.
 */
EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, struct hw_module_t* module) :
        EmulatedCamera3(cameraId, module) {
    ALOGI("Constructing emulated fake camera 3 cameraID:%d", mCameraID);

    // Templates are built lazily by constructDefaultRequestSettings().
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
        mDefaultTemplates[i] = NULL;
    }

    /**
     * Front cameras = limited mode
     * Back cameras = full mode
     */
    //TODO limited or full mode, read this from camera driver
    //mFullMode = facingBack;
    mCameraStatus = CAMERA_INIT;
    mSupportCap = 0;     // capability bits, probed in connectCamera()
    mSupportRotate = 0;  // set when IOCTL_MASK_ROTATE is reported
    mFullMode = 0;
    mFlushTag = false;
    mPlugged = false;

    // Load the XML mapping of APK package names to orientation overrides,
    // consumed later by getCameraInfo().
    gLoadXml.parseXMLFile();
}
187 | |
188 | EmulatedFakeCamera3::~EmulatedFakeCamera3() { |
189 | for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) { |
190 | if (mDefaultTemplates[i] != NULL) { |
191 | free_camera_metadata(mDefaultTemplates[i]); |
192 | } |
193 | } |
194 | |
195 | if (mCameraInfo != NULL) { |
196 | CAMHAL_LOGIA("free mCameraInfo"); |
197 | free_camera_metadata(mCameraInfo); |
198 | mCameraInfo = NULL; |
199 | } |
200 | } |
201 | |
/*
 * One-time device initialization: builds the static metadata and then
 * delegates to the base class. Only legal while the device is still in the
 * pre-initialization state (STATUS_ERROR).
 */
status_t EmulatedFakeCamera3::Initialize() {
    DBG_LOGB("mCameraID=%d,mStatus=%d,ddd\n", mCameraID, mStatus);
    status_t res;

#ifdef HAVE_VERSION_INFO
    // Build-provenance banner; all CAMHAL_* values are injected at build time.
    CAMHAL_LOGIB("\n--------------------------------\n"
                  "author:aml.sh multi-media team\n"
                  "branch name:   %s\n"
                  "git version:   %s \n"
                  "last changed:  %s\n"
                  "build-time:    %s\n"
                  "build-name:    %s\n"
                  "uncommitted-file-num:%d\n"
                  "ssh user@%s, cd %s\n"
                  "hostname %s\n"
                  "--------------------------------\n",
                  CAMHAL_BRANCH_NAME,
                  CAMHAL_GIT_VERSION,
                  CAMHAL_LAST_CHANGED,
                  CAMHAL_BUILD_TIME,
                  CAMHAL_BUILD_NAME,
                  CAMHAL_GIT_UNCOMMIT_FILE_NUM,
                  CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
                  );
#endif


    // Any state other than STATUS_ERROR means Initialize() already ran.
    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    // Allocate and populate the static camera characteristics (mCameraInfo).
    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return EmulatedCamera3::Initialize();
}
243 | |
/*
 * Opens the device: brings up the fake Sensor, the readout thread and the
 * JPEG compressor, then seeds the fake 3A state. Requires the device to be
 * closed (STATUS_CLOSED) and hot-plugged in.
 */
status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGB("%s, ddd", __FUNCTION__);
    Mutex::Autolock l(mLock);
    status_t res;
    DBG_LOGB("%s , mStatus = %d" , __FUNCTION__, mStatus);

    if ((mStatus != STATUS_CLOSED) || !mPlugged) {
        ALOGE("%s: Can't connect in state %d, mPlugged=%d",
                __FUNCTION__, mStatus, mPlugged);
        return INVALID_OPERATION;
    }

    // Create the sensor; this object receives its callbacks.
    mSensor = new Sensor();
    mSensor->setSensorListener(this);

    res = mSensor->startUp(mCameraID);
    DBG_LOGB("mSensor startUp, mCameraID=%d\n", mCameraID);
    if (res != NO_ERROR) return res;

    // Probe driver capabilities (e.g. hardware rotation support).
    mSupportCap = mSensor->IoctlStateProbe();
    if (mSupportCap & IOCTL_MASK_ROTATE) {
        mSupportRotate = true;
    }

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor();

    // Wire up and start the JPEG compressor before the readout thread runs.
    res = mReadoutThread->setJpegCompressorListener(this);
    if (res != NO_ERROR) {
        return res;
    }
    res = mReadoutThread->startJpegCompressor(this);
    if (res != NO_ERROR) {
        return res;
    }

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A

    mControlMode  = ANDROID_CONTROL_MODE_AUTO;
    mFacePriority = false;
    mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
    mAeState      = ANDROID_CONTROL_AE_STATE_CONVERGED;//ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAfTriggerId  = 0;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity  = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}
300 | |
/*
 * Marks the camera as hot-plugged (present). Idempotent; only logs when the
 * state actually changes.
 */
status_t EmulatedFakeCamera3::plugCamera() {
    {
        Mutex::Autolock l(mLock);

        if (!mPlugged) {
            CAMHAL_LOGIB("%s: Plugged back in", __FUNCTION__);
            mPlugged = true;
        }
    }

    return NO_ERROR;
}
313 | |
314 | status_t EmulatedFakeCamera3::unplugCamera() { |
315 | { |
316 | Mutex::Autolock l(mLock); |
317 | |
318 | if (mPlugged) { |
319 | CAMHAL_LOGIB("%s: Unplugged camera", __FUNCTION__); |
320 | mPlugged = false; |
321 | } |
322 | } |
323 | return true; |
324 | } |
325 | |
326 | camera_device_status_t EmulatedFakeCamera3::getHotplugStatus() { |
327 | Mutex::Autolock l(mLock); |
328 | return mPlugged ? |
329 | CAMERA_DEVICE_STATUS_PRESENT : |
330 | CAMERA_DEVICE_STATUS_NOT_PRESENT; |
331 | } |
332 | |
333 | bool EmulatedFakeCamera3::getCameraStatus() |
334 | { |
335 | CAMHAL_LOGVB("%s, mCameraStatus = %d",__FUNCTION__,mCameraStatus); |
336 | bool ret = false; |
337 | if (mStatus == STATUS_CLOSED) { |
338 | ret = true; |
339 | } else { |
340 | ret = false; |
341 | } |
342 | return ret; |
343 | } |
344 | |
/*
 * Shuts the device down: flushes and stops the readout thread and JPEG
 * compressor, shuts the sensor down, and frees per-stream private state.
 *
 * NOTE(review): mLock is released around the sensor shutdown and the
 * readout-thread join — presumably so those threads can make progress while
 * exiting; confirm against ReadoutThread/Sensor locking.
 */
status_t EmulatedFakeCamera3::closeCamera() {
    DBG_LOGB("%s, %d\n", __FUNCTION__, __LINE__);
    status_t res;
    {
        Mutex::Autolock l(mLock);
        // Already closed: nothing to do.
        if (mStatus == STATUS_CLOSED) return OK;
    }

    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);
    // Wake anything blocked in the pipeline, then stop the sensor.
    mReadoutThread->sendFlushSingnal();
    mSensor->sendExitSingalToSensor();
    res = mSensor->shutDown();
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
        return res;
    }
    mSensor.clear();
    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);

    {
        Mutex::Autolock l(mLock);
        res = mReadoutThread->shutdownJpegCompressor(this);
        if (res != OK) {
            ALOGE("%s: Unable to shut down JpegCompressor: %d", __FUNCTION__, res);
            return res;
        }
        // Ask the readout thread to exit; joined below outside the lock.
        mReadoutThread->sendExitReadoutThreadSignal();
        mReadoutThread->requestExit();
    }
    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);

    mReadoutThread->join();
    DBG_LOGA("Sucess exit ReadOutThread");
    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = NULL;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }
    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);
    return EmulatedCamera3::closeCamera();
}
393 | |
394 | status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) { |
395 | char property[PROPERTY_VALUE_MAX]; |
396 | char* tempApkName = gLoadXml.getApkPackageName(IPCThreadState::self()->getCallingPid()); |
397 | List_Or * temp=new List_Or(); |
398 | info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT; |
399 | if (mSensorType == SENSOR_USB) { |
400 | if (mFacingBack) { |
401 | property_get("hw.camera.orientation.back", property, "0"); |
402 | } else { |
403 | property_get("hw.camera.orientation.front", property, "0"); |
404 | } |
405 | int32_t orientation = atoi(property); |
406 | |
407 | if (gLoadXml.findApkCp(tempApkName, temp)) { |
408 | orientation = atoi(temp->pro); |
409 | } |
410 | if (temp != NULL) { |
411 | delete temp; |
412 | temp = NULL; |
413 | } |
414 | |
415 | property_get("hw.camera.usb.orientation_offset", property, "0"); |
416 | orientation += atoi(property); |
417 | orientation %= 360; |
418 | info->orientation = orientation ; |
419 | } else { |
420 | if (mFacingBack) { |
421 | property_get("hw.camera.orientation.back", property, "270"); |
422 | } else { |
423 | property_get("hw.camera.orientation.front", property, "90"); |
424 | } |
425 | info->orientation = atoi(property); |
426 | } |
427 | return EmulatedCamera3::getCameraInfo(info); |
428 | } |
429 | |
430 | /** |
431 | * Camera3 interface methods |
432 | */ |
433 | |
434 | void EmulatedFakeCamera3::getValidJpegSize(uint32_t picSizes[], uint32_t availablejpegsize[], int count) { |
435 | int i,j,k; |
436 | bool valid = true; |
437 | for (i=0,j=0; i < count; i+= 4) { |
438 | for (k= 0; k<=j ;k+=2) { |
439 | if ((availablejpegsize[k]*availablejpegsize[k+1]) == (picSizes[i+1]*picSizes[i+2])) { |
440 | |
441 | valid = false; |
442 | } |
443 | } |
444 | if (valid) { |
445 | availablejpegsize[j] = picSizes[i+1]; |
446 | availablejpegsize[j+1] = picSizes[i+2]; |
447 | j+=2; |
448 | } |
449 | valid = true; |
450 | } |
451 | } |
452 | |
453 | status_t EmulatedFakeCamera3::checkValidJpegSize(uint32_t width, uint32_t height) { |
454 | |
455 | int validsizecount = 0; |
456 | uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]); |
457 | for (uint32_t f = 0; f < count; f+=2) { |
458 | if (mAvailableJpegSize[f] != 0) { |
459 | if ((mAvailableJpegSize[f] == width)&&(mAvailableJpegSize[f+1] == height)) { |
460 | validsizecount++; |
461 | } |
462 | } else { |
463 | break; |
464 | } |
465 | } |
466 | if (validsizecount == 0) |
467 | return BAD_VALUE; |
468 | return OK; |
469 | } |
470 | |
471 | status_t EmulatedFakeCamera3::configureStreams( |
472 | camera3_stream_configuration *streamList) { |
473 | Mutex::Autolock l(mLock); |
474 | uint32_t width, height, pixelfmt; |
475 | bool isRestart = false; |
476 | mFlushTag = false; |
477 | DBG_LOGB("%s: %d streams", __FUNCTION__, streamList->num_streams); |
478 | |
479 | if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) { |
480 | ALOGE("%s: Cannot configure streams in state %d", |
481 | __FUNCTION__, mStatus); |
482 | return NO_INIT; |
483 | } |
484 | |
485 | /** |
486 | * Sanity-check input list. |
487 | */ |
488 | if (streamList == NULL) { |
489 | ALOGE("%s: NULL stream configuration", __FUNCTION__); |
490 | return BAD_VALUE; |
491 | } |
492 | |
493 | if (streamList->streams == NULL) { |
494 | ALOGE("%s: NULL stream list", __FUNCTION__); |
495 | return BAD_VALUE; |
496 | } |
497 | |
498 | if (streamList->num_streams < 1) { |
499 | ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__, |
500 | streamList->num_streams); |
501 | return BAD_VALUE; |
502 | } |
503 | |
504 | camera3_stream_t *inputStream = NULL; |
505 | for (size_t i = 0; i < streamList->num_streams; i++) { |
506 | camera3_stream_t *newStream = streamList->streams[i]; |
507 | |
508 | if (newStream == NULL) { |
509 | ALOGE("%s: Stream index %zu was NULL", |
510 | __FUNCTION__, i); |
511 | return BAD_VALUE; |
512 | } |
513 | |
514 | if (newStream->max_buffers <= 0) { |
515 | isRestart = true;//mSensor->isNeedRestart(newStream->width, newStream->height, newStream->format); |
516 | DBG_LOGB("format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d, isRestart=%d\n", |
517 | newStream->format, newStream->width, newStream->height, |
518 | newStream->stream_type, newStream->max_buffers, |
519 | isRestart); |
520 | } |
521 | ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x", |
522 | __FUNCTION__, newStream, i, newStream->stream_type, |
523 | newStream->usage, |
524 | newStream->format); |
525 | |
526 | if (newStream->stream_type == CAMERA3_STREAM_INPUT || |
527 | newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { |
528 | if (inputStream != NULL) { |
529 | |
530 | ALOGE("%s: Multiple input streams requested!", __FUNCTION__); |
531 | return BAD_VALUE; |
532 | } |
533 | inputStream = newStream; |
534 | } |
535 | |
536 | bool validFormat = false; |
537 | for (size_t f = 0; |
538 | f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]); |
539 | f++) { |
540 | if (newStream->format == kAvailableFormats[f]) { |
541 | validFormat = true; |
542 | //HAL_PIXEL_FORMAT_YCrCb_420_SP, |
543 | if (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) |
544 | newStream->format = HAL_PIXEL_FORMAT_YCrCb_420_SP; |
545 | |
546 | break; |
547 | } |
548 | DBG_LOGB("stream_type=%d\n", newStream->stream_type); |
549 | } |
550 | if (!validFormat) { |
551 | ALOGE("%s: Unsupported stream format 0x%x requested", |
552 | __FUNCTION__, newStream->format); |
553 | return BAD_VALUE; |
554 | } |
555 | |
556 | status_t ret = checkValidJpegSize(newStream->width, newStream->height); |
557 | if (ret != OK) { |
558 | return BAD_VALUE; |
559 | } |
560 | |
561 | } |
562 | mInputStream = inputStream; |
563 | width = 0; |
564 | height = 0; |
565 | for (size_t i = 0; i < streamList->num_streams; i++) { |
566 | camera3_stream_t *newStream = streamList->streams[i]; |
567 | DBG_LOGB("find propert width and height, format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d\n", |
568 | newStream->format, newStream->width, newStream->height, newStream->stream_type, newStream->max_buffers); |
569 | if ((HAL_PIXEL_FORMAT_BLOB != newStream->format) && |
570 | (CAMERA3_STREAM_OUTPUT == newStream->stream_type)) { |
571 | |
572 | if (width < newStream->width) |
573 | width = newStream->width; |
574 | |
575 | if (height < newStream->height) |
576 | height = newStream->height; |
577 | |
578 | pixelfmt = (uint32_t)newStream->format; |
579 | if (HAL_PIXEL_FORMAT_YCbCr_420_888 == pixelfmt) |
580 | pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP; |
581 | } |
582 | |
583 | } |
584 | |
585 | //TODO modify this ugly code |
586 | if (isRestart) { |
587 | isRestart = mSensor->isNeedRestart(width, height, pixelfmt); |
588 | } |
589 | |
590 | if (isRestart) { |
591 | mSensor->streamOff(); |
592 | pixelfmt = mSensor->halFormatToSensorFormat(pixelfmt); |
593 | mSensor->setOutputFormat(width, height, pixelfmt, 0); |
594 | mSensor->streamOn(); |
595 | DBG_LOGB("width=%d, height=%d, pixelfmt=%.4s\n", |
596 | width, height, (char*)&pixelfmt); |
597 | } |
598 | |
599 | /** |
600 | * Initially mark all existing streams as not alive |
601 | */ |
602 | for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) { |
603 | PrivateStreamInfo *privStream = |
604 | static_cast<PrivateStreamInfo*>((*s)->priv); |
605 | privStream->alive = false; |
606 | } |
607 | |
608 | /** |
609 | * Find new streams and mark still-alive ones |
610 | */ |
611 | for (size_t i = 0; i < streamList->num_streams; i++) { |
612 | camera3_stream_t *newStream = streamList->streams[i]; |
613 | if (newStream->priv == NULL) { |
614 | // New stream, construct info |
615 | PrivateStreamInfo *privStream = new PrivateStreamInfo(); |
616 | privStream->alive = true; |
617 | privStream->registered = false; |
618 | |
619 | newStream->usage = |
620 | mSensor->getStreamUsage(newStream->stream_type); |
621 | |
622 | DBG_LOGB("stream_type=%d\n", newStream->stream_type); |
623 | newStream->max_buffers = kMaxBufferCount; |
624 | newStream->priv = privStream; |
625 | mStreams.push_back(newStream); |
626 | } else { |
627 | // Existing stream, mark as still alive. |
628 | PrivateStreamInfo *privStream = |
629 | static_cast<PrivateStreamInfo*>(newStream->priv); |
630 | CAMHAL_LOGDA("Existing stream ?"); |
631 | privStream->alive = true; |
632 | } |
633 | DBG_LOGB("%d, newStream=%p, stream_type=%d, usage=%x, priv=%p, w*h=%dx%d\n", |
634 | i, newStream, newStream->stream_type, newStream->usage, newStream->priv, newStream->width, newStream->height); |
635 | } |
636 | |
637 | /** |
638 | * Reap the dead streams |
639 | */ |
640 | for (StreamIterator s = mStreams.begin(); s != mStreams.end();) { |
641 | PrivateStreamInfo *privStream = |
642 | static_cast<PrivateStreamInfo*>((*s)->priv); |
643 | if (!privStream->alive) { |
644 | DBG_LOGA("delete not alive streams"); |
645 | (*s)->priv = NULL; |
646 | delete privStream; |
647 | s = mStreams.erase(s); |
648 | } else { |
649 | ++s; |
650 | } |
651 | } |
652 | |
653 | /** |
654 | * Can't reuse settings across configure call |
655 | */ |
656 | mPrevSettings.clear(); |
657 | |
658 | return OK; |
659 | } |
660 | |
/*
 * Records buffer registration for a configured stream. The emulator keeps
 * no per-buffer state, so this only validates the request and flips the
 * stream's `registered` flag.
 */
status_t EmulatedFakeCamera3::registerStreamBuffers(
        const camera3_stream_buffer_set *bufferSet) {
    DBG_LOGB("%s: E", __FUNCTION__);
    Mutex::Autolock l(mLock);

    /**
     * Sanity checks
     */
    DBG_LOGA("==========sanity checks\n");

    // OK: register streams at any time during configure
    // (but only once per stream)
    if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) {
        ALOGE("%s: Cannot register buffers in state %d",
                __FUNCTION__, mStatus);
        return NO_INIT;
    }

    if (bufferSet == NULL) {
        ALOGE("%s: NULL buffer set!", __FUNCTION__);
        return BAD_VALUE;
    }

    // The target stream must be one of the configured streams.
    StreamIterator s = mStreams.begin();
    for (; s != mStreams.end(); ++s) {
        if (bufferSet->stream == *s) break;
    }
    if (s == mStreams.end()) {
        ALOGE("%s: Trying to register buffers for a non-configured stream!",
                __FUNCTION__);
        return BAD_VALUE;
    }

    /**
     * Register the buffers. This doesn't mean anything to the emulator besides
     * marking them off as registered.
     */

    PrivateStreamInfo *privStream =
            static_cast<PrivateStreamInfo*>((*s)->priv);

    // Double-registration check deliberately disabled.
#if 0
    if (privStream->registered) {
        ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__);
        return BAD_VALUE;
    }
#endif

    privStream->registered = true;

    return OK;
}
713 | |
714 | const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings( |
715 | int type) { |
716 | DBG_LOGB("%s: E", __FUNCTION__); |
717 | Mutex::Autolock l(mLock); |
718 | |
719 | if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) { |
720 | ALOGE("%s: Unknown request settings template: %d", |
721 | __FUNCTION__, type); |
722 | return NULL; |
723 | } |
724 | |
725 | /** |
726 | * Cache is not just an optimization - pointer returned has to live at |
727 | * least as long as the camera device instance does. |
728 | */ |
729 | if (mDefaultTemplates[type] != NULL) { |
730 | return mDefaultTemplates[type]; |
731 | } |
732 | |
733 | CameraMetadata settings; |
734 | |
735 | /** android.request */ |
736 | static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; |
737 | settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); |
738 | |
739 | static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; |
740 | settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1); |
741 | |
742 | static const int32_t id = 0; |
743 | settings.update(ANDROID_REQUEST_ID, &id, 1); |
744 | |
745 | static const int32_t frameCount = 0; |
746 | settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1); |
747 | |
748 | /** android.lens */ |
749 | |
750 | static const float focusDistance = 0; |
751 | settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1); |
752 | |
753 | static const float aperture = 2.8f; |
754 | settings.update(ANDROID_LENS_APERTURE, &aperture, 1); |
755 | |
756 | // static const float focalLength = 5.0f; |
757 | static const float focalLength = 3.299999952316284f; |
758 | settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1); |
759 | |
760 | static const float filterDensity = 0; |
761 | settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1); |
762 | |
763 | static const uint8_t opticalStabilizationMode = |
764 | ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; |
765 | settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, |
766 | &opticalStabilizationMode, 1); |
767 | |
768 | // FOCUS_RANGE set only in frame |
769 | |
770 | /** android.sensor */ |
771 | |
772 | static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
773 | settings.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1); |
774 | static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
775 | settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1); |
776 | static const int64_t exposureTime = 10 * MSEC; |
777 | settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1); |
778 | |
779 | int64_t frameDuration = mSensor->getMinFrameDuration(); |
780 | settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1); |
781 | |
782 | static const int32_t sensitivity = 100; |
783 | settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1); |
784 | |
785 | static const int64_t rollingShutterSkew = 0; |
786 | settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1); |
787 | // TIMESTAMP set only in frame |
788 | |
789 | /** android.flash */ |
790 | |
791 | static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE; |
792 | settings.update(ANDROID_FLASH_STATE, &flashstate, 1); |
793 | |
794 | static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; |
795 | settings.update(ANDROID_FLASH_MODE, &flashMode, 1); |
796 | |
797 | static const uint8_t flashPower = 10; |
798 | settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1); |
799 | |
800 | static const int64_t firingTime = 0; |
801 | settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1); |
802 | |
803 | /** Processing block modes */ |
804 | uint8_t hotPixelMode = 0; |
805 | uint8_t demosaicMode = 0; |
806 | uint8_t noiseMode = 0; |
807 | uint8_t shadingMode = 0; |
808 | uint8_t colorMode = 0; |
809 | uint8_t tonemapMode = 0; |
810 | uint8_t edgeMode = 0; |
811 | switch (type) { |
812 | |
813 | case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: |
814 | case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: |
815 | noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY; |
816 | // fall-through |
817 | case CAMERA3_TEMPLATE_STILL_CAPTURE: |
818 | hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY; |
819 | demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY; |
820 | shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY; |
821 | colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY; |
822 | tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY; |
823 | edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY; |
824 | break; |
825 | case CAMERA3_TEMPLATE_PREVIEW: |
826 | // fall-through |
827 | case CAMERA3_TEMPLATE_VIDEO_RECORD: |
828 | // fall-through |
829 | case CAMERA3_TEMPLATE_MANUAL: |
830 | // fall-through |
831 | default: |
832 | hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST; |
833 | demosaicMode = ANDROID_DEMOSAIC_MODE_FAST; |
834 | noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST; |
835 | shadingMode = ANDROID_SHADING_MODE_FAST; |
836 | colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST; |
837 | tonemapMode = ANDROID_TONEMAP_MODE_FAST; |
838 | edgeMode = ANDROID_EDGE_MODE_FAST; |
839 | break; |
840 | } |
841 | settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1); |
842 | settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1); |
843 | settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1); |
844 | settings.update(ANDROID_SHADING_MODE, &shadingMode, 1); |
845 | settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1); |
846 | settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1); |
847 | settings.update(ANDROID_EDGE_MODE, &edgeMode, 1); |
848 | |
849 | /** android.noise */ |
850 | static const uint8_t noiseStrength = 5; |
851 | settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1); |
852 | static uint8_t availableNBModes[] = { |
853 | ANDROID_NOISE_REDUCTION_MODE_OFF, |
854 | ANDROID_NOISE_REDUCTION_MODE_FAST, |
855 | ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY, |
856 | }; |
857 | settings.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, |
858 | availableNBModes, sizeof(availableNBModes)/sizeof(availableNBModes)); |
859 | |
860 | |
861 | /** android.color */ |
862 | #if PLATFORM_SDK_VERSION >= 23 |
863 | static const camera_metadata_rational colorTransform[9] = { |
864 | {1, 1}, {0, 1}, {0, 1}, |
865 | {0, 1}, {1, 1}, {0, 1}, |
866 | {0, 1}, {0, 1}, {1, 1} |
867 | }; |
868 | settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9); |
869 | #else |
870 | static const float colorTransform[9] = { |
871 | 1.0f, 0.f, 0.f, |
872 | 0.f, 1.f, 0.f, |
873 | 0.f, 0.f, 1.f |
874 | }; |
875 | settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9); |
876 | #endif |
877 | /** android.tonemap */ |
878 | static const float tonemapCurve[4] = { |
879 | 0.f, 0.f, |
880 | 1.f, 1.f |
881 | }; |
882 | settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4); |
883 | settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4); |
884 | settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4); |
885 | |
886 | /** android.edge */ |
887 | static const uint8_t edgeStrength = 5; |
888 | settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1); |
889 | |
890 | /** android.scaler */ |
891 | static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; |
892 | settings.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); |
893 | |
894 | static const int32_t cropRegion[] = { |
895 | 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1], |
896 | }; |
897 | settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4); |
898 | |
899 | /** android.jpeg */ |
900 | static const uint8_t jpegQuality = 80; |
901 | settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1); |
902 | |
903 | static const int32_t thumbnailSize[2] = { |
904 | 160, 120 |
905 | }; |
906 | settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); |
907 | |
908 | static const uint8_t thumbnailQuality = 80; |
909 | settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1); |
910 | |
911 | static const double gpsCoordinates[3] = { |
912 | 0, 0, 0 |
913 | }; |
914 | settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); //default 2 value |
915 | |
916 | static const uint8_t gpsProcessingMethod[32] = "None"; |
917 | settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32); |
918 | |
919 | static const int64_t gpsTimestamp = 0; |
920 | settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1); |
921 | |
922 | static const int32_t jpegOrientation = 0; |
923 | settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); |
924 | |
925 | /** android.stats */ |
926 | |
927 | static const uint8_t faceDetectMode = |
928 | ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; |
929 | settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1); |
930 | |
931 | static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF; |
932 | settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1); |
933 | |
934 | static const uint8_t sharpnessMapMode = |
935 | ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF; |
936 | settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1); |
937 | |
938 | static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; |
939 | settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1); |
940 | static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; |
941 | settings.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1); |
942 | static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; |
943 | settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1); |
944 | // faceRectangles, faceScores, faceLandmarks, faceIds, histogram, |
945 | // sharpnessMap only in frames |
946 | |
947 | /** android.control */ |
948 | |
949 | uint8_t controlIntent = 0; |
950 | uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; //default value |
951 | uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; |
952 | uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; |
953 | switch (type) { |
954 | case CAMERA3_TEMPLATE_PREVIEW: |
955 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; |
956 | break; |
957 | case CAMERA3_TEMPLATE_STILL_CAPTURE: |
958 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; |
959 | break; |
960 | case CAMERA3_TEMPLATE_VIDEO_RECORD: |
961 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; |
962 | break; |
963 | case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: |
964 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; |
965 | break; |
966 | case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: |
967 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; |
968 | break; |
969 | case CAMERA3_TEMPLATE_MANUAL: |
970 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL; |
971 | controlMode = ANDROID_CONTROL_MODE_OFF; |
972 | aeMode = ANDROID_CONTROL_AE_MODE_OFF; |
973 | awbMode = ANDROID_CONTROL_AWB_MODE_OFF; |
974 | break; |
975 | default: |
976 | controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; |
977 | break; |
978 | } |
979 | settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); |
980 | settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); |
981 | |
982 | static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; |
983 | settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); |
984 | |
985 | static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; |
986 | settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); |
987 | |
988 | settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); |
989 | |
990 | static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; |
991 | settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); |
992 | |
993 | static const uint8_t aePrecaptureTrigger = |
994 | ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; |
995 | settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1); |
996 | |
997 | static const int32_t mAfTriggerId = 0; |
998 | settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,&mAfTriggerId, 1); |
999 | static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; |
1000 | settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1); |
1001 | |
1002 | static const int32_t controlRegions[5] = { |
1003 | 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1], |
1004 | 1000 |
1005 | }; |
1006 | // settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5); |
1007 | |
1008 | static const int32_t aeExpCompensation = 0; |
1009 | settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); |
1010 | |
1011 | static const int32_t aeTargetFpsRange[2] = { |
1012 | 30, 30 |
1013 | }; |
1014 | settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2); |
1015 | |
1016 | static const uint8_t aeAntibandingMode = |
1017 | ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; |
1018 | settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1); |
1019 | |
1020 | settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); |
1021 | |
1022 | static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; |
1023 | settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); |
1024 | |
1025 | // settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5); |
1026 | |
1027 | uint8_t afMode = 0; |
1028 | switch (type) { |
1029 | case CAMERA3_TEMPLATE_PREVIEW: |
1030 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1031 | break; |
1032 | case CAMERA3_TEMPLATE_STILL_CAPTURE: |
1033 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1034 | break; |
1035 | case CAMERA3_TEMPLATE_VIDEO_RECORD: |
1036 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1037 | //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; |
1038 | break; |
1039 | case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: |
1040 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1041 | //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; |
1042 | break; |
1043 | case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: |
1044 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1045 | //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; |
1046 | break; |
1047 | case CAMERA3_TEMPLATE_MANUAL: |
1048 | afMode = ANDROID_CONTROL_AF_MODE_OFF; |
1049 | break; |
1050 | default: |
1051 | afMode = ANDROID_CONTROL_AF_MODE_AUTO; |
1052 | break; |
1053 | } |
1054 | settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1); |
1055 | |
1056 | static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE; |
1057 | settings.update(ANDROID_CONTROL_AF_STATE,&afstate,1); |
1058 | |
1059 | // settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5); |
1060 | |
1061 | static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED; |
1062 | settings.update(ANDROID_CONTROL_AE_STATE,&aestate,1); |
1063 | static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
1064 | settings.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1); |
1065 | static const uint8_t vstabMode = |
1066 | ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; |
1067 | settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1); |
1068 | |
1069 | // aeState, awbState, afState only in frame |
1070 | |
1071 | mDefaultTemplates[type] = settings.release(); |
1072 | |
1073 | return mDefaultTemplates[type]; |
1074 | } |
1075 | |
1076 | status_t EmulatedFakeCamera3::processCaptureRequest( |
1077 | camera3_capture_request *request) { |
1078 | status_t res; |
1079 | nsecs_t exposureTime; |
1080 | nsecs_t frameDuration; |
1081 | uint32_t sensitivity; |
1082 | uint32_t frameNumber; |
1083 | bool mHaveThumbnail = false; |
1084 | CameraMetadata settings; |
1085 | Buffers *sensorBuffers = NULL; |
1086 | HalBufferVector *buffers = NULL; |
1087 | |
1088 | if (mFlushTag) { |
1089 | DBG_LOGA("already flush, but still send Capture Request .\n"); |
1090 | } |
1091 | |
1092 | { |
1093 | Mutex::Autolock l(mLock); |
1094 | |
1095 | /** Validation */ |
1096 | |
1097 | if (mStatus < STATUS_READY) { |
1098 | ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__, |
1099 | mStatus); |
1100 | return INVALID_OPERATION; |
1101 | } |
1102 | |
1103 | if (request == NULL) { |
1104 | ALOGE("%s: NULL request!", __FUNCTION__); |
1105 | return BAD_VALUE; |
1106 | } |
1107 | |
1108 | frameNumber = request->frame_number; |
1109 | |
1110 | if (request->settings == NULL && mPrevSettings.isEmpty()) { |
1111 | ALOGE("%s: Request %d: NULL settings for first request after" |
1112 | "configureStreams()", __FUNCTION__, frameNumber); |
1113 | return BAD_VALUE; |
1114 | } |
1115 | |
1116 | if (request->input_buffer != NULL && |
1117 | request->input_buffer->stream != mInputStream) { |
1118 | DBG_LOGB("%s: Request %d: Input buffer not from input stream!", |
1119 | __FUNCTION__, frameNumber); |
1120 | DBG_LOGB("%s: Bad stream %p, expected: %p", |
1121 | __FUNCTION__, request->input_buffer->stream, |
1122 | mInputStream); |
1123 | DBG_LOGB("%s: Bad stream type %d, expected stream type %d", |
1124 | __FUNCTION__, request->input_buffer->stream->stream_type, |
1125 | mInputStream ? mInputStream->stream_type : -1); |
1126 | |
1127 | return BAD_VALUE; |
1128 | } |
1129 | |
1130 | if (request->num_output_buffers < 1 || request->output_buffers == NULL) { |
1131 | ALOGE("%s: Request %d: No output buffers provided!", |
1132 | __FUNCTION__, frameNumber); |
1133 | return BAD_VALUE; |
1134 | } |
1135 | |
1136 | // Validate all buffers, starting with input buffer if it's given |
1137 | |
1138 | ssize_t idx; |
1139 | const camera3_stream_buffer_t *b; |
1140 | if (request->input_buffer != NULL) { |
1141 | idx = -1; |
1142 | b = request->input_buffer; |
1143 | } else { |
1144 | idx = 0; |
1145 | b = request->output_buffers; |
1146 | } |
1147 | do { |
1148 | PrivateStreamInfo *priv = |
1149 | static_cast<PrivateStreamInfo*>(b->stream->priv); |
1150 | if (priv == NULL) { |
1151 | ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!", |
1152 | __FUNCTION__, frameNumber, idx); |
1153 | return BAD_VALUE; |
1154 | } |
1155 | #if 0 |
1156 | if (!priv->alive || !priv->registered) { |
1157 | ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream! alive=%d, registered=%d\n", |
1158 | __FUNCTION__, frameNumber, idx, |
1159 | priv->alive, priv->registered); |
1160 | //return BAD_VALUE; |
1161 | } |
1162 | #endif |
1163 | if (b->status != CAMERA3_BUFFER_STATUS_OK) { |
1164 | ALOGE("%s: Request %d: Buffer %zu: Status not OK!", |
1165 | __FUNCTION__, frameNumber, idx); |
1166 | return BAD_VALUE; |
1167 | } |
1168 | if (b->release_fence != -1) { |
1169 | ALOGE("%s: Request %d: Buffer %zu: Has a release fence!", |
1170 | __FUNCTION__, frameNumber, idx); |
1171 | return BAD_VALUE; |
1172 | } |
1173 | if (b->buffer == NULL) { |
1174 | ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!", |
1175 | __FUNCTION__, frameNumber, idx); |
1176 | return BAD_VALUE; |
1177 | } |
1178 | idx++; |
1179 | b = &(request->output_buffers[idx]); |
1180 | } while (idx < (ssize_t)request->num_output_buffers); |
1181 | |
1182 | // TODO: Validate settings parameters |
1183 | |
1184 | /** |
1185 | * Start processing this request |
1186 | */ |
1187 | mStatus = STATUS_ACTIVE; |
1188 | |
1189 | camera_metadata_entry e; |
1190 | |
1191 | if (request->settings == NULL) { |
1192 | settings.acquire(mPrevSettings); |
1193 | } else { |
1194 | settings = request->settings; |
1195 | |
1196 | uint8_t antiBanding = 0; |
1197 | uint8_t effectMode = 0; |
1198 | int exposureCmp = 0; |
1199 | |
1200 | e = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE); |
1201 | if (e.count == 0) { |
1202 | ALOGE("%s: No antibanding entry!", __FUNCTION__); |
1203 | return BAD_VALUE; |
1204 | } |
1205 | antiBanding = e.data.u8[0]; |
1206 | mSensor->setAntiBanding(antiBanding); |
1207 | |
1208 | e = settings.find(ANDROID_CONTROL_EFFECT_MODE); |
1209 | if (e.count == 0) { |
1210 | ALOGE("%s: No antibanding entry!", __FUNCTION__); |
1211 | return BAD_VALUE; |
1212 | } |
1213 | effectMode = e.data.u8[0]; |
1214 | mSensor->setEffect(effectMode); |
1215 | |
1216 | e = settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION); |
1217 | if (e.count == 0) { |
1218 | ALOGE("%s: No exposure entry!", __FUNCTION__); |
1219 | //return BAD_VALUE; |
1220 | } else { |
1221 | exposureCmp = e.data.i32[0]; |
1222 | DBG_LOGB("set expsore compensaton %d\n", exposureCmp); |
1223 | mSensor->setExposure(exposureCmp); |
1224 | } |
1225 | |
1226 | int32_t cropRegion[4]; |
1227 | int32_t cropWidth; |
1228 | int32_t outputWidth = request->output_buffers[0].stream->width; |
1229 | |
1230 | e = settings.find(ANDROID_SCALER_CROP_REGION); |
1231 | if (e.count == 0) { |
1232 | ALOGE("%s: No corp region entry!", __FUNCTION__); |
1233 | //return BAD_VALUE; |
1234 | } else { |
1235 | cropRegion[0] = e.data.i32[0]; |
1236 | cropRegion[1] = e.data.i32[1]; |
1237 | cropWidth = cropRegion[2] = e.data.i32[2]; |
1238 | cropRegion[3] = e.data.i32[3]; |
1239 | for (int i = mZoomMin; i <= mZoomMax; i += mZoomStep) { |
1240 | //if ( (float) i / mZoomMin >= (float) outputWidth / cropWidth) { |
1241 | if ( i * cropWidth >= outputWidth * mZoomMin ) { |
1242 | mSensor->setZoom(i); |
1243 | break; |
1244 | } |
1245 | } |
1246 | DBG_LOGB("cropRegion:%d, %d, %d, %d\n", cropRegion[0], cropRegion[1],cropRegion[2],cropRegion[3]); |
1247 | } |
1248 | } |
1249 | |
1250 | uint8_t len[] = {1}; |
1251 | settings.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1); |
1252 | |
1253 | uint8_t maxlen[] = {0}; |
1254 | settings.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1); |
1255 | |
1256 | res = process3A(settings); |
1257 | if (res != OK) { |
1258 | ALOGVV("%s: process3A failed!", __FUNCTION__); |
1259 | //return res; |
1260 | } |
1261 | |
1262 | // TODO: Handle reprocessing |
1263 | |
1264 | /** |
1265 | * Get ready for sensor config |
1266 | */ |
1267 | |
1268 | bool needJpeg = false; |
1269 | ssize_t jpegbuffersize; |
1270 | uint32_t jpegpixelfmt; |
1271 | |
1272 | exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; |
1273 | frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; |
1274 | sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; |
1275 | |
1276 | sensorBuffers = new Buffers(); |
1277 | buffers = new HalBufferVector(); |
1278 | |
1279 | sensorBuffers->setCapacity(request->num_output_buffers); |
1280 | buffers->setCapacity(request->num_output_buffers); |
1281 | |
1282 | // Process all the buffers we got for output, constructing internal buffer |
1283 | // structures for them, and lock them for writing. |
1284 | for (size_t i = 0; i < request->num_output_buffers; i++) { |
1285 | const camera3_stream_buffer &srcBuf = request->output_buffers[i]; |
1286 | const private_handle_t *privBuffer = |
1287 | (const private_handle_t*)(*srcBuf.buffer); |
1288 | StreamBuffer destBuf; |
1289 | destBuf.streamId = kGenericStreamId; |
1290 | destBuf.width = srcBuf.stream->width; |
1291 | destBuf.height = srcBuf.stream->height; |
1292 | destBuf.format = privBuffer->format; // Use real private format |
1293 | destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc |
1294 | destBuf.buffer = srcBuf.buffer; |
1295 | destBuf.share_fd = privBuffer->share_fd; |
1296 | |
1297 | if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) { |
1298 | needJpeg = true; |
1299 | memset(&info,0,sizeof(struct ExifInfo)); |
1300 | info.orientation = settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; |
1301 | jpegpixelfmt = mSensor->getOutputFormat(); |
1302 | if (!mSupportRotate) { |
1303 | info.mainwidth = srcBuf.stream->width; |
1304 | info.mainheight = srcBuf.stream->height; |
1305 | } else { |
1306 | if ((info.orientation == 90) || (info.orientation == 270)) { |
1307 | info.mainwidth = srcBuf.stream->height; |
1308 | info.mainheight = srcBuf.stream->width; |
1309 | } else { |
1310 | info.mainwidth = srcBuf.stream->width; |
1311 | info.mainheight = srcBuf.stream->height; |
1312 | } |
1313 | } |
1314 | if ((jpegpixelfmt == V4L2_PIX_FMT_MJPEG) || (jpegpixelfmt == V4L2_PIX_FMT_YUYV)) { |
1315 | mSensor->setOutputFormat(info.mainwidth,info.mainheight,jpegpixelfmt,1); |
1316 | } else { |
1317 | mSensor->setOutputFormat(info.mainwidth,info.mainheight,V4L2_PIX_FMT_RGB24,1); |
1318 | } |
1319 | } |
1320 | |
1321 | // Wait on fence |
1322 | sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence); |
1323 | res = bufferAcquireFence->wait(kFenceTimeoutMs); |
1324 | if (res == TIMED_OUT) { |
1325 | ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms", |
1326 | __FUNCTION__, frameNumber, i, kFenceTimeoutMs); |
1327 | } |
1328 | if (res == OK) { |
1329 | // Lock buffer for writing |
1330 | const Rect rect(destBuf.width, destBuf.height); |
1331 | if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) { |
1332 | if (privBuffer->format == HAL_PIXEL_FORMAT_YCbCr_420_888/*HAL_PIXEL_FORMAT_YCrCb_420_SP*/) { |
1333 | android_ycbcr ycbcr = android_ycbcr(); |
1334 | res = GraphicBufferMapper::get().lockYCbCr( |
1335 | *(destBuf.buffer), |
1336 | GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK, |
1337 | rect, |
1338 | &ycbcr); |
1339 | // This is only valid because we know that emulator's |
1340 | // YCbCr_420_888 is really contiguous NV21 under the hood |
1341 | destBuf.img = static_cast<uint8_t*>(ycbcr.y); |
1342 | } else { |
1343 | ALOGE("Unexpected private format for flexible YUV: 0x%x", |
1344 | privBuffer->format); |
1345 | res = INVALID_OPERATION; |
1346 | } |
1347 | } else { |
1348 | res = GraphicBufferMapper::get().lock(*(destBuf.buffer), |
1349 | GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK, |
1350 | rect, |
1351 | (void**)&(destBuf.img)); |
1352 | } |
1353 | if (res != OK) { |
1354 | ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer", |
1355 | __FUNCTION__, frameNumber, i); |
1356 | } |
1357 | } |
1358 | |
1359 | if (res != OK) { |
1360 | // Either waiting or locking failed. Unlock locked buffers and bail |
1361 | // out. |
1362 | for (size_t j = 0; j < i; j++) { |
1363 | GraphicBufferMapper::get().unlock( |
1364 | *(request->output_buffers[i].buffer)); |
1365 | } |
1366 | ALOGE("line:%d, format for this usage: %d x %d, usage %x, format=%x, returned\n", |
1367 | __LINE__, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format); |
1368 | return NO_INIT; |
1369 | } |
1370 | sensorBuffers->push_back(destBuf); |
1371 | buffers->push_back(srcBuf); |
1372 | } |
1373 | |
1374 | if (needJpeg) { |
1375 | if (!mSupportRotate) { |
1376 | info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; |
1377 | info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; |
1378 | } else { |
1379 | if ((info.orientation == 90) || (info.orientation == 270)) { |
1380 | info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; |
1381 | info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; |
1382 | } else { |
1383 | info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; |
1384 | info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; |
1385 | } |
1386 | } |
1387 | if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { |
1388 | info.latitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[0]; |
1389 | info.longitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[1]; |
1390 | info.altitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[2]; |
1391 | info.has_latitude = true; |
1392 | info.has_longitude = true; |
1393 | info.has_altitude = true; |
1394 | } else { |
1395 | info.has_latitude = false; |
1396 | info.has_longitude = false; |
1397 | info.has_altitude = false; |
1398 | } |
1399 | if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { |
1400 | uint8_t * gpsString = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8; |
1401 | memcpy(info.gpsProcessingMethod, gpsString , sizeof(info.gpsProcessingMethod)-1); |
1402 | info.has_gpsProcessingMethod = true; |
1403 | } else { |
1404 | info.has_gpsProcessingMethod = false; |
1405 | } |
1406 | if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { |
1407 | info.gpsTimestamp = settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; |
1408 | info.has_gpsTimestamp = true; |
1409 | } else { |
1410 | info.has_gpsTimestamp = false; |
1411 | } |
1412 | if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { |
1413 | info.focallen = settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; |
1414 | info.has_focallen = true; |
1415 | } else { |
1416 | info.has_focallen = false; |
1417 | } |
1418 | jpegbuffersize = getJpegBufferSize(info.mainwidth,info.mainheight); |
1419 | |
1420 | mJpegCompressor->SetMaxJpegBufferSize(jpegbuffersize); |
1421 | mJpegCompressor->SetExifInfo(info); |
1422 | mSensor->setPictureRotate(info.orientation); |
1423 | if ((info.thumbwidth > 0) && (info.thumbheight > 0)) { |
1424 | mHaveThumbnail = true; |
1425 | } |
1426 | DBG_LOGB("%s::thumbnailSize_width=%d,thumbnailSize_height=%d,mainsize_width=%d,mainsize_height=%d,jpegOrientation=%d",__FUNCTION__, |
1427 | info.thumbwidth,info.thumbheight,info.mainwidth,info.mainheight,info.orientation); |
1428 | } |
1429 | /** |
1430 | * Wait for JPEG compressor to not be busy, if needed |
1431 | */ |
1432 | #if 0 |
1433 | if (needJpeg) { |
1434 | bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs); |
1435 | if (!ready) { |
1436 | ALOGE("%s: Timeout waiting for JPEG compression to complete!", |
1437 | __FUNCTION__); |
1438 | return NO_INIT; |
1439 | } |
1440 | } |
1441 | #else |
1442 | while (needJpeg) { |
1443 | bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs); |
1444 | if (ready) { |
1445 | break; |
1446 | } |
1447 | } |
1448 | #endif |
1449 | } |
1450 | /** |
1451 | * Wait until the in-flight queue has room |
1452 | */ |
1453 | res = mReadoutThread->waitForReadout(); |
1454 | if (res != OK) { |
1455 | ALOGE("%s: Timeout waiting for previous requests to complete!", |
1456 | __FUNCTION__); |
1457 | return NO_INIT; |
1458 | } |
1459 | |
1460 | /** |
1461 | * Wait until sensor's ready. This waits for lengthy amounts of time with |
1462 | * mLock held, but the interface spec is that no other calls may by done to |
1463 | * the HAL by the framework while process_capture_request is happening. |
1464 | */ |
1465 | { |
1466 | Mutex::Autolock l(mLock); |
1467 | int syncTimeoutCount = 0; |
1468 | while (!mSensor->waitForVSync(kSyncWaitTimeout)) { |
1469 | if (mStatus == STATUS_ERROR) { |
1470 | return NO_INIT; |
1471 | } |
1472 | if (syncTimeoutCount == kMaxSyncTimeoutCount) { |
1473 | ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms", |
1474 | __FUNCTION__, frameNumber, |
1475 | kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000); |
1476 | return NO_INIT; |
1477 | } |
1478 | syncTimeoutCount++; |
1479 | } |
1480 | |
1481 | /** |
1482 | * Configure sensor and queue up the request to the readout thread |
1483 | */ |
1484 | mSensor->setExposureTime(exposureTime); |
1485 | mSensor->setFrameDuration(frameDuration); |
1486 | mSensor->setSensitivity(sensitivity); |
1487 | mSensor->setDestinationBuffers(sensorBuffers); |
1488 | mSensor->setFrameNumber(request->frame_number); |
1489 | |
1490 | ReadoutThread::Request r; |
1491 | r.frameNumber = request->frame_number; |
1492 | r.settings = settings; |
1493 | r.sensorBuffers = sensorBuffers; |
1494 | r.buffers = buffers; |
1495 | r.havethumbnail = mHaveThumbnail; |
1496 | |
1497 | mReadoutThread->queueCaptureRequest(r); |
1498 | ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number); |
1499 | |
1500 | // Cache the settings for next time |
1501 | mPrevSettings.acquire(settings); |
1502 | } |
1503 | CAMHAL_LOGVB("%s , X" , __FUNCTION__); |
1504 | return OK; |
1505 | } |
1506 | |
1507 | /** Debug methods */ |
1508 | |
1509 | void EmulatedFakeCamera3::dump(int fd) { |
1510 | |
1511 | String8 result; |
1512 | uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]); |
1513 | result = String8::format("%s, valid resolution\n", __FILE__); |
1514 | |
1515 | for (uint32_t f = 0; f < count; f+=2) { |
1516 | if (mAvailableJpegSize[f] == 0) |
1517 | break; |
1518 | result.appendFormat("width: %d , height =%d\n", |
1519 | mAvailableJpegSize[f], mAvailableJpegSize[f+1]); |
1520 | } |
1521 | result.appendFormat("\nmZoomMin: %d , mZoomMax =%d, mZoomStep=%d\n", |
1522 | mZoomMin, mZoomMax, mZoomStep); |
1523 | |
1524 | if (mZoomStep <= 0) { |
1525 | result.appendFormat("!!!!!!!!!camera apk may have no picture out\n"); |
1526 | } |
1527 | |
1528 | write(fd, result.string(), result.size()); |
1529 | |
1530 | if (mSensor.get() != NULL) { |
1531 | mSensor->dump(fd); |
1532 | } |
1533 | |
1534 | } |
//flush all request
//TODO returned buffers every request held immediately with
//CAMERA3_BUFFER_STATUS_ERROR flag.
// Camera3 flush() entry point: marks the device as flushing, drains the
// readout thread, and clears the flush flags. Always returns 0.
int EmulatedFakeCamera3::flush_all_requests() {
    DBG_LOGA("flush all request");
    // Set before draining so processCaptureRequest() can log when a request
    // arrives during/after a flush (it does not reject such requests).
    mFlushTag = true;
    // Drain pending requests from the readout thread.
    // NOTE(review): the flush flags on the readout thread and sensor are
    // cleared to 'false' immediately afterwards while mFlushTag stays true
    // -- confirm this ordering is intended.
    mReadoutThread->flushAllRequest(true);
    mReadoutThread->setFlushFlag(false);
    mSensor->setFlushFlag(false);
    return 0;
}
1546 | /** Tag query methods */ |
1547 | const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) { |
1548 | return NULL; |
1549 | } |
1550 | |
1551 | const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) { |
1552 | return NULL; |
1553 | } |
1554 | |
// Vendor tags are not implemented by the fake camera: reports type 0 for
// every tag.
int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
    return 0;
}
1558 | |
1559 | /** |
1560 | * Private methods |
1561 | */ |
1562 | |
1563 | camera_metadata_ro_entry_t EmulatedFakeCamera3::staticInfo(const CameraMetadata *info, uint32_t tag, |
1564 | size_t minCount, size_t maxCount, bool required) const { |
1565 | |
1566 | camera_metadata_ro_entry_t entry = info->find(tag); |
1567 | |
1568 | if (CC_UNLIKELY( entry.count == 0 ) && required) { |
1569 | const char* tagSection = get_camera_metadata_section_name(tag); |
1570 | if (tagSection == NULL) tagSection = "<unknown>"; |
1571 | const char* tagName = get_camera_metadata_tag_name(tag); |
1572 | if (tagName == NULL) tagName = "<unknown>"; |
1573 | |
1574 | ALOGE("Error finding static metadata entry '%s.%s' (%x)", |
1575 | tagSection, tagName, tag); |
1576 | } else if (CC_UNLIKELY( |
1577 | (minCount != 0 && entry.count < minCount) || |
1578 | (maxCount != 0 && entry.count > maxCount) ) ) { |
1579 | const char* tagSection = get_camera_metadata_section_name(tag); |
1580 | if (tagSection == NULL) tagSection = "<unknown>"; |
1581 | const char* tagName = get_camera_metadata_tag_name(tag); |
1582 | if (tagName == NULL) tagName = "<unknown>"; |
1583 | ALOGE("Malformed static metadata entry '%s.%s' (%x):" |
1584 | "Expected between %zu and %zu values, but got %zu values", |
1585 | tagSection, tagName, tag, minCount, maxCount, entry.count); |
1586 | } |
1587 | |
1588 | return entry; |
1589 | } |
1590 | |
1591 | //this is only for debug |
1592 | void EmulatedFakeCamera3::getStreamConfigurationp(CameraMetadata *info) { |
1593 | const int STREAM_CONFIGURATION_SIZE = 4; |
1594 | const int STREAM_FORMAT_OFFSET = 0; |
1595 | const int STREAM_WIDTH_OFFSET = 1; |
1596 | const int STREAM_HEIGHT_OFFSET = 2; |
1597 | const int STREAM_IS_INPUT_OFFSET = 3; |
1598 | |
1599 | camera_metadata_ro_entry_t availableStreamConfigs = |
1600 | staticInfo(info, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS); |
1601 | CAMHAL_LOGDB(" stream, availableStreamConfigs.count=%d\n", availableStreamConfigs.count); |
1602 | |
1603 | for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) { |
1604 | int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET]; |
1605 | int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET]; |
1606 | int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET]; |
1607 | int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET]; |
1608 | CAMHAL_LOGDB("f=%x, w*h=%dx%d, du=%d\n", format, width, height, isInput); |
1609 | } |
1610 | |
1611 | } |
1612 | |
1613 | //this is only for debug |
1614 | void EmulatedFakeCamera3::getStreamConfigurationDurations(CameraMetadata *info) { |
1615 | const int STREAM_CONFIGURATION_SIZE = 4; |
1616 | const int STREAM_FORMAT_OFFSET = 0; |
1617 | const int STREAM_WIDTH_OFFSET = 1; |
1618 | const int STREAM_HEIGHT_OFFSET = 2; |
1619 | const int STREAM_IS_INPUT_OFFSET = 3; |
1620 | |
1621 | camera_metadata_ro_entry_t availableStreamConfigs = |
1622 | staticInfo(info, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS); |
1623 | CAMHAL_LOGDB("availableStreamConfigs.count=%d\n", availableStreamConfigs.count); |
1624 | |
1625 | for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) { |
1626 | int64_t format = availableStreamConfigs.data.i64[i + STREAM_FORMAT_OFFSET]; |
1627 | int64_t width = availableStreamConfigs.data.i64[i + STREAM_WIDTH_OFFSET]; |
1628 | int64_t height = availableStreamConfigs.data.i64[i + STREAM_HEIGHT_OFFSET]; |
1629 | int64_t isInput = availableStreamConfigs.data.i64[i + STREAM_IS_INPUT_OFFSET]; |
1630 | CAMHAL_LOGDB("f=%llx, w*h=%lldx%lld, du=%lld\n", format, width, height, isInput); |
1631 | } |
1632 | } |
1633 | |
// Intentionally empty: placeholder hook for adjusting the static metadata;
// the fake camera applies no modifications here.
void EmulatedFakeCamera3::updateCameraMetaData(CameraMetadata *info) {

}
1637 | |
1638 | status_t EmulatedFakeCamera3::constructStaticInfo() { |
1639 | |
1640 | status_t ret = OK; |
1641 | CameraMetadata info; |
1642 | uint32_t picSizes[64 * 8]; |
1643 | int64_t* duration = NULL; |
1644 | int count, duration_count, availablejpegsize; |
1645 | uint8_t maxCount = 10; |
1646 | char property[PROPERTY_VALUE_MAX]; |
1647 | unsigned int supportrotate; |
1648 | availablejpegsize = ARRAY_SIZE(mAvailableJpegSize); |
1649 | memset(mAvailableJpegSize,0,(sizeof(uint32_t))*availablejpegsize); |
1650 | sp<Sensor> s = new Sensor(); |
1651 | ret = s->startUp(mCameraID); |
1652 | if (ret != OK) { |
1653 | DBG_LOGA("sensor start up failed"); |
1654 | return ret; |
1655 | } |
1656 | |
1657 | mSensorType = s->getSensorType(); |
1658 | |
1659 | if ( mSensorType == SENSOR_USB) { |
1660 | char property[PROPERTY_VALUE_MAX]; |
1661 | property_get("rw.camera.usb.faceback", property, "false"); |
1662 | if (strstr(property, "true")) |
1663 | mFacingBack = 1; |
1664 | else |
1665 | mFacingBack = 0; |
1666 | ALOGI("Setting usb camera cameraID:%d to back camera:%s\n", |
1667 | mCameraID, property); |
1668 | } else { |
1669 | if (s->mSensorFace == SENSOR_FACE_FRONT) { |
1670 | mFacingBack = 0; |
1671 | } else if (s->mSensorFace == SENSOR_FACE_BACK) { |
1672 | mFacingBack = 1; |
1673 | } else if (s->mSensorFace == SENSOR_FACE_NONE) { |
1674 | if (gEmulatedCameraFactory.getEmulatedCameraNum() == 1) { |
1675 | mFacingBack = 1; |
1676 | } else if ( mCameraID == 0) { |
1677 | mFacingBack = 1; |
1678 | } else { |
1679 | mFacingBack = 0; |
1680 | } |
1681 | } |
1682 | |
1683 | ALOGI("Setting on board camera cameraID:%d to back camera:%d[0 false, 1 true]\n", |
1684 | mCameraID, mFacingBack); |
1685 | } |
1686 | |
1687 | mSupportCap = s->IoctlStateProbe(); |
1688 | if (mSupportCap & IOCTL_MASK_ROTATE) { |
1689 | supportrotate = true; |
1690 | } else { |
1691 | supportrotate = false; |
1692 | } |
1693 | // android.lens |
1694 | |
1695 | // 5 cm min focus distance for back camera, infinity (fixed focus) for front |
1696 | // TODO read this ioctl from camera driver |
1697 | DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo); |
1698 | const float minFocusDistance = 0.0; |
1699 | info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, |
1700 | &minFocusDistance, 1); |
1701 | |
1702 | // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front |
1703 | const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0; |
1704 | info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, |
1705 | &minFocusDistance, 1); |
1706 | |
1707 | static const float focalLength = 3.30f; // mm |
1708 | info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, |
1709 | &focalLength, 1); |
1710 | static const float aperture = 2.8f; |
1711 | info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, |
1712 | &aperture, 1); |
1713 | static const float filterDensity = 0; |
1714 | info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, |
1715 | &filterDensity, 1); |
1716 | static const uint8_t availableOpticalStabilization = |
1717 | ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; |
1718 | info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, |
1719 | &availableOpticalStabilization, 1); |
1720 | |
1721 | static const int32_t lensShadingMapSize[] = {1, 1}; |
1722 | info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize, |
1723 | sizeof(lensShadingMapSize)/sizeof(int32_t)); |
1724 | |
1725 | uint8_t lensFacing = mFacingBack ? |
1726 | ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; |
1727 | info.update(ANDROID_LENS_FACING, &lensFacing, 1); |
1728 | |
1729 | float lensPosition[3]; |
1730 | if (mFacingBack) { |
1731 | // Back-facing camera is center-top on device |
1732 | lensPosition[0] = 0; |
1733 | lensPosition[1] = 20; |
1734 | lensPosition[2] = -5; |
1735 | } else { |
1736 | // Front-facing camera is center-right on device |
1737 | lensPosition[0] = 20; |
1738 | lensPosition[1] = 20; |
1739 | lensPosition[2] = 0; |
1740 | } |
1741 | #if PLATFORM_SDK_VERSION <= 22 |
1742 | info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/ |
1743 | sizeof(float)); |
1744 | #endif |
1745 | static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED; |
1746 | info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1); |
1747 | |
1748 | // android.sensor |
1749 | |
1750 | static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
1751 | info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1); |
1752 | static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; |
1753 | info.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1); |
1754 | info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, |
1755 | Sensor::kExposureTimeRange, 2); |
1756 | |
1757 | info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, |
1758 | &Sensor::kFrameDurationRange[1], 1); |
1759 | |
1760 | info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, |
1761 | Sensor::kSensitivityRange, |
1762 | sizeof(Sensor::kSensitivityRange) |
1763 | /sizeof(int32_t)); |
1764 | |
1765 | info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, |
1766 | &Sensor::kColorFilterArrangement, 1); |
1767 | |
1768 | static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm |
1769 | info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, |
1770 | sensorPhysicalSize, 2); |
1771 | |
1772 | info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, |
1773 | (int32_t*)Sensor::kResolution, 2); |
1774 | |
1775 | //(int32_t*)Sensor::kResolution, 2); |
1776 | |
1777 | info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, |
1778 | (int32_t*)&Sensor::kMaxRawValue, 1); |
1779 | |
1780 | static const int32_t blackLevelPattern[4] = { |
1781 | (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel, |
1782 | (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel |
1783 | }; |
1784 | info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, |
1785 | blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t)); |
1786 | |
1787 | static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN; |
1788 | info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, ×tampSource, 1); |
1789 | if (mSensorType == SENSOR_USB) { |
1790 | if (mFacingBack) { |
1791 | property_get("hw.camera.orientation.back", property, "0"); |
1792 | } else { |
1793 | property_get("hw.camera.orientation.front", property, "0"); |
1794 | } |
1795 | int32_t orientation = atoi(property); |
1796 | property_get("hw.camera.usb.orientation_offset", property, "0"); |
1797 | orientation += atoi(property); |
1798 | orientation %= 360; |
1799 | info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1); |
1800 | } else { |
1801 | if (mFacingBack) { |
1802 | property_get("hw.camera.orientation.back", property, "270"); |
1803 | const int32_t orientation = atoi(property); |
1804 | info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1); |
1805 | } else { |
1806 | property_get("hw.camera.orientation.front", property, "90"); |
1807 | const int32_t orientation = atoi(property); |
1808 | info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1); |
1809 | } |
1810 | } |
1811 | |
1812 | static const int64_t rollingShutterSkew = 0; |
1813 | info.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1); |
1814 | |
1815 | //TODO: sensor color calibration fields |
1816 | |
1817 | // android.flash |
1818 | static const uint8_t flashAvailable = 0; |
1819 | info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1); |
1820 | |
1821 | static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE; |
1822 | info.update(ANDROID_FLASH_STATE, &flashstate, 1); |
1823 | |
1824 | static const int64_t flashChargeDuration = 0; |
1825 | info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1); |
1826 | |
1827 | /** android.noise */ |
1828 | static const uint8_t availableNBModes = ANDROID_NOISE_REDUCTION_MODE_OFF; |
1829 | info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &availableNBModes, 1); |
1830 | |
1831 | // android.tonemap |
1832 | |
1833 | static const int32_t tonemapCurvePoints = 128; |
1834 | info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1); |
1835 | |
1836 | // android.scaler |
1837 | |
1838 | static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; |
1839 | info.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); |
1840 | |
1841 | info.update(ANDROID_SCALER_AVAILABLE_FORMATS, |
1842 | kAvailableFormats, |
1843 | sizeof(kAvailableFormats)/sizeof(int32_t)); |
1844 | |
1845 | info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, |
1846 | (int64_t*)kAvailableRawMinDurations, |
1847 | sizeof(kAvailableRawMinDurations)/sizeof(uint64_t)); |
1848 | |
1849 | //for version 3.2 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS |
1850 | count = sizeof(picSizes)/sizeof(picSizes[0]); |
1851 | count = s->getStreamConfigurations(picSizes, kAvailableFormats, count); |
1852 | |
1853 | info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, |
1854 | (int32_t*)picSizes, count); |
1855 | |
1856 | if (count < availablejpegsize) { |
1857 | availablejpegsize = count; |
1858 | } |
1859 | getValidJpegSize(picSizes,mAvailableJpegSize,availablejpegsize); |
1860 | |
1861 | maxJpegResolution = getMaxJpegResolution(picSizes,count); |
1862 | int32_t full_size[4]; |
1863 | if (mFacingBack) { |
1864 | full_size[0] = 0; |
1865 | full_size[1] = 0; |
1866 | full_size[2] = maxJpegResolution.width; |
1867 | full_size[3] = maxJpegResolution.height; |
1868 | } else { |
1869 | full_size[0] = 0; |
1870 | full_size[1] = 0; |
1871 | full_size[2] = maxJpegResolution.width; |
1872 | full_size[3] = maxJpegResolution.height; |
1873 | } |
1874 | info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, |
1875 | (int32_t*)full_size, |
1876 | sizeof(full_size)/sizeof(full_size[0])); |
1877 | duration = new int64_t[count]; |
1878 | if (duration == NULL) { |
1879 | DBG_LOGA("allocate memory for duration failed"); |
1880 | return NO_MEMORY; |
1881 | } else { |
1882 | memset(duration,0,sizeof(int64_t)*count); |
1883 | } |
1884 | duration_count = s->getStreamConfigurationDurations(picSizes, duration , count); |
1885 | |
1886 | info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, |
1887 | duration, duration_count); |
1888 | info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, |
1889 | duration, duration_count); |
1890 | |
1891 | info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, |
1892 | (int64_t*)kAvailableProcessedMinDurations, |
1893 | sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t)); |
1894 | |
1895 | info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, |
1896 | (int64_t*)kAvailableJpegMinDurations, |
1897 | sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t)); |
1898 | |
1899 | |
1900 | // android.jpeg |
1901 | |
1902 | static const int32_t jpegThumbnailSizes[] = { |
1903 | 0, 0, |
1904 | 160, 120, |
1905 | 320, 240 |
1906 | }; |
1907 | info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, |
1908 | jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t)); |
1909 | |
1910 | static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize; |
1911 | info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); |
1912 | |
1913 | // android.stats |
1914 | |
1915 | static const uint8_t availableFaceDetectModes[] = { |
1916 | ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, |
1917 | ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, |
1918 | ANDROID_STATISTICS_FACE_DETECT_MODE_FULL |
1919 | }; |
1920 | |
1921 | info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, |
1922 | availableFaceDetectModes, |
1923 | sizeof(availableFaceDetectModes)); |
1924 | |
1925 | static const int32_t maxFaceCount = 8; |
1926 | info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, |
1927 | &maxFaceCount, 1); |
1928 | |
1929 | static const int32_t histogramSize = 64; |
1930 | info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, |
1931 | &histogramSize, 1); |
1932 | |
1933 | static const int32_t maxHistogramCount = 1000; |
1934 | info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, |
1935 | &maxHistogramCount, 1); |
1936 | |
1937 | static const int32_t sharpnessMapSize[2] = {64, 64}; |
1938 | info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, |
1939 | sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t)); |
1940 | |
1941 | static const int32_t maxSharpnessMapValue = 1000; |
1942 | info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, |
1943 | &maxSharpnessMapValue, 1); |
1944 | static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; |
1945 | info.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1); |
1946 | |
1947 | static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; |
1948 | info.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1); |
1949 | static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; |
1950 | info.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1); |
1951 | // android.control |
1952 | |
1953 | static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; |
1954 | info.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); |
1955 | |
1956 | static const uint8_t availableSceneModes[] = { |
1957 | // ANDROID_CONTROL_SCENE_MODE_DISABLED, |
1958 | ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY |
1959 | }; |
1960 | info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, |
1961 | availableSceneModes, sizeof(availableSceneModes)); |
1962 | |
1963 | static const uint8_t availableEffects[] = { |
1964 | ANDROID_CONTROL_EFFECT_MODE_OFF |
1965 | }; |
1966 | info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, |
1967 | availableEffects, sizeof(availableEffects)); |
1968 | |
1969 | static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0}; |
1970 | info.update(ANDROID_CONTROL_MAX_REGIONS, |
1971 | max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0])); |
1972 | |
1973 | static const uint8_t availableAeModes[] = { |
1974 | ANDROID_CONTROL_AE_MODE_OFF, |
1975 | ANDROID_CONTROL_AE_MODE_ON |
1976 | }; |
1977 | info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, |
1978 | availableAeModes, sizeof(availableAeModes)); |
1979 | |
1980 | |
1981 | static const int32_t availableTargetFpsRanges[] = { |
1982 | 5, 15, 15, 15, 5, 25, 25, 25, 5, 30, 30, 30, |
1983 | }; |
1984 | info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, |
1985 | availableTargetFpsRanges, |
1986 | sizeof(availableTargetFpsRanges)/sizeof(int32_t)); |
1987 | |
1988 | uint8_t awbModes[maxCount]; |
1989 | count = s->getAWB(awbModes, maxCount); |
1990 | if (count < 0) { |
1991 | static const uint8_t availableAwbModes[] = { |
1992 | ANDROID_CONTROL_AWB_MODE_OFF, |
1993 | ANDROID_CONTROL_AWB_MODE_AUTO, |
1994 | ANDROID_CONTROL_AWB_MODE_INCANDESCENT, |
1995 | ANDROID_CONTROL_AWB_MODE_FLUORESCENT, |
1996 | ANDROID_CONTROL_AWB_MODE_DAYLIGHT, |
1997 | ANDROID_CONTROL_AWB_MODE_SHADE |
1998 | }; |
1999 | info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, |
2000 | availableAwbModes, sizeof(availableAwbModes)); |
2001 | } else { |
2002 | DBG_LOGB("getAWB %d ",count); |
2003 | info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, |
2004 | awbModes, count); |
2005 | } |
2006 | |
2007 | static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE; |
2008 | info.update(ANDROID_CONTROL_AF_STATE,&afstate,1); |
2009 | |
2010 | static const uint8_t availableAfModesFront[] = { |
2011 | ANDROID_CONTROL_AF_MODE_OFF |
2012 | }; |
2013 | |
2014 | if (mFacingBack) { |
2015 | uint8_t afMode[maxCount]; |
2016 | count = s->getAutoFocus(afMode, maxCount); |
2017 | if (count < 0) { |
2018 | static const uint8_t availableAfModesBack[] = { |
2019 | ANDROID_CONTROL_AF_MODE_OFF, |
2020 | //ANDROID_CONTROL_AF_MODE_AUTO, |
2021 | //ANDROID_CONTROL_AF_MODE_MACRO, |
2022 | //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, |
2023 | //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, |
2024 | }; |
2025 | |
2026 | info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, |
2027 | availableAfModesBack, sizeof(availableAfModesBack)); |
2028 | } else { |
2029 | info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, |
2030 | afMode, count); |
2031 | } |
2032 | } else { |
2033 | info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, |
2034 | availableAfModesFront, sizeof(availableAfModesFront)); |
2035 | } |
2036 | |
2037 | uint8_t antiBanding[maxCount]; |
2038 | count = s->getAntiBanding(antiBanding, maxCount); |
2039 | if (count < 0) { |
2040 | static const uint8_t availableAntibanding[] = { |
2041 | ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, |
2042 | ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, |
2043 | }; |
2044 | info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, |
2045 | availableAntibanding, sizeof(availableAntibanding)); |
2046 | } else { |
2047 | info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, |
2048 | antiBanding, count); |
2049 | } |
2050 | |
2051 | camera_metadata_rational step; |
2052 | int maxExp, minExp, def; |
2053 | ret = s->getExposure(&maxExp, &minExp, &def, &step); |
2054 | if (ret < 0) { |
2055 | static const int32_t aeExpCompensation = 0; |
2056 | info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); |
2057 | |
2058 | static const camera_metadata_rational exposureCompensationStep = { |
2059 | 1, 3 |
2060 | }; |
2061 | info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, |
2062 | &exposureCompensationStep, 1); |
2063 | |
2064 | int32_t exposureCompensationRange[] = {0, 0}; |
2065 | info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, |
2066 | exposureCompensationRange, |
2067 | sizeof(exposureCompensationRange)/sizeof(int32_t)); |
2068 | } else { |
2069 | DBG_LOGB("exposure compensation support:(%d, %d)\n", minExp, maxExp); |
2070 | int32_t exposureCompensationRange[] = {minExp, maxExp}; |
2071 | info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, |
2072 | exposureCompensationRange, |
2073 | sizeof(exposureCompensationRange)/sizeof(int32_t)); |
2074 | info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, |
2075 | &step, 1); |
2076 | info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &def, 1); |
2077 | } |
2078 | |
2079 | ret = s->getZoom(&mZoomMin, &mZoomMax, &mZoomStep); |
2080 | if (ret < 0) { |
2081 | float maxZoom = 1.0; |
2082 | info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, |
2083 | &maxZoom, 1); |
2084 | } else { |
2085 | if (mZoomMin != 0) { |
2086 | float maxZoom = mZoomMax / mZoomMin; |
2087 | info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, |
2088 | &maxZoom, 1); |
2089 | } else { |
2090 | float maxZoom = 1.0; |
2091 | info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, |
2092 | &maxZoom, 1); |
2093 | } |
2094 | } |
2095 | |
2096 | static const uint8_t availableVstabModes[] = { |
2097 | ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF |
2098 | }; |
2099 | info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, |
2100 | availableVstabModes, sizeof(availableVstabModes)); |
2101 | |
2102 | static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED; |
2103 | info.update(ANDROID_CONTROL_AE_STATE,&aestate,1); |
2104 | static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
2105 | info.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1); |
2106 | // android.info |
2107 | const uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; |
2108 | //mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL : |
2109 | // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; |
2110 | info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, |
2111 | &supportedHardwareLevel, |
2112 | /*count*/1); |
2113 | |
2114 | int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN; |
2115 | info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1); |
2116 | |
2117 | uint8_t len[] = {1}; |
2118 | info.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1); |
2119 | |
2120 | uint8_t maxlen[] = {2}; |
2121 | info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1); |
2122 | uint8_t cap[] = { |
2123 | ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE, |
2124 | }; |
2125 | info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, |
2126 | (uint8_t *)cap, sizeof(cap)/sizeof(cap[0])); |
2127 | |
2128 | |
2129 | int32_t partialResultCount = 1; |
2130 | info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,&partialResultCount,1); |
2131 | int32_t maxNumOutputStreams[3] = {0,2,1}; |
2132 | info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,maxNumOutputStreams,3); |
2133 | uint8_t aberrationMode[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF}; |
2134 | info.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, |
2135 | aberrationMode, 1); |
2136 | info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, |
2137 | aberrationMode, 1); |
2138 | |
2139 | getAvailableChKeys(&info, supportedHardwareLevel); |
2140 | |
2141 | if (mCameraInfo != NULL) { |
2142 | CAMHAL_LOGDA("mCameraInfo is not null, mem leak?"); |
2143 | } |
2144 | mCameraInfo = info.release(); |
2145 | DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo); |
2146 | |
2147 | if (duration != NULL) { |
2148 | delete [] duration; |
2149 | } |
2150 | |
2151 | s->shutDown(); |
2152 | s.clear(); |
2153 | mPlugged = true; |
2154 | |
2155 | return OK; |
2156 | } |
2157 | |
2158 | status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) { |
2159 | /** |
2160 | * Extract top-level 3A controls |
2161 | */ |
2162 | status_t res; |
2163 | |
2164 | bool facePriority = false; |
2165 | |
2166 | camera_metadata_entry e; |
2167 | |
2168 | e = settings.find(ANDROID_CONTROL_MODE); |
2169 | if (e.count == 0) { |
2170 | ALOGE("%s: No control mode entry!", __FUNCTION__); |
2171 | return BAD_VALUE; |
2172 | } |
2173 | uint8_t controlMode = e.data.u8[0]; |
2174 | |
2175 | e = settings.find(ANDROID_CONTROL_SCENE_MODE); |
2176 | if (e.count == 0) { |
2177 | ALOGE("%s: No scene mode entry!", __FUNCTION__); |
2178 | return BAD_VALUE; |
2179 | } |
2180 | uint8_t sceneMode = e.data.u8[0]; |
2181 | |
2182 | if (controlMode == ANDROID_CONTROL_MODE_OFF) { |
2183 | mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; |
2184 | mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE; |
2185 | mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
2186 | update3A(settings); |
2187 | return OK; |
2188 | } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) { |
2189 | switch(sceneMode) { |
2190 | case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY: |
2191 | mFacePriority = true; |
2192 | break; |
2193 | default: |
2194 | ALOGE("%s: Emulator doesn't support scene mode %d", |
2195 | __FUNCTION__, sceneMode); |
2196 | return BAD_VALUE; |
2197 | } |
2198 | } else { |
2199 | mFacePriority = false; |
2200 | } |
2201 | |
2202 | // controlMode == AUTO or sceneMode = FACE_PRIORITY |
2203 | // Process individual 3A controls |
2204 | |
2205 | res = doFakeAE(settings); |
2206 | if (res != OK) return res; |
2207 | |
2208 | res = doFakeAF(settings); |
2209 | if (res != OK) return res; |
2210 | |
2211 | res = doFakeAWB(settings); |
2212 | if (res != OK) return res; |
2213 | |
2214 | update3A(settings); |
2215 | return OK; |
2216 | } |
2217 | |
2218 | status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) { |
2219 | camera_metadata_entry e; |
2220 | |
2221 | e = settings.find(ANDROID_CONTROL_AE_MODE); |
2222 | if (e.count == 0) { |
2223 | ALOGE("%s: No AE mode entry!", __FUNCTION__); |
2224 | return BAD_VALUE; |
2225 | } |
2226 | uint8_t aeMode = e.data.u8[0]; |
2227 | |
2228 | switch (aeMode) { |
2229 | case ANDROID_CONTROL_AE_MODE_OFF: |
2230 | // AE is OFF |
2231 | mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE; |
2232 | return OK; |
2233 | case ANDROID_CONTROL_AE_MODE_ON: |
2234 | // OK for AUTO modes |
2235 | break; |
2236 | default: |
2237 | ALOGVV("%s: Emulator doesn't support AE mode %d", |
2238 | __FUNCTION__, aeMode); |
2239 | return BAD_VALUE; |
2240 | } |
2241 | |
2242 | e = settings.find(ANDROID_CONTROL_AE_LOCK); |
2243 | if (e.count == 0) { |
2244 | ALOGE("%s: No AE lock entry!", __FUNCTION__); |
2245 | return BAD_VALUE; |
2246 | } |
2247 | bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON); |
2248 | |
2249 | e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER); |
2250 | bool precaptureTrigger = false; |
2251 | if (e.count != 0) { |
2252 | precaptureTrigger = |
2253 | (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START); |
2254 | } |
2255 | |
2256 | if (precaptureTrigger) { |
2257 | ALOGVV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger); |
2258 | } else if (e.count > 0) { |
2259 | ALOGVV("%s: Pre capture trigger was present? %zu", |
2260 | __FUNCTION__, |
2261 | e.count); |
2262 | } |
2263 | |
2264 | if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) { |
2265 | // Run precapture sequence |
2266 | if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) { |
2267 | mAeCounter = 0; |
2268 | } |
2269 | |
2270 | if (mFacePriority) { |
2271 | mAeTargetExposureTime = kFacePriorityExposureTime; |
2272 | } else { |
2273 | mAeTargetExposureTime = kNormalExposureTime; |
2274 | } |
2275 | |
2276 | if (mAeCounter > kPrecaptureMinFrames && |
2277 | (mAeTargetExposureTime - mAeCurrentExposureTime) < |
2278 | mAeTargetExposureTime / 10) { |
2279 | // Done with precapture |
2280 | mAeCounter = 0; |
2281 | mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED : |
2282 | ANDROID_CONTROL_AE_STATE_CONVERGED; |
2283 | } else { |
2284 | // Converge some more |
2285 | mAeCurrentExposureTime += |
2286 | (mAeTargetExposureTime - mAeCurrentExposureTime) * |
2287 | kExposureTrackRate; |
2288 | mAeCounter++; |
2289 | mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE; |
2290 | } |
2291 | |
2292 | } else if (!aeLocked) { |
2293 | // Run standard occasional AE scan |
2294 | switch (mAeState) { |
2295 | case ANDROID_CONTROL_AE_STATE_CONVERGED: |
2296 | case ANDROID_CONTROL_AE_STATE_INACTIVE: |
2297 | mAeCounter++; |
2298 | if (mAeCounter > kStableAeMaxFrames) { |
2299 | mAeTargetExposureTime = |
2300 | mFacePriority ? kFacePriorityExposureTime : |
2301 | kNormalExposureTime; |
2302 | float exposureStep = ((double)rand() / RAND_MAX) * |
2303 | (kExposureWanderMax - kExposureWanderMin) + |
2304 | kExposureWanderMin; |
2305 | mAeTargetExposureTime *= std::pow(2, exposureStep); |
2306 | mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING; |
2307 | } |
2308 | break; |
2309 | case ANDROID_CONTROL_AE_STATE_SEARCHING: |
2310 | mAeCurrentExposureTime += |
2311 | (mAeTargetExposureTime - mAeCurrentExposureTime) * |
2312 | kExposureTrackRate; |
2313 | if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) < |
2314 | mAeTargetExposureTime / 10) { |
2315 | // Close enough |
2316 | mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED; |
2317 | mAeCounter = 0; |
2318 | } |
2319 | break; |
2320 | case ANDROID_CONTROL_AE_STATE_LOCKED: |
2321 | mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED; |
2322 | mAeCounter = 0; |
2323 | break; |
2324 | default: |
2325 | ALOGE("%s: Emulator in unexpected AE state %d", |
2326 | __FUNCTION__, mAeState); |
2327 | return INVALID_OPERATION; |
2328 | } |
2329 | } else { |
2330 | // AE is locked |
2331 | mAeState = ANDROID_CONTROL_AE_STATE_LOCKED; |
2332 | } |
2333 | |
2334 | return OK; |
2335 | } |
2336 | |
/**
 * Simulated auto-focus (AF) processing for one capture request.
 *
 * Reads the AF mode and trigger from the request, forwards supported modes
 * to the sensor, then advances the HAL3 AF state machine by at most one
 * transition per frame.  Scan results are randomized: rand() % 3 makes
 * focusing succeed roughly 2 out of 3 times.  Front-facing cameras are
 * treated as fixed-focus (mode forced to OFF).
 *
 * @param settings capture request settings to inspect (AF mode, trigger,
 *                 trigger ID).
 * @return OK on success, BAD_VALUE for missing/unsupported entries or an
 *         unknown trigger value.
 */
status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AF_MODE);
    if (e.count == 0) {
        ALOGE("%s: No AF mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t afMode = e.data.u8[0];

    e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
    typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
    af_trigger_t afTrigger;
    // If we have an afTrigger, afTriggerId should be set too
    if (e.count != 0) {
        afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);

        e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);

        if (e.count == 0) {
            ALOGE("%s: When android.control.afTrigger is set "
                    " in the request, afTriggerId needs to be set as well",
                    __FUNCTION__);
            return BAD_VALUE;
        }

        // Remember the ID so it can be echoed back in the capture result.
        mAfTriggerId = e.data.i32[0];

        ALOGVV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
        ALOGVV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
        ALOGVV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
    } else {
        afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    }
    // Front camera is treated as fixed-focus: ignore the requested AF mode.
    if (!mFacingBack) {
        afMode = ANDROID_CONTROL_AF_MODE_OFF;
    }

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_OFF:
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_MACRO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            if (!mFacingBack) {
                ALOGE("%s: Front camera doesn't support AF mode %d",
                        __FUNCTION__, afMode);
                return BAD_VALUE;
            }
            // Push the mode down to the (fake) sensor.  NOTE: "Focuas" is
            // the (misspelled) name of the Sensor API, kept as-is.
            mSensor->setAutoFocuas(afMode);
            // OK, handle transitions lower on
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }
#if 0
    e = settings.find(ANDROID_CONTROL_AF_REGIONS);
    if (e.count == 0) {
        ALOGE("%s:Get ANDROID_CONTROL_AF_REGIONS failed\n", __FUNCTION__);
        return BAD_VALUE;
    }
    int32_t x0 = e.data.i32[0];
    int32_t y0 = e.data.i32[1];
    int32_t x1 = e.data.i32[2];
    int32_t y1 = e.data.i32[3];
    mSensor->setFocuasArea(x0, y0, x1, y1);
    DBG_LOGB(" x0:%d, y0:%d,x1:%d,y1:%d,\n", x0, y0, x1, y1);
#endif


    // A mode change resets the "at least one frame in INACTIVE" behavior
    // below, so record whether this request changed the mode.
    bool afModeChanged = mAfMode != afMode;
    mAfMode = afMode;

    /**
     * Simulate AF triggers. Transition at most 1 state per frame.
     * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
     */

    bool afTriggerStart = false;
    bool afTriggerCancel = false;
    switch (afTrigger) {
        case ANDROID_CONTROL_AF_TRIGGER_IDLE:
            break;
        case ANDROID_CONTROL_AF_TRIGGER_START:
            afTriggerStart = true;
            break;
        case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
            afTriggerCancel = true;
            // Cancel trigger always transitions into INACTIVE
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

            ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);

            // Stay in 'inactive' until at least next frame
            return OK;
        default:
            ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
            return BAD_VALUE;
    }

    // If we get down here, we're either in an autofocus mode
    // or in a continuous focus mode (and no other modes)

    // Kept only for the transition log at the bottom of the function.
    int oldAfState = mAfState;
    switch (mAfState) {
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                        break;
                }
            } else {
                // At least one frame stays in INACTIVE
                if (!afModeChanged) {
                    switch (afMode) {
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                            // fall-through
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                            mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
                            break;
                    }
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
            /**
             * When the AF trigger is activated, the algorithm should finish
             * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
             * or AF_NOT_FOCUSED as appropriate
             */
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            /**
             * When the AF trigger is not involved, the AF algorithm should
             * start in INACTIVE state, and then transition into PASSIVE_SCAN
             * and PASSIVE_FOCUSED states
             */
            else if (!afTriggerCancel) {
                // Randomly transition to passive focus
                if (rand() % 3 == 0) {
                    mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
            }

            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            // TODO: initiate passive scan (PASSIVE_SCAN)
            break;
        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
            // Simulate AF sweep completing instantaneously

            // Randomly transition to focused or not focused
            if (rand() % 3) {
                mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
            } else {
                mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
            }
            break;
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        default:
            ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
    }

    // Log the state transition (verbose builds only).
    {
        char afStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                oldAfState,
                afStateString,
                sizeof(afStateString));

        char afNewStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                mAfState,
                afNewStateString,
                sizeof(afNewStateString));
        ALOGVV("%s: AF state transitioned from %s to %s",
                __FUNCTION__, afStateString, afNewStateString);
    }


    return OK;
}
2576 | |
2577 | status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) { |
2578 | camera_metadata_entry e; |
2579 | |
2580 | e = settings.find(ANDROID_CONTROL_AWB_MODE); |
2581 | if (e.count == 0) { |
2582 | ALOGE("%s: No AWB mode entry!", __FUNCTION__); |
2583 | return BAD_VALUE; |
2584 | } |
2585 | uint8_t awbMode = e.data.u8[0]; |
2586 | //DBG_LOGB(" awbMode%d\n", awbMode); |
2587 | |
2588 | // TODO: Add white balance simulation |
2589 | |
2590 | switch (awbMode) { |
2591 | case ANDROID_CONTROL_AWB_MODE_OFF: |
2592 | mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE; |
2593 | return OK; |
2594 | case ANDROID_CONTROL_AWB_MODE_AUTO: |
2595 | case ANDROID_CONTROL_AWB_MODE_INCANDESCENT: |
2596 | case ANDROID_CONTROL_AWB_MODE_FLUORESCENT: |
2597 | case ANDROID_CONTROL_AWB_MODE_DAYLIGHT: |
2598 | case ANDROID_CONTROL_AWB_MODE_SHADE: |
2599 | mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; //add for cts |
2600 | return mSensor->setAWB(awbMode); |
2601 | // OK |
2602 | break; |
2603 | default: |
2604 | ALOGE("%s: Emulator doesn't support AWB mode %d", |
2605 | __FUNCTION__, awbMode); |
2606 | return BAD_VALUE; |
2607 | } |
2608 | |
2609 | return OK; |
2610 | } |
2611 | |
2612 | |
/**
 * Copies the current 3A (AE/AF/AWB) state into the result metadata for a
 * capture. When AE is active, also reports the exposure time and sensitivity
 * the fake AE loop chose, so the framework sees consistent 3A output.
 *
 * @param settings result metadata to update in place.
 */
void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
    // Only report AE-chosen exposure values while AE is actually running;
    // in INACTIVE state the request's own values are left untouched.
    if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
        settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
                &mAeCurrentExposureTime, 1);
        settings.update(ANDROID_SENSOR_SENSITIVITY,
                &mAeCurrentSensitivity, 1);
    }

    settings.update(ANDROID_CONTROL_AE_STATE,
            &mAeState, 1);
    settings.update(ANDROID_CONTROL_AF_STATE,
            &mAfState, 1);
    settings.update(ANDROID_CONTROL_AWB_STATE,
            &mAwbState, 1);
    /**
     * TODO: Trigger IDs need a think-through
     */
    settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
            &mAfTriggerId, 1);
}
2633 | |
2634 | void EmulatedFakeCamera3::signalReadoutIdle() { |
2635 | Mutex::Autolock l(mLock); |
2636 | CAMHAL_LOGVB("%s , E" , __FUNCTION__); |
2637 | // Need to chek isIdle again because waiting on mLock may have allowed |
2638 | // something to be placed in the in-flight queue. |
2639 | if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) { |
2640 | ALOGV("Now idle"); |
2641 | mStatus = STATUS_READY; |
2642 | } |
2643 | CAMHAL_LOGVB("%s , X , mStatus = %d " , __FUNCTION__, mStatus); |
2644 | } |
2645 | |
2646 | void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e, |
2647 | nsecs_t timestamp) { |
2648 | switch(e) { |
2649 | case Sensor::SensorListener::EXPOSURE_START: { |
2650 | ALOGVV("%s: Frame %d: Sensor started exposure at %lld", |
2651 | __FUNCTION__, frameNumber, timestamp); |
2652 | // Trigger shutter notify to framework |
2653 | camera3_notify_msg_t msg; |
2654 | msg.type = CAMERA3_MSG_SHUTTER; |
2655 | msg.message.shutter.frame_number = frameNumber; |
2656 | msg.message.shutter.timestamp = timestamp; |
2657 | sendNotify(&msg); |
2658 | break; |
2659 | } |
2660 | case Sensor::SensorListener::ERROR_CAMERA_DEVICE: { |
2661 | camera3_notify_msg_t msg; |
2662 | msg.type = CAMERA3_MSG_ERROR; |
2663 | msg.message.error.frame_number = frameNumber; |
2664 | msg.message.error.error_stream = NULL; |
2665 | msg.message.error.error_code = 1; |
2666 | sendNotify(&msg); |
2667 | break; |
2668 | } |
2669 | default: |
2670 | ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__, |
2671 | e, timestamp); |
2672 | break; |
2673 | } |
2674 | } |
2675 | |
2676 | EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) : |
2677 | mParent(parent), mJpegWaiting(false) { |
2678 | mExitReadoutThread = false; |
2679 | mFlushFlag = false; |
2680 | } |
2681 | |
2682 | EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() { |
2683 | for (List<Request>::iterator i = mInFlightQueue.begin(); |
2684 | i != mInFlightQueue.end(); i++) { |
2685 | delete i->buffers; |
2686 | delete i->sensorBuffers; |
2687 | } |
2688 | } |
2689 | |
2690 | status_t EmulatedFakeCamera3::ReadoutThread::flushAllRequest(bool flag) { |
2691 | status_t res; |
2692 | mFlushFlag = flag; |
2693 | Mutex::Autolock l(mLock); |
2694 | CAMHAL_LOGDB("count = %d" , mInFlightQueue.size()); |
2695 | if (mInFlightQueue.size() > 0) { |
2696 | mParent->mSensor->setFlushFlag(true); |
2697 | res = mFlush.waitRelative(mLock, kSyncWaitTimeout * 15); |
2698 | if (res != OK && res != TIMED_OUT) { |
2699 | ALOGE("%s: Error waiting for mFlush singnal : %d", |
2700 | __FUNCTION__, res); |
2701 | return INVALID_OPERATION; |
2702 | } |
2703 | DBG_LOGA("finish flush all request"); |
2704 | } |
2705 | return 0; |
2706 | } |
2707 | |
// Wakes up flushAllRequest() once the in-flight queue has drained.
// (The "Singnal" typo is part of the public name and kept for callers.)
void EmulatedFakeCamera3::ReadoutThread::sendFlushSingnal(void) {
    Mutex::Autolock l(mLock);
    mFlush.signal();
}
2712 | |
// Sets the flush flag consumed by threadLoop().
// NOTE(review): this write is not protected by mLock even though
// threadLoop() reads mFlushFlag under mLock -- confirm callers tolerate
// the resulting race before adding locking here (a caller already holding
// mLock would deadlock).
void EmulatedFakeCamera3::ReadoutThread::setFlushFlag(bool flag) {
    mFlushFlag = flag;
}
2716 | |
// Appends a capture request to the in-flight queue and wakes the readout
// loop, which may be blocked waiting for work in threadLoop().
void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
    Mutex::Autolock l(mLock);

    mInFlightQueue.push_back(r);
    mInFlightSignal.signal();
}
2723 | |
2724 | bool EmulatedFakeCamera3::ReadoutThread::isIdle() { |
2725 | Mutex::Autolock l(mLock); |
2726 | return mInFlightQueue.empty() && !mThreadActive; |
2727 | } |
2728 | |
2729 | status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() { |
2730 | status_t res; |
2731 | Mutex::Autolock l(mLock); |
2732 | CAMHAL_LOGVB("%s , E" , __FUNCTION__); |
2733 | int loopCount = 0; |
2734 | while (mInFlightQueue.size() >= kMaxQueueSize) { |
2735 | res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop); |
2736 | if (res != OK && res != TIMED_OUT) { |
2737 | ALOGE("%s: Error waiting for in-flight queue to shrink", |
2738 | __FUNCTION__); |
2739 | return INVALID_OPERATION; |
2740 | } |
2741 | if (loopCount == kMaxWaitLoops) { |
2742 | ALOGE("%s: Timed out waiting for in-flight queue to shrink", |
2743 | __FUNCTION__); |
2744 | return TIMED_OUT; |
2745 | } |
2746 | loopCount++; |
2747 | } |
2748 | return OK; |
2749 | } |
2750 | |
// Registers this thread as the JPEG compressor's completion listener so
// onJpegDone()/onJpegInputDone() get called when compression finishes.
// NOTE(review): the 'parent' parameter is unused -- the listener is set on
// mParent's compressor instead; confirm callers always pass mParent.
status_t EmulatedFakeCamera3::ReadoutThread::setJpegCompressorListener(EmulatedFakeCamera3 *parent) {
    status_t res;
    res = mParent->mJpegCompressor->setlistener(this);
    if (res != NO_ERROR) {
        ALOGE("%s: set JpegCompressor Listner failed",__FUNCTION__);
    }
    return res;
}
2759 | |
2760 | status_t EmulatedFakeCamera3::ReadoutThread::startJpegCompressor(EmulatedFakeCamera3 *parent) { |
2761 | status_t res; |
2762 | res = mParent->mJpegCompressor->start(); |
2763 | if (res != NO_ERROR) { |
2764 | ALOGE("%s: JpegCompressor start failed",__FUNCTION__); |
2765 | } |
2766 | return res; |
2767 | } |
2768 | |
2769 | status_t EmulatedFakeCamera3::ReadoutThread::shutdownJpegCompressor(EmulatedFakeCamera3 *parent) { |
2770 | status_t res; |
2771 | res = mParent->mJpegCompressor->cancel(); |
2772 | if (res != OK) { |
2773 | ALOGE("%s: JpegCompressor cancel failed",__FUNCTION__); |
2774 | } |
2775 | return res; |
2776 | } |
2777 | |
2778 | void EmulatedFakeCamera3::ReadoutThread::sendExitReadoutThreadSignal(void) { |
2779 | mExitReadoutThread = true; |
2780 | mInFlightSignal.signal(); |
2781 | } |
2782 | |
/**
 * Main loop of the readout thread. One iteration:
 *  1. Signals flushAllRequest() if a flush is pending and there is no work.
 *  2. If no request is cached in mCurrentRequest, dequeues one from the
 *     in-flight queue (waiting up to kWaitPerLoop for one to arrive).
 *  3. Waits for the sensor to deliver the corresponding frame.
 *  4. Routes BLOB (JPEG) buffers to the async compressor and unlocks/marks
 *     all other buffers for return.
 *  5. Builds a camera3_capture_result and sends it to the framework, then
 *     releases per-request resources.
 *
 * Returns true to be called again, false to exit the thread.
 */
bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
    status_t res;
    ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);

    // First wait for a request from the in-flight queue
    if (mExitReadoutThread) {
        return false;
    }

    {
        // Flush handshake: only signal when the queue is empty AND no
        // request is still cached locally, i.e. truly nothing in flight.
        Mutex::Autolock l(mLock);
        if ((mInFlightQueue.size() == 0) && (mFlushFlag) &&
             (mCurrentRequest.settings.isEmpty())) {
            mFlush.signal();
        }
    }

    // mCurrentRequest acts as a one-slot cache: if a previous iteration
    // dequeued a request but returned early (e.g. sensor timeout), we keep
    // working on it instead of dequeuing a new one.
    if (mCurrentRequest.settings.isEmpty()) {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
            if (res == TIMED_OUT) {
                ALOGVV("%s: ReadoutThread: Timed out waiting for request",
                        __FUNCTION__);
                return true;
            } else if (res != NO_ERROR) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                return false;
            }
        }

        if (mExitReadoutThread) {
            return false;
        }

        // Take ownership of the head request (settings are moved via
        // acquire(); buffer vectors are raw pointers we now own).
        mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
        mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
        mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
        mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
        mCurrentRequest.havethumbnail = mInFlightQueue.begin()->havethumbnail;
        mInFlightQueue.erase(mInFlightQueue.begin());
        // Wake waitForReadout(), which may be blocked on a full queue.
        mInFlightSignal.signal();
        mThreadActive = true;
        // NOTE(review): frameNumber is uint32_t printed with %d -- harmless
        // until frame numbers exceed INT32_MAX, but %u would be correct.
        ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
                mCurrentRequest.frameNumber);
    }

    // Then wait for it to be delivered from the sensor
    ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
            __FUNCTION__);

    nsecs_t captureTime;
    status_t gotFrame =
            mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
    // 0 == no frame yet: retry with the cached request next iteration.
    if (gotFrame == 0) {
        ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
                __FUNCTION__);
        return true;
    }

    // -1 == sensor thread gone: exit this loop as well.
    if (gotFrame == -1) {
        DBG_LOGA("Sensor thread had exited , here should exit ReadoutThread Loop");
        return false;
    }

    bool workflag =
        mParent->mSensor->get_sensor_status();
    if (!workflag)
        return true;

    ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
            mCurrentRequest.frameNumber, captureTime);

    // Check if we need to JPEG encode a buffer, and send it for async
    // compression if so. Otherwise prepare the buffer for return.
    bool needJpeg = false;
    HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
    while (buf != mCurrentRequest.buffers->end()) {
        bool goodBuffer = true;
        if ( buf->stream->format ==
            HAL_PIXEL_FORMAT_BLOB) {
            // BLOB stream: hand the buffer to the JPEG compressor; it is
            // erased from our vector so it is NOT part of this result --
            // onJpegDone() will return it in a separate capture result.
            Mutex::Autolock jl(mJpegLock);
            needJpeg = true;
            CaptureRequest currentcapture;
            currentcapture.frameNumber = mCurrentRequest.frameNumber;
            currentcapture.sensorBuffers = mCurrentRequest.sensorBuffers;
            currentcapture.buf = buf;
            currentcapture.mNeedThumbnail = mCurrentRequest.havethumbnail;
            mParent->mJpegCompressor->queueRequest(currentcapture);
            //this sensorBuffers delete in the jpegcompress;
            mCurrentRequest.sensorBuffers = NULL;
            buf = mCurrentRequest.buffers->erase(buf);
            continue;
        }
        // Non-JPEG buffer: release the CPU mapping and mark it done.
        GraphicBufferMapper::get().unlock(*(buf->buffer));

        buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
                CAMERA3_BUFFER_STATUS_ERROR;
        buf->acquire_fence = -1;
        buf->release_fence = -1;

        ++buf;
    } // end while

    // Construct result for all completed buffers and results

    camera3_capture_result result;

    mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
            &captureTime, 1);

    memset(&result, 0, sizeof(result));
    result.frame_number = mCurrentRequest.frameNumber;
    // getAndLock() freezes the metadata; it is unlocked again below after
    // sendCaptureResult() has consumed it.
    result.result = mCurrentRequest.settings.getAndLock();
    result.num_output_buffers = mCurrentRequest.buffers->size();
    result.output_buffers = mCurrentRequest.buffers->array();
    result.partial_result = 1;

    // Go idle if queue is empty, before sending result

    bool signalIdle = false;
    {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            mThreadActive = false;
            signalIdle = true;
        }
    }

    // Call the parent outside our own lock to avoid lock-order issues.
    if (signalIdle) mParent->signalReadoutIdle();

    // Send it off to the framework
    ALOGVV("%s: ReadoutThread: Send result to framework",
            __FUNCTION__);
    mParent->sendCaptureResult(&result);

    // Clean up
    mCurrentRequest.settings.unlock(result.result);

    delete mCurrentRequest.buffers;
    mCurrentRequest.buffers = NULL;
    // sensorBuffers is only still ours if no JPEG was queued; otherwise the
    // compressor owns (and deletes) it.
    if (!needJpeg) {
        delete mCurrentRequest.sensorBuffers;
        mCurrentRequest.sensorBuffers = NULL;
    }
    // Clearing settings marks the one-slot request cache as empty.
    mCurrentRequest.settings.clear();
    CAMHAL_LOGVB("%s , X " , __FUNCTION__);
    return true;
}
2933 | |
2934 | void EmulatedFakeCamera3::ReadoutThread::onJpegDone( |
2935 | const StreamBuffer &jpegBuffer, bool success , CaptureRequest &r) { |
2936 | Mutex::Autolock jl(mJpegLock); |
2937 | GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer)); |
2938 | |
2939 | mJpegHalBuffer = *(r.buf); |
2940 | mJpegHalBuffer.status = success ? |
2941 | CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR; |
2942 | mJpegHalBuffer.acquire_fence = -1; |
2943 | mJpegHalBuffer.release_fence = -1; |
2944 | mJpegWaiting = false; |
2945 | |
2946 | camera3_capture_result result; |
2947 | result.frame_number = r.frameNumber; |
2948 | result.result = NULL; |
2949 | result.num_output_buffers = 1; |
2950 | result.output_buffers = &mJpegHalBuffer; |
2951 | result.partial_result = 1; |
2952 | |
2953 | if (!success) { |
2954 | ALOGE("%s: Compression failure, returning error state buffer to" |
2955 | " framework", __FUNCTION__); |
2956 | } else { |
2957 | DBG_LOGB("%s: Compression complete, returning buffer to framework", |
2958 | __FUNCTION__); |
2959 | } |
2960 | |
2961 | mParent->sendCaptureResult(&result); |
2962 | |
2963 | } |
2964 | |
// JPEG compressor input-buffer callback. This HAL never hands the
// compressor a framework input buffer, so reaching here indicates a bug;
// log loudly and otherwise ignore the buffer.
void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
        const StreamBuffer &inputBuffer) {
    // Should never get here, since the input buffer has to be returned
    // by end of processCaptureRequest
    ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
}
2971 | |
2972 | |
2973 | }; // namespace android |
2974 |