summaryrefslogtreecommitdiff
path: root/v3/EmulatedFakeCamera3.cpp (plain)
blob: 6417fe416f8b9d775f7f7c99727daa8c25162bc0
1/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/*
18 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22#include <inttypes.h>
23
24//#define LOG_NDEBUG 0
25//#define LOG_NNDEBUG 0
26#define LOG_TAG "EmulatedCamera_FakeCamera3"
27#include <utils/Log.h>
28
29#include "EmulatedFakeCamera3.h"
30#include "EmulatedCameraFactory.h"
31#include <ui/Fence.h>
32#include <ui/Rect.h>
33#include <ui/GraphicBufferMapper.h>
34#include <sys/types.h>
35
36#include <cutils/properties.h>
37#include "fake-pipeline2/Sensor.h"
38#include "fake-pipeline2/JpegCompressor.h"
39#include <cmath>
40#include <gralloc_priv.h>
41
42#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
43#define ALOGVV ALOGV
44#else
45#define ALOGVV(...) ((void)0)
46#endif
47
48namespace android {
49
50/**
51 * Constants for camera capabilities
52 */
53
// Time units expressed in nanoseconds (the HAL's nsecs_t base unit).
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;


// Pixel formats accepted by configureStreams(); commented-out entries are
// formats this fake pipeline does not currently advertise.
const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
        //HAL_PIXEL_FORMAT_RAW_SENSOR,
        HAL_PIXEL_FORMAT_BLOB,
        //HAL_PIXEL_FORMAT_RGBA_8888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
        // These are handled by YCbCr_420_888
        HAL_PIXEL_FORMAT_YV12,
        HAL_PIXEL_FORMAT_YCrCb_420_SP,
        //HAL_PIXEL_FORMAT_YCbCr_422_I,
        HAL_PIXEL_FORMAT_YCbCr_420_888
};

// RAW capture sizes, packed as (width, height) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
    640, 480
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum RAW frame duration, taken from the fake sensor's duration range.
const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// Processed (YUV) output sizes for the back camera, (width, height) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[6] = {
    640, 480, 320, 240,// 1280, 720
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// Processed (YUV) output sizes for the front camera, (width, height) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
    640, 480, 320, 240
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum processed-stream frame duration, from the fake sensor.
const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// JPEG (BLOB) capture sizes for the back camera, (width, height) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
    1280,720
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};

// JPEG (BLOB) capture sizes for the front camera, (width, height) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
    640, 480
    //    Sensor::kResolution[0], Sensor::kResolution[1]
};


// Minimum JPEG frame duration, from the fake sensor.
const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int EmulatedFakeCamera3::kNormalSensitivity = 100;
const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
// NOTE(review): presumably the per-frame convergence rate of the fake AE
// loop — confirm against the AE implementation (not visible in this file).
const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
const float EmulatedFakeCamera3::kExposureWanderMin = -2;
const float EmulatedFakeCamera3::kExposureWanderMax = 1;

/**
 * Camera device lifecycle methods
 */
// Floor for computed JPEG buffer sizes: 256KB of payload plus the
// camera3_jpeg_blob transport header appended at the end of each BLOB buffer.
static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
128jpegsize EmulatedFakeCamera3::getMaxJpegResolution(uint32_t picSizes[],int count) {
129 uint32_t maxJpegWidth = 0, maxJpegHeight = 0;
130 jpegsize maxJpegResolution;
131 for (int i=0; i < count; i+= 4) {
132 uint32_t width = picSizes[i+1];
133 uint32_t height = picSizes[i+2];
134 if (picSizes[i+0] == HAL_PIXEL_FORMAT_BLOB &&
135 (width * height > maxJpegWidth * maxJpegHeight)) {
136 maxJpegWidth = width;
137 maxJpegHeight = height;
138 }
139 }
140 maxJpegResolution.width = maxJpegWidth;
141 maxJpegResolution.height = maxJpegHeight;
142 return maxJpegResolution;
143}
144ssize_t EmulatedFakeCamera3::getJpegBufferSize(int width, int height) {
145 if (maxJpegResolution.width == 0) {
146 return BAD_VALUE;
147 }
148 ssize_t maxJpegBufferSize = JpegCompressor::kMaxJpegSize;
149
150 // Calculate final jpeg buffer size for the given resolution.
151 float scaleFactor = ((float) (width * height)) /
152 (maxJpegResolution.width * maxJpegResolution.height);
153 ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize;
154 // Bound the buffer size to [MIN_JPEG_BUFFER_SIZE, maxJpegBufferSize].
155 if (jpegBufferSize > maxJpegBufferSize) {
156 jpegBufferSize = maxJpegBufferSize;
157 } else if (jpegBufferSize < kMinJpegBufferSize) {
158 jpegBufferSize = kMinJpegBufferSize;
159 }
160 return jpegBufferSize;
161}
162
163EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, struct hw_module_t* module) :
164 EmulatedCamera3(cameraId, module) {
165 ALOGI("Constructing emulated fake camera 3 cameraID:%d", mCameraID);
166
167 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
168 mDefaultTemplates[i] = NULL;
169 }
170
171 /**
172 * Front cameras = limited mode
173 * Back cameras = full mode
174 */
175 //TODO limited or full mode, read this from camera driver
176 //mFullMode = facingBack;
177 mSupportCap = 0;
178 mSupportRotate = 0;
179 mFullMode = 0;
180}
181
182EmulatedFakeCamera3::~EmulatedFakeCamera3() {
183 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
184 if (mDefaultTemplates[i] != NULL) {
185 free_camera_metadata(mDefaultTemplates[i]);
186 }
187 }
188
189 if (mCameraInfo != NULL) {
190 CAMHAL_LOGIA("free mCameraInfo");
191 free_camera_metadata(mCameraInfo);
192 mCameraInfo = NULL;
193 }
194}
195
/*
 * One-time device initialization: builds the static metadata via
 * constructStaticInfo() and then delegates to the base class. Only legal
 * while the device is still in STATUS_ERROR (the pre-initialized state).
 */
status_t EmulatedFakeCamera3::Initialize() {
    DBG_LOGB("mCameraID=%d,mStatus=%d,ddd\n", mCameraID, mStatus);
    status_t res;

#ifdef HAVE_VERSION_INFO
    // Build-provenance banner; all fields are macros injected by the build.
    CAMHAL_LOGIB("\n--------------------------------\n"
                  "author:aml.sh multi-media team\n"
                  "branch name:   %s\n"
                  "git version:   %s \n"
                  "last changed:  %s\n"
                  "build-time:    %s\n"
                  "build-name:    %s\n"
                  "uncommitted-file-num:%d\n"
                  "ssh user@%s, cd %s\n"
                  "hostname %s\n"
                  "--------------------------------\n",
                  CAMHAL_BRANCH_NAME,
                  CAMHAL_GIT_VERSION,
                  CAMHAL_LAST_CHANGED,
                  CAMHAL_BUILD_TIME,
                  CAMHAL_BUILD_NAME,
                  CAMHAL_GIT_UNCOMMIT_FILE_NUM,
                  CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
                  );
#endif


    // Any state other than STATUS_ERROR means Initialize() already ran.
    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return EmulatedCamera3::Initialize();
}
237
/*
 * Open the device: start the fake sensor, probe its ioctl capabilities,
 * spin up the readout thread and JPEG compressor, and reset the simulated
 * 3A state to its defaults. Requires the device to be closed and plugged in.
 * The initialization order below is significant (sensor before threads).
 */
status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");
    Mutex::Autolock l(mLock);
    status_t res;

    if ((mStatus != STATUS_CLOSED) || !mPlugged) {
        ALOGE("%s: Can't connect in state %d, mPlugged=%d",
                __FUNCTION__, mStatus, mPlugged);
        return INVALID_OPERATION;
    }

    mSensor = new Sensor();
    mSensor->setSensorListener(this);

    res = mSensor->startUp(mCameraID);
    DBG_LOGB("mSensor startUp, mCameraID=%d\n", mCameraID);
    if (res != NO_ERROR) return res;

    // Probe which optional ioctls the underlying device supports; the
    // rotate bit enables the rotation path elsewhere in the HAL.
    mSupportCap = mSensor->IoctlStateProbe();
    if (mSupportCap & IOCTL_MASK_ROTATE) {
        mSupportRotate = true;
    }

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor();

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A

    mControlMode  = ANDROID_CONTROL_MODE_AUTO;
    mFacePriority = false;
    mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
    // AE starts converged so the first frames report a usable exposure.
    mAeState      = ANDROID_CONTROL_AE_STATE_CONVERGED;//ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAfTriggerId  = 0;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity  = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}
284
285status_t EmulatedFakeCamera3::plugCamera() {
286 {
287 Mutex::Autolock l(mLock);
288
289 if (!mPlugged) {
290 CAMHAL_LOGIB("%s: Plugged back in", __FUNCTION__);
291 mPlugged = true;
292 }
293 }
294
295 return NO_ERROR;
296}
297
298status_t EmulatedFakeCamera3::unplugCamera() {
299 {
300 Mutex::Autolock l(mLock);
301
302 if (mPlugged) {
303 CAMHAL_LOGIB("%s: Unplugged camera", __FUNCTION__);
304 mPlugged = false;
305 }
306 }
307
308 return closeCamera();
309}
310
311camera_device_status_t EmulatedFakeCamera3::getHotplugStatus() {
312 Mutex::Autolock l(mLock);
313 return mPlugged ?
314 CAMERA_DEVICE_STATUS_PRESENT :
315 CAMERA_DEVICE_STATUS_NOT_PRESENT;
316}
317
318status_t EmulatedFakeCamera3::closeCamera() {
319 CAMHAL_LOGVB("%s, %d\n", __FUNCTION__, __LINE__);
320 status_t res;
321 {
322 Mutex::Autolock l(mLock);
323 if (mStatus == STATUS_CLOSED) return OK;
324 res = mSensor->streamOff();
325
326 res = mSensor->shutDown();
327 if (res != NO_ERROR) {
328 ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
329 return res;
330 }
331 mSensor.clear();
332
333 mReadoutThread->requestExit();
334 }
335
336 mReadoutThread->join();
337
338 {
339 Mutex::Autolock l(mLock);
340 // Clear out private stream information
341 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
342 PrivateStreamInfo *privStream =
343 static_cast<PrivateStreamInfo*>((*s)->priv);
344 delete privStream;
345 (*s)->priv = NULL;
346 }
347 mStreams.clear();
348 mReadoutThread.clear();
349 }
350
351 return EmulatedCamera3::closeCamera();
352}
353
354status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
355 char property[PROPERTY_VALUE_MAX];
356 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
357 if (mSensorType == SENSOR_USB) {
358 if (mFacingBack) {
359 property_get("hw.camera.orientation.back", property, "0");
360 } else {
361 property_get("hw.camera.orientation.front", property, "0");
362 }
363 int32_t orientation = atoi(property);
364 property_get("hw.camera.usb.orientation_offset", property, "0");
365 orientation += atoi(property);
366 orientation %= 360;
367 info->orientation = orientation ;
368 } else {
369 if (mFacingBack) {
370 property_get("hw.camera.orientation.back", property, "270");
371 } else {
372 property_get("hw.camera.orientation.front", property, "90");
373 }
374 info->orientation = atoi(property);
375 }
376 return EmulatedCamera3::getCameraInfo(info);
377}
378
379/**
380 * Camera3 interface methods
381 */
382
383void EmulatedFakeCamera3::getValidJpegSize(uint32_t picSizes[], uint32_t availablejpegsize[], int count) {
384 int i,j,k;
385 bool valid = true;
386 for (i=0,j=0; i < count; i+= 4) {
387 for (k= 0; k<=j ;k+=2) {
388 if ((availablejpegsize[k]*availablejpegsize[k+1]) == (picSizes[i+1]*picSizes[i+2])) {
389
390 valid = false;
391 }
392 }
393 if (valid) {
394 availablejpegsize[j] = picSizes[i+1];
395 availablejpegsize[j+1] = picSizes[i+2];
396 j+=2;
397 }
398 valid = true;
399 }
400}
401
402status_t EmulatedFakeCamera3::checkValidJpegSize(uint32_t width, uint32_t height) {
403
404 int validsizecount = 0;
405 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
406 for (uint32_t f = 0; f < count; f+=2) {
407 if (mAvailableJpegSize[f] != 0) {
408 if ((mAvailableJpegSize[f] == width)&&(mAvailableJpegSize[f+1] == height)) {
409 validsizecount++;
410 }
411 } else {
412 break;
413 }
414 }
415 if (validsizecount == 0)
416 return BAD_VALUE;
417 return OK;
418}
419
420status_t EmulatedFakeCamera3::configureStreams(
421 camera3_stream_configuration *streamList) {
422 Mutex::Autolock l(mLock);
423 uint32_t width, height, pixelfmt;
424 bool isRestart = false;
425 DBG_LOGB("%s: %d streams", __FUNCTION__, streamList->num_streams);
426
427 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
428 ALOGE("%s: Cannot configure streams in state %d",
429 __FUNCTION__, mStatus);
430 return NO_INIT;
431 }
432
433 /**
434 * Sanity-check input list.
435 */
436 if (streamList == NULL) {
437 ALOGE("%s: NULL stream configuration", __FUNCTION__);
438 return BAD_VALUE;
439 }
440
441 if (streamList->streams == NULL) {
442 ALOGE("%s: NULL stream list", __FUNCTION__);
443 return BAD_VALUE;
444 }
445
446 if (streamList->num_streams < 1) {
447 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
448 streamList->num_streams);
449 return BAD_VALUE;
450 }
451
452 camera3_stream_t *inputStream = NULL;
453 for (size_t i = 0; i < streamList->num_streams; i++) {
454 camera3_stream_t *newStream = streamList->streams[i];
455
456 if (newStream == NULL) {
457 ALOGE("%s: Stream index %zu was NULL",
458 __FUNCTION__, i);
459 return BAD_VALUE;
460 }
461
462 if (newStream->max_buffers <= 0) {
463 isRestart = true;//mSensor->isNeedRestart(newStream->width, newStream->height, newStream->format);
464 DBG_LOGB("format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d, isRestart=%d\n",
465 newStream->format, newStream->width, newStream->height,
466 newStream->stream_type, newStream->max_buffers,
467 isRestart);
468 }
469 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
470 __FUNCTION__, newStream, i, newStream->stream_type,
471 newStream->usage,
472 newStream->format);
473
474 if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
475 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
476 if (inputStream != NULL) {
477
478 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
479 return BAD_VALUE;
480 }
481 inputStream = newStream;
482 }
483
484 bool validFormat = false;
485 for (size_t f = 0;
486 f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
487 f++) {
488 if (newStream->format == kAvailableFormats[f]) {
489 validFormat = true;
490 //HAL_PIXEL_FORMAT_YCrCb_420_SP,
491 if (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format)
492 newStream->format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
493
494 break;
495 }
496 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
497 }
498 if (!validFormat) {
499 ALOGE("%s: Unsupported stream format 0x%x requested",
500 __FUNCTION__, newStream->format);
501 return BAD_VALUE;
502 }
503
504 status_t ret = checkValidJpegSize(newStream->width, newStream->height);
505 if (ret != OK) {
506 return BAD_VALUE;
507 }
508
509 }
510 mInputStream = inputStream;
511 width = 0;
512 height = 0;
513 for (size_t i = 0; i < streamList->num_streams; i++) {
514 camera3_stream_t *newStream = streamList->streams[i];
515 DBG_LOGB("find propert width and height, format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d\n",
516 newStream->format, newStream->width, newStream->height, newStream->stream_type, newStream->max_buffers);
517 if ((HAL_PIXEL_FORMAT_BLOB != newStream->format) &&
518 (CAMERA3_STREAM_OUTPUT == newStream->stream_type)) {
519
520 if (width < newStream->width)
521 width = newStream->width;
522
523 if (height < newStream->height)
524 height = newStream->height;
525
526 pixelfmt = (uint32_t)newStream->format;
527 if (HAL_PIXEL_FORMAT_YCbCr_420_888 == pixelfmt)
528 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
529 }
530
531 }
532
533 //TODO modify this ugly code
534 if (isRestart) {
535 isRestart = mSensor->isNeedRestart(width, height, pixelfmt);
536 }
537
538 if (isRestart) {
539 mSensor->streamOff();
540 pixelfmt = mSensor->halFormatToSensorFormat(pixelfmt);
541 mSensor->setOutputFormat(width, height, pixelfmt, 0);
542 mSensor->streamOn();
543 DBG_LOGB("width=%d, height=%d, pixelfmt=%.4s\n",
544 width, height, (char*)&pixelfmt);
545 }
546
547 /**
548 * Initially mark all existing streams as not alive
549 */
550 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
551 PrivateStreamInfo *privStream =
552 static_cast<PrivateStreamInfo*>((*s)->priv);
553 privStream->alive = false;
554 }
555
556 /**
557 * Find new streams and mark still-alive ones
558 */
559 for (size_t i = 0; i < streamList->num_streams; i++) {
560 camera3_stream_t *newStream = streamList->streams[i];
561 if (newStream->priv == NULL) {
562 // New stream, construct info
563 PrivateStreamInfo *privStream = new PrivateStreamInfo();
564 privStream->alive = true;
565 privStream->registered = false;
566
567 newStream->usage =
568 mSensor->getStreamUsage(newStream->stream_type);
569
570 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
571 newStream->max_buffers = kMaxBufferCount;
572 newStream->priv = privStream;
573 mStreams.push_back(newStream);
574 } else {
575 // Existing stream, mark as still alive.
576 PrivateStreamInfo *privStream =
577 static_cast<PrivateStreamInfo*>(newStream->priv);
578 CAMHAL_LOGDA("Existing stream ?");
579 privStream->alive = true;
580 }
581 DBG_LOGB("%d, newStream=%p, stream_type=%d, usage=%x, priv=%p, w*h=%dx%d\n",
582 i, newStream, newStream->stream_type, newStream->usage, newStream->priv, newStream->width, newStream->height);
583 }
584
585 /**
586 * Reap the dead streams
587 */
588 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
589 PrivateStreamInfo *privStream =
590 static_cast<PrivateStreamInfo*>((*s)->priv);
591 if (!privStream->alive) {
592 DBG_LOGA("delete not alive streams");
593 (*s)->priv = NULL;
594 delete privStream;
595 s = mStreams.erase(s);
596 } else {
597 ++s;
598 }
599 }
600
601 /**
602 * Can't reuse settings across configure call
603 */
604 mPrevSettings.clear();
605
606 return OK;
607}
608
609status_t EmulatedFakeCamera3::registerStreamBuffers(
610 const camera3_stream_buffer_set *bufferSet) {
611 DBG_LOGB("%s: E", __FUNCTION__);
612 Mutex::Autolock l(mLock);
613
614 /**
615 * Sanity checks
616 */
617 DBG_LOGA("==========sanity checks\n");
618
619 // OK: register streams at any time during configure
620 // (but only once per stream)
621 if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) {
622 ALOGE("%s: Cannot register buffers in state %d",
623 __FUNCTION__, mStatus);
624 return NO_INIT;
625 }
626
627 if (bufferSet == NULL) {
628 ALOGE("%s: NULL buffer set!", __FUNCTION__);
629 return BAD_VALUE;
630 }
631
632 StreamIterator s = mStreams.begin();
633 for (; s != mStreams.end(); ++s) {
634 if (bufferSet->stream == *s) break;
635 }
636 if (s == mStreams.end()) {
637 ALOGE("%s: Trying to register buffers for a non-configured stream!",
638 __FUNCTION__);
639 return BAD_VALUE;
640 }
641
642 /**
643 * Register the buffers. This doesn't mean anything to the emulator besides
644 * marking them off as registered.
645 */
646
647 PrivateStreamInfo *privStream =
648 static_cast<PrivateStreamInfo*>((*s)->priv);
649
650#if 0
651 if (privStream->registered) {
652 ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__);
653 return BAD_VALUE;
654 }
655#endif
656
657 privStream->registered = true;
658
659 return OK;
660}
661
662const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
663 int type) {
664 DBG_LOGB("%s: E", __FUNCTION__);
665 Mutex::Autolock l(mLock);
666
667 if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
668 ALOGE("%s: Unknown request settings template: %d",
669 __FUNCTION__, type);
670 return NULL;
671 }
672
673 /**
674 * Cache is not just an optimization - pointer returned has to live at
675 * least as long as the camera device instance does.
676 */
677 if (mDefaultTemplates[type] != NULL) {
678 return mDefaultTemplates[type];
679 }
680
681 CameraMetadata settings;
682
683 /** android.request */
684 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
685 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
686
687 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
688 settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
689
690 static const int32_t id = 0;
691 settings.update(ANDROID_REQUEST_ID, &id, 1);
692
693 static const int32_t frameCount = 0;
694 settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
695
696 /** android.lens */
697
698 static const float focusDistance = 0;
699 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
700
701 static const float aperture = 2.8f;
702 settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
703
704// static const float focalLength = 5.0f;
705 static const float focalLength = 3.299999952316284f;
706 settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
707
708 static const float filterDensity = 0;
709 settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
710
711 static const uint8_t opticalStabilizationMode =
712 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
713 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
714 &opticalStabilizationMode, 1);
715
716 // FOCUS_RANGE set only in frame
717
718 /** android.sensor */
719
720 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
721 settings.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
722 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
723 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
724 static const int64_t exposureTime = 10 * MSEC;
725 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
726
727 int64_t frameDuration = mSensor->getMinFrameDuration();
728 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
729
730 static const int32_t sensitivity = 100;
731 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
732
733 static const int64_t rollingShutterSkew = 0;
734 settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
735 // TIMESTAMP set only in frame
736
737 /** android.flash */
738
739 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
740 settings.update(ANDROID_FLASH_STATE, &flashstate, 1);
741
742 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
743 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
744
745 static const uint8_t flashPower = 10;
746 settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
747
748 static const int64_t firingTime = 0;
749 settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
750
751 /** Processing block modes */
752 uint8_t hotPixelMode = 0;
753 uint8_t demosaicMode = 0;
754 uint8_t noiseMode = 0;
755 uint8_t shadingMode = 0;
756 uint8_t colorMode = 0;
757 uint8_t tonemapMode = 0;
758 uint8_t edgeMode = 0;
759 switch (type) {
760
761 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
762 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
763 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
764 // fall-through
765 case CAMERA3_TEMPLATE_STILL_CAPTURE:
766 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
767 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
768 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
769 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
770 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
771 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
772 break;
773 case CAMERA3_TEMPLATE_PREVIEW:
774 // fall-through
775 case CAMERA3_TEMPLATE_VIDEO_RECORD:
776 // fall-through
777 case CAMERA3_TEMPLATE_MANUAL:
778 // fall-through
779 default:
780 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
781 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
782 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
783 shadingMode = ANDROID_SHADING_MODE_FAST;
784 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
785 tonemapMode = ANDROID_TONEMAP_MODE_FAST;
786 edgeMode = ANDROID_EDGE_MODE_FAST;
787 break;
788 }
789 settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
790 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
791 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
792 settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
793 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
794 settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
795 settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
796
797 /** android.noise */
798 static const uint8_t noiseStrength = 5;
799 settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
800 static uint8_t availableNBModes[] = {
801 ANDROID_NOISE_REDUCTION_MODE_OFF,
802 ANDROID_NOISE_REDUCTION_MODE_FAST,
803 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
804 };
805 settings.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
806 availableNBModes, sizeof(availableNBModes)/sizeof(availableNBModes));
807
808
809 /** android.color */
810 static const float colorTransform[9] = {
811 1.0f, 0.f, 0.f,
812 0.f, 1.f, 0.f,
813 0.f, 0.f, 1.f
814 };
815 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
816
817 /** android.tonemap */
818 static const float tonemapCurve[4] = {
819 0.f, 0.f,
820 1.f, 1.f
821 };
822 settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
823 settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
824 settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
825
826 /** android.edge */
827 static const uint8_t edgeStrength = 5;
828 settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
829
830 /** android.scaler */
831 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
832 settings.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
833
834 static const int32_t cropRegion[] = {
835 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
836 };
837 settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
838
839 /** android.jpeg */
840 static const uint8_t jpegQuality = 80;
841 settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
842
843 static const int32_t thumbnailSize[2] = {
844 640, 480
845 };
846 settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
847
848 static const uint8_t thumbnailQuality = 80;
849 settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
850
851 static const double gpsCoordinates[3] = {
852 0, 0, 0
853 };
854 settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); //default 2 value
855
856 static const uint8_t gpsProcessingMethod[32] = "None";
857 settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
858
859 static const int64_t gpsTimestamp = 0;
860 settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
861
862 static const int32_t jpegOrientation = 0;
863 settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
864
865 /** android.stats */
866
867 static const uint8_t faceDetectMode =
868 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
869 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
870
871 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
872 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
873
874 static const uint8_t sharpnessMapMode =
875 ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
876 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
877
878 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
879 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
880 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
881 settings.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
882 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
883 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
884 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
885 // sharpnessMap only in frames
886
887 /** android.control */
888
889 uint8_t controlIntent = 0;
890 uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; //default value
891 uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
892 uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
893 switch (type) {
894 case CAMERA3_TEMPLATE_PREVIEW:
895 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
896 break;
897 case CAMERA3_TEMPLATE_STILL_CAPTURE:
898 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
899 break;
900 case CAMERA3_TEMPLATE_VIDEO_RECORD:
901 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
902 break;
903 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
904 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
905 break;
906 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
907 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
908 break;
909 case CAMERA3_TEMPLATE_MANUAL:
910 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
911 controlMode = ANDROID_CONTROL_MODE_OFF;
912 aeMode = ANDROID_CONTROL_AE_MODE_OFF;
913 awbMode = ANDROID_CONTROL_AWB_MODE_OFF;
914 break;
915 default:
916 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
917 break;
918 }
919 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
920 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
921
922 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
923 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
924
925 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
926 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
927
928 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
929
930 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
931 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
932
933 static const uint8_t aePrecaptureTrigger =
934 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
935 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
936
937 static const int32_t mAfTriggerId = 0;
938 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,&mAfTriggerId, 1);
939 static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
940 settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
941
942 static const int32_t controlRegions[5] = {
943 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
944 1000
945 };
946// settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
947
948 static const int32_t aeExpCompensation = 0;
949 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
950
951 static const int32_t aeTargetFpsRange[2] = {
952 30, 30
953 };
954 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
955
956 static const uint8_t aeAntibandingMode =
957 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
958 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
959
960 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
961
962 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
963 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
964
965// settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
966
967 uint8_t afMode = 0;
968 switch (type) {
969 case CAMERA3_TEMPLATE_PREVIEW:
970 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
971 break;
972 case CAMERA3_TEMPLATE_STILL_CAPTURE:
973 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
974 break;
975 case CAMERA3_TEMPLATE_VIDEO_RECORD:
976 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
977 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
978 break;
979 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
980 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
981 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
982 break;
983 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
984 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
985 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
986 break;
987 case CAMERA3_TEMPLATE_MANUAL:
988 afMode = ANDROID_CONTROL_AF_MODE_OFF;
989 break;
990 default:
991 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
992 break;
993 }
994 settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
995
996 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
997 settings.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
998
999// settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
1000
1001 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
1002 settings.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
1003 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1004 settings.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
1005 static const uint8_t vstabMode =
1006 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
1007 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
1008
1009 // aeState, awbState, afState only in frame
1010
1011 mDefaultTemplates[type] = settings.release();
1012
1013 return mDefaultTemplates[type];
1014}
1015
1016status_t EmulatedFakeCamera3::processCaptureRequest(
1017 camera3_capture_request *request) {
1018
1019 Mutex::Autolock l(mLock);
1020 status_t res;
1021
1022 /** Validation */
1023
1024 if (mStatus < STATUS_READY) {
1025 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
1026 mStatus);
1027 return INVALID_OPERATION;
1028 }
1029
1030 if (request == NULL) {
1031 ALOGE("%s: NULL request!", __FUNCTION__);
1032 return BAD_VALUE;
1033 }
1034
1035 uint32_t frameNumber = request->frame_number;
1036
1037 if (request->settings == NULL && mPrevSettings.isEmpty()) {
1038 ALOGE("%s: Request %d: NULL settings for first request after"
1039 "configureStreams()", __FUNCTION__, frameNumber);
1040 return BAD_VALUE;
1041 }
1042
1043 if (request->input_buffer != NULL &&
1044 request->input_buffer->stream != mInputStream) {
1045 DBG_LOGB("%s: Request %d: Input buffer not from input stream!",
1046 __FUNCTION__, frameNumber);
1047 DBG_LOGB("%s: Bad stream %p, expected: %p",
1048 __FUNCTION__, request->input_buffer->stream,
1049 mInputStream);
1050 DBG_LOGB("%s: Bad stream type %d, expected stream type %d",
1051 __FUNCTION__, request->input_buffer->stream->stream_type,
1052 mInputStream ? mInputStream->stream_type : -1);
1053
1054 return BAD_VALUE;
1055 }
1056
1057 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1058 ALOGE("%s: Request %d: No output buffers provided!",
1059 __FUNCTION__, frameNumber);
1060 return BAD_VALUE;
1061 }
1062
1063 // Validate all buffers, starting with input buffer if it's given
1064
1065 ssize_t idx;
1066 const camera3_stream_buffer_t *b;
1067 if (request->input_buffer != NULL) {
1068 idx = -1;
1069 b = request->input_buffer;
1070 } else {
1071 idx = 0;
1072 b = request->output_buffers;
1073 }
1074 do {
1075 PrivateStreamInfo *priv =
1076 static_cast<PrivateStreamInfo*>(b->stream->priv);
1077 if (priv == NULL) {
1078 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
1079 __FUNCTION__, frameNumber, idx);
1080 return BAD_VALUE;
1081 }
1082#if 0
1083 if (!priv->alive || !priv->registered) {
1084 ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream! alive=%d, registered=%d\n",
1085 __FUNCTION__, frameNumber, idx,
1086 priv->alive, priv->registered);
1087 //return BAD_VALUE;
1088 }
1089#endif
1090 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1091 ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
1092 __FUNCTION__, frameNumber, idx);
1093 return BAD_VALUE;
1094 }
1095 if (b->release_fence != -1) {
1096 ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
1097 __FUNCTION__, frameNumber, idx);
1098 return BAD_VALUE;
1099 }
1100 if (b->buffer == NULL) {
1101 ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
1102 __FUNCTION__, frameNumber, idx);
1103 return BAD_VALUE;
1104 }
1105 idx++;
1106 b = &(request->output_buffers[idx]);
1107 } while (idx < (ssize_t)request->num_output_buffers);
1108
1109 // TODO: Validate settings parameters
1110
1111 /**
1112 * Start processing this request
1113 */
1114
1115 mStatus = STATUS_ACTIVE;
1116
1117 CameraMetadata settings;
1118 camera_metadata_entry e;
1119
1120 if (request->settings == NULL) {
1121 settings.acquire(mPrevSettings);
1122 } else {
1123 settings = request->settings;
1124
1125 uint8_t antiBanding = 0;
1126 uint8_t effectMode = 0;
1127 int exposureCmp = 0;
1128
1129 e = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE);
1130 if (e.count == 0) {
1131 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1132 return BAD_VALUE;
1133 }
1134 antiBanding = e.data.u8[0];
1135 mSensor->setAntiBanding(antiBanding);
1136
1137 e = settings.find(ANDROID_CONTROL_EFFECT_MODE);
1138 if (e.count == 0) {
1139 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1140 return BAD_VALUE;
1141 }
1142 effectMode = e.data.u8[0];
1143 mSensor->setEffect(effectMode);
1144
1145
1146 e = settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
1147 if (e.count == 0) {
1148 ALOGE("%s: No exposure entry!", __FUNCTION__);
1149 //return BAD_VALUE;
1150 } else {
1151 exposureCmp = e.data.i32[0];
1152 DBG_LOGB("set expsore compensaton %d\n", exposureCmp);
1153 mSensor->setExposure(exposureCmp);
1154 }
1155
1156 int32_t cropRegion[4];
1157 int32_t cropWidth;
1158 int32_t outputWidth = request->output_buffers[0].stream->width;
1159
1160 e = settings.find(ANDROID_SCALER_CROP_REGION);
1161 if (e.count == 0) {
1162 ALOGE("%s: No corp region entry!", __FUNCTION__);
1163 //return BAD_VALUE;
1164 } else {
1165 cropRegion[0] = e.data.i32[0];
1166 cropRegion[1] = e.data.i32[1];
1167 cropWidth = cropRegion[2] = e.data.i32[2];
1168 cropRegion[3] = e.data.i32[3];
1169 for (int i = mZoomMin; i <= mZoomMax; i += mZoomStep) {
1170 //if ( (float) i / mZoomMin >= (float) outputWidth / cropWidth) {
1171 if ( i * cropWidth >= outputWidth * mZoomMin ) {
1172 mSensor->setZoom(i);
1173 break;
1174 }
1175 }
1176 DBG_LOGB("cropRegion:%d, %d, %d, %d\n", cropRegion[0], cropRegion[1],cropRegion[2],cropRegion[3]);
1177 }
1178 }
1179
1180 uint8_t len[] = {1};
1181 settings.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
1182
1183 uint8_t maxlen[] = {0};
1184 settings.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
1185
1186 res = process3A(settings);
1187 if (res != OK) {
1188 CAMHAL_LOGDB("%s: process3A failed!", __FUNCTION__);
1189 //return res;
1190 }
1191
1192 // TODO: Handle reprocessing
1193
1194 /**
1195 * Get ready for sensor config
1196 */
1197
1198 nsecs_t exposureTime;
1199 nsecs_t frameDuration;
1200 uint32_t sensitivity;
1201 bool needJpeg = false;
1202 struct ExifInfo info;
1203 ssize_t jpegbuffersize;
1204 uint32_t jpegpixelfmt;
1205
1206 exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
1207 frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
1208 sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
1209
1210 Buffers *sensorBuffers = new Buffers();
1211 HalBufferVector *buffers = new HalBufferVector();
1212
1213 sensorBuffers->setCapacity(request->num_output_buffers);
1214 buffers->setCapacity(request->num_output_buffers);
1215
1216 // Process all the buffers we got for output, constructing internal buffer
1217 // structures for them, and lock them for writing.
1218 for (size_t i = 0; i < request->num_output_buffers; i++) {
1219 const camera3_stream_buffer &srcBuf = request->output_buffers[i];
1220 const private_handle_t *privBuffer =
1221 (const private_handle_t*)(*srcBuf.buffer);
1222 StreamBuffer destBuf;
1223 destBuf.streamId = kGenericStreamId;
1224 destBuf.width = srcBuf.stream->width;
1225 destBuf.height = srcBuf.stream->height;
1226 destBuf.format = privBuffer->format; // Use real private format
1227 destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc
1228 destBuf.buffer = srcBuf.buffer;
1229 destBuf.share_fd = privBuffer->share_fd;
1230
1231 //ALOGI("%s, i:%d format for this usage: %d x %d, usage %x, format=%x, returned\n",
1232 // __FUNCTION__, i, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format);
1233 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
1234 needJpeg = true;
1235 memset(&info,0,sizeof(struct ExifInfo));
1236 info.orientation = settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
1237 jpegpixelfmt = mSensor->getOutputFormat();
1238 if (!mSupportRotate) {
1239 info.mainwidth = srcBuf.stream->width;
1240 info.mainheight = srcBuf.stream->height;
1241 } else {
1242 if ((info.orientation==90)||(info.orientation==270)) {
1243 info.mainwidth = srcBuf.stream->height;
1244 info.mainheight = srcBuf.stream->width;
1245 } else {
1246 info.mainwidth = srcBuf.stream->width;
1247 info.mainheight = srcBuf.stream->height;
1248 }
1249 }
1250 if ((jpegpixelfmt == V4L2_PIX_FMT_MJPEG)||(jpegpixelfmt == V4L2_PIX_FMT_YUYV)) {
1251 mSensor->setOutputFormat(info.mainwidth,info.mainheight,jpegpixelfmt,1);
1252 } else {
1253 mSensor->setOutputFormat(info.mainwidth,info.mainheight,V4L2_PIX_FMT_RGB24,1);
1254 }
1255 }
1256
1257 // Wait on fence
1258 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
1259 res = bufferAcquireFence->wait(kFenceTimeoutMs);
1260 if (res == TIMED_OUT) {
1261 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
1262 __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
1263 }
1264 if (res == OK) {
1265 // Lock buffer for writing
1266 const Rect rect(destBuf.width, destBuf.height);
1267 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1268 if (privBuffer->format == HAL_PIXEL_FORMAT_YCbCr_420_888/*HAL_PIXEL_FORMAT_YCrCb_420_SP*/) {
1269 android_ycbcr ycbcr = android_ycbcr();
1270 res = GraphicBufferMapper::get().lockYCbCr(
1271 *(destBuf.buffer),
1272 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK
1273 , rect,
1274 &ycbcr);
1275 // This is only valid because we know that emulator's
1276 // YCbCr_420_888 is really contiguous NV21 under the hood
1277 destBuf.img = static_cast<uint8_t*>(ycbcr.y);
1278 } else {
1279 ALOGE("Unexpected private format for flexible YUV: 0x%x",
1280 privBuffer->format);
1281 res = INVALID_OPERATION;
1282 }
1283 } else {
1284 res = GraphicBufferMapper::get().lock(*(destBuf.buffer),
1285 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK
1286 , rect,
1287 (void**)&(destBuf.img));
1288 }
1289 if (res != OK) {
1290 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
1291 __FUNCTION__, frameNumber, i);
1292 }
1293 }
1294
1295 if (res != OK) {
1296 // Either waiting or locking failed. Unlock locked buffers and bail
1297 // out.
1298 for (size_t j = 0; j < i; j++) {
1299 GraphicBufferMapper::get().unlock(
1300 *(request->output_buffers[i].buffer));
1301 }
1302 ALOGE("line:%d, format for this usage: %d x %d, usage %x, format=%x, returned\n",
1303 __LINE__, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format);
1304 return NO_INIT;
1305 }
1306
1307 sensorBuffers->push_back(destBuf);
1308 buffers->push_back(srcBuf);
1309 }
1310
1311 if (needJpeg){
1312 if (!mSupportRotate) {
1313 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1314 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1315 } else {
1316 if ((info.orientation==90)||(info.orientation==270)) {
1317 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1318 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1319 } else {
1320 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1321 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1322 }
1323 }
1324 if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
1325 info.latitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[0];
1326 info.longitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[1];
1327 info.altitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[2];
1328 info.has_latitude = true;
1329 info.has_longitude = true;
1330 info.has_altitude = true;
1331 } else {
1332 info.has_latitude = false;
1333 info.has_longitude = false;
1334 info.has_altitude = false;
1335 }
1336 if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
1337 info.gpsProcessingMethod = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
1338 info.has_gpsProcessingMethod = true;
1339 } else {
1340 info.has_gpsProcessingMethod = false;
1341 }
1342 if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
1343 info.gpsTimestamp = settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
1344 info.has_gpsTimestamp = true;
1345 } else {
1346 info.has_gpsTimestamp = false;
1347 }
1348 if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
1349 info.focallen = settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
1350 info.has_focallen = true;
1351 } else {
1352 info.has_focallen = false;
1353 }
1354 jpegbuffersize = getJpegBufferSize(info.mainwidth,info.mainheight);
1355
1356 mJpegCompressor->SetMaxJpegBufferSize(jpegbuffersize);
1357 mJpegCompressor->SetExifInfo(info);
1358 mSensor->setPictureRotate(info.orientation);
1359 DBG_LOGB("%s::thumbnailSize_width=%d,thumbnailSize_height=%d,mainsize_width=%d,mainsize_height=%d,jpegOrientation=%d",__FUNCTION__,
1360 info.thumbwidth,info.thumbheight,info.mainwidth,info.mainheight,info.orientation);
1361 }
1362 /**
1363 * Wait for JPEG compressor to not be busy, if needed
1364 */
1365#if 0
1366 if (needJpeg) {
1367 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1368 if (!ready) {
1369 ALOGE("%s: Timeout waiting for JPEG compression to complete!",
1370 __FUNCTION__);
1371 return NO_INIT;
1372 }
1373 }
1374#else
1375 while (needJpeg) {
1376 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1377 if (ready)
1378 break;
1379 }
1380#endif
1381 /**
1382 * Wait until the in-flight queue has room
1383 */
1384 res = mReadoutThread->waitForReadout();
1385 if (res != OK) {
1386 ALOGE("%s: Timeout waiting for previous requests to complete!",
1387 __FUNCTION__);
1388 return NO_INIT;
1389 }
1390
1391 /**
1392 * Wait until sensor's ready. This waits for lengthy amounts of time with
1393 * mLock held, but the interface spec is that no other calls may by done to
1394 * the HAL by the framework while process_capture_request is happening.
1395 */
1396 int syncTimeoutCount = 0;
1397 while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
1398 if (mStatus == STATUS_ERROR) {
1399 return NO_INIT;
1400 }
1401 if (syncTimeoutCount == kMaxSyncTimeoutCount) {
1402 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
1403 __FUNCTION__, frameNumber,
1404 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
1405 return NO_INIT;
1406 }
1407 syncTimeoutCount++;
1408 }
1409
1410 /**
1411 * Configure sensor and queue up the request to the readout thread
1412 */
1413 mSensor->setExposureTime(exposureTime);
1414 mSensor->setFrameDuration(frameDuration);
1415 mSensor->setSensitivity(sensitivity);
1416 mSensor->setDestinationBuffers(sensorBuffers);
1417 mSensor->setFrameNumber(request->frame_number);
1418
1419 ReadoutThread::Request r;
1420 r.frameNumber = request->frame_number;
1421 r.settings = settings;
1422 r.sensorBuffers = sensorBuffers;
1423 r.buffers = buffers;
1424
1425 mReadoutThread->queueCaptureRequest(r);
1426 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1427
1428 // Cache the settings for next time
1429 mPrevSettings.acquire(settings);
1430
1431 return OK;
1432}
1433
1434/** Debug methods */
1435
1436void EmulatedFakeCamera3::dump(int fd) {
1437
1438 String8 result;
1439 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
1440 result = String8::format("%s, valid resolution\n", __FILE__);
1441
1442 for (uint32_t f = 0; f < count; f+=2) {
1443 if (mAvailableJpegSize[f] == 0)
1444 break;
1445 result.appendFormat("width: %d , height =%d\n",
1446 mAvailableJpegSize[f], mAvailableJpegSize[f+1]);
1447 }
1448 result.appendFormat("\nmZoomMin: %d , mZoomMax =%d, mZoomStep=%d\n",
1449 mZoomMin, mZoomMax, mZoomStep);
1450
1451 if (mZoomStep <= 0) {
1452 result.appendFormat("!!!!!!!!!camera apk may have no picture out\n");
1453 }
1454
1455 write(fd, result.string(), result.size());
1456
1457 if (mSensor.get() != NULL) {
1458 mSensor->dump(fd);
1459 }
1460
1461}
// Flush all in-flight requests.
// TODO: immediately return the buffers held by every in-flight request,
// flagged with CAMERA3_BUFFER_STATUS_ERROR.
// Currently a logging-only stub; always reports success (0).
int EmulatedFakeCamera3::flush_all_requests() {
    DBG_LOGA("flush all request");
    return 0;
}
/** Tag query methods */

// Vendor tags are not supported by the fake camera; always returns NULL.
const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) {
    return NULL;
}
1473
// Vendor tags are not supported by the fake camera; always returns NULL.
const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) {
    return NULL;
}
1477
// Vendor tags are not supported by the fake camera; always returns 0.
int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
    return 0;
}
1481
1482/**
1483 * Private methods
1484 */
1485
1486camera_metadata_ro_entry_t EmulatedFakeCamera3::staticInfo(const CameraMetadata *info, uint32_t tag,
1487 size_t minCount, size_t maxCount, bool required) const {
1488
1489 camera_metadata_ro_entry_t entry = info->find(tag);
1490
1491 if (CC_UNLIKELY( entry.count == 0 ) && required) {
1492 const char* tagSection = get_camera_metadata_section_name(tag);
1493 if (tagSection == NULL) tagSection = "<unknown>";
1494 const char* tagName = get_camera_metadata_tag_name(tag);
1495 if (tagName == NULL) tagName = "<unknown>";
1496
1497 ALOGE("Error finding static metadata entry '%s.%s' (%x)",
1498 tagSection, tagName, tag);
1499 } else if (CC_UNLIKELY(
1500 (minCount != 0 && entry.count < minCount) ||
1501 (maxCount != 0 && entry.count > maxCount) ) ) {
1502 const char* tagSection = get_camera_metadata_section_name(tag);
1503 if (tagSection == NULL) tagSection = "<unknown>";
1504 const char* tagName = get_camera_metadata_tag_name(tag);
1505 if (tagName == NULL) tagName = "<unknown>";
1506 ALOGE("Malformed static metadata entry '%s.%s' (%x):"
1507 "Expected between %zu and %zu values, but got %zu values",
1508 tagSection, tagName, tag, minCount, maxCount, entry.count);
1509 }
1510
1511 return entry;
1512}
1513
1514//this is only for debug
1515void EmulatedFakeCamera3::getStreamConfigurationp(CameraMetadata *info) {
1516 const int STREAM_CONFIGURATION_SIZE = 4;
1517 const int STREAM_FORMAT_OFFSET = 0;
1518 const int STREAM_WIDTH_OFFSET = 1;
1519 const int STREAM_HEIGHT_OFFSET = 2;
1520 const int STREAM_IS_INPUT_OFFSET = 3;
1521
1522 camera_metadata_ro_entry_t availableStreamConfigs =
1523 staticInfo(info, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
1524 CAMHAL_LOGDB(" stream, availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1525
1526 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1527 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
1528 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
1529 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
1530 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
1531 CAMHAL_LOGDB("f=%x, w*h=%dx%d, du=%d\n", format, width, height, isInput);
1532 }
1533
1534}
1535
1536//this is only for debug
1537void EmulatedFakeCamera3::getStreamConfigurationDurations(CameraMetadata *info) {
1538 const int STREAM_CONFIGURATION_SIZE = 4;
1539 const int STREAM_FORMAT_OFFSET = 0;
1540 const int STREAM_WIDTH_OFFSET = 1;
1541 const int STREAM_HEIGHT_OFFSET = 2;
1542 const int STREAM_IS_INPUT_OFFSET = 3;
1543
1544 camera_metadata_ro_entry_t availableStreamConfigs =
1545 staticInfo(info, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
1546 CAMHAL_LOGDB("availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1547
1548 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1549 int64_t format = availableStreamConfigs.data.i64[i + STREAM_FORMAT_OFFSET];
1550 int64_t width = availableStreamConfigs.data.i64[i + STREAM_WIDTH_OFFSET];
1551 int64_t height = availableStreamConfigs.data.i64[i + STREAM_HEIGHT_OFFSET];
1552 int64_t isInput = availableStreamConfigs.data.i64[i + STREAM_IS_INPUT_OFFSET];
1553 CAMHAL_LOGDB("f=%llx, w*h=%lldx%lld, du=%lld\n", format, width, height, isInput);
1554 }
1555}
1556
// Hook for adjusting the static metadata before publication.
// Intentionally empty in this implementation.
void EmulatedFakeCamera3::updateCameraMetaData(CameraMetadata *info) {

}
1560
1561status_t EmulatedFakeCamera3::constructStaticInfo() {
1562
1563 CameraMetadata info;
1564 uint32_t picSizes[64 * 8];
1565 int64_t* duration = NULL;
1566 int count, duration_count, availablejpegsize;
1567 uint8_t maxCount = 10;
1568 char property[PROPERTY_VALUE_MAX];
1569 unsigned int supportrotate;
1570 availablejpegsize = ARRAY_SIZE(mAvailableJpegSize);
1571 memset(mAvailableJpegSize,0,(sizeof(uint32_t))*availablejpegsize);
1572 sp<Sensor> s = new Sensor();
1573 s->startUp(mCameraID);
1574 mSensorType = s->getSensorType();
1575
1576 if ( mSensorType == SENSOR_USB) {
1577 char property[PROPERTY_VALUE_MAX];
1578 property_get("rw.camera.usb.faceback", property, "false");
1579 if (strstr(property, "true"))
1580 mFacingBack = 1;
1581 else
1582 mFacingBack = 0;
1583 ALOGI("Setting usb camera cameraID:%d to back camera:%s\n",
1584 mCameraID, property);
1585 } else {
1586 if (s->mSensorFace == SENSOR_FACE_FRONT) {
1587 mFacingBack = 0;
1588 } else if (s->mSensorFace == SENSOR_FACE_BACK) {
1589 mFacingBack = 1;
1590 } else if (s->mSensorFace == SENSOR_FACE_NONE) {
1591 if (gEmulatedCameraFactory.getEmulatedCameraNum() == 1) {
1592 mFacingBack = 1;
1593 } else if ( mCameraID == 0) {
1594 mFacingBack = 1;
1595 } else {
1596 mFacingBack = 0;
1597 }
1598 }
1599
1600 ALOGI("Setting on board camera cameraID:%d to back camera:%d[0 false, 1 true]\n",
1601 mCameraID, mFacingBack);
1602 }
1603
1604 mSupportCap = s->IoctlStateProbe();
1605 if (mSupportCap & IOCTL_MASK_ROTATE) {
1606 supportrotate = true;
1607 } else {
1608 supportrotate = false;
1609 }
1610 // android.lens
1611
1612 // 5 cm min focus distance for back camera, infinity (fixed focus) for front
1613 // TODO read this ioctl from camera driver
1614 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
1615 const float minFocusDistance = 0.0;
1616 info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1617 &minFocusDistance, 1);
1618
1619 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1620 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
1621 info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1622 &minFocusDistance, 1);
1623
1624 static const float focalLength = 3.30f; // mm
1625 info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1626 &focalLength, 1);
1627 static const float aperture = 2.8f;
1628 info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1629 &aperture, 1);
1630 static const float filterDensity = 0;
1631 info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1632 &filterDensity, 1);
1633 static const uint8_t availableOpticalStabilization =
1634 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1635 info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1636 &availableOpticalStabilization, 1);
1637
1638 static const int32_t lensShadingMapSize[] = {1, 1};
1639 info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1640 sizeof(lensShadingMapSize)/sizeof(int32_t));
1641
1642 uint8_t lensFacing = mFacingBack ?
1643 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1644 info.update(ANDROID_LENS_FACING, &lensFacing, 1);
1645
1646 float lensPosition[3];
1647 if (mFacingBack) {
1648 // Back-facing camera is center-top on device
1649 lensPosition[0] = 0;
1650 lensPosition[1] = 20;
1651 lensPosition[2] = -5;
1652 } else {
1653 // Front-facing camera is center-right on device
1654 lensPosition[0] = 20;
1655 lensPosition[1] = 20;
1656 lensPosition[2] = 0;
1657 }
1658 info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
1659 sizeof(float));
1660 static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
1661 info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1);
1662
1663 // android.sensor
1664
1665 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1666 info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
1667 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1668 info.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
1669 info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1670 Sensor::kExposureTimeRange, 2);
1671
1672 info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1673 &Sensor::kFrameDurationRange[1], 1);
1674
1675 info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1676 Sensor::kSensitivityRange,
1677 sizeof(Sensor::kSensitivityRange)
1678 /sizeof(int32_t));
1679
1680 info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1681 &Sensor::kColorFilterArrangement, 1);
1682
1683 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1684 info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1685 sensorPhysicalSize, 2);
1686
1687 info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1688 (int32_t*)Sensor::kResolution, 2);
1689
1690 //(int32_t*)Sensor::kResolution, 2);
1691
1692 info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1693 (int32_t*)&Sensor::kMaxRawValue, 1);
1694
1695 static const int32_t blackLevelPattern[4] = {
1696 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1697 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
1698 };
1699 info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1700 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
1701
1702 static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
1703 info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
1704 if (mSensorType == SENSOR_USB) {
1705 if (mFacingBack) {
1706 property_get("hw.camera.orientation.back", property, "0");
1707 } else {
1708 property_get("hw.camera.orientation.front", property, "0");
1709 }
1710 int32_t orientation = atoi(property);
1711 property_get("hw.camera.usb.orientation_offset", property, "0");
1712 orientation += atoi(property);
1713 orientation %= 360;
1714 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1715 } else {
1716 if (mFacingBack) {
1717 property_get("hw.camera.orientation.back", property, "270");
1718 const int32_t orientation = atoi(property);
1719 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1720 } else {
1721 property_get("hw.camera.orientation.front", property, "90");
1722 const int32_t orientation = atoi(property);
1723 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1724 }
1725 }
1726
1727 static const int64_t rollingShutterSkew = 0;
1728 info.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
1729
1730 //TODO: sensor color calibration fields
1731
1732 // android.flash
1733 static const uint8_t flashAvailable = 0;
1734 info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1735
1736 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
1737 info.update(ANDROID_FLASH_STATE, &flashstate, 1);
1738
1739 static const int64_t flashChargeDuration = 0;
1740 info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
1741
1742 /** android.noise */
1743 static const uint8_t availableNBModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
1744 info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &availableNBModes, 1);
1745
1746 // android.tonemap
1747
1748 static const int32_t tonemapCurvePoints = 128;
1749 info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1750
1751 // android.scaler
1752
1753 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
1754 info.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
1755
1756 info.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1757 kAvailableFormats,
1758 sizeof(kAvailableFormats)/sizeof(int32_t));
1759
1760 info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1761 (int64_t*)kAvailableRawMinDurations,
1762 sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
1763
1764 //for version 3.2 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
1765 count = sizeof(picSizes)/sizeof(picSizes[0]);
1766 count = s->getStreamConfigurations(picSizes, kAvailableFormats, count);
1767
1768 info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1769 (int32_t*)picSizes, count);
1770
1771 if (count < availablejpegsize) {
1772 availablejpegsize = count;
1773 }
1774 getValidJpegSize(picSizes,mAvailableJpegSize,availablejpegsize);
1775
1776 maxJpegResolution = getMaxJpegResolution(picSizes,count);
1777 int32_t full_size[4];
1778 if (mFacingBack) {
1779 full_size[0] = 0;
1780 full_size[1] = 0;
1781 full_size[2] = maxJpegResolution.width;
1782 full_size[3] = maxJpegResolution.height;
1783 } else {
1784 full_size[0] = 0;
1785 full_size[1] = 0;
1786 full_size[2] = maxJpegResolution.width;
1787 full_size[3] = maxJpegResolution.height;
1788 }
1789 info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1790 (int32_t*)full_size,
1791 sizeof(full_size)/sizeof(full_size[0]));
1792 duration = new int64_t[count];
1793 if (duration == NULL) {
1794 DBG_LOGA("allocate memory for duration failed");
1795 return NO_MEMORY;
1796 } else {
1797 memset(duration,0,sizeof(int64_t)*count);
1798 }
1799 duration_count = s->getStreamConfigurationDurations(picSizes, duration , count);
1800
1801 info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1802 duration, duration_count);
1803 info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1804 duration, duration_count);
1805
1806 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1807 (int64_t*)kAvailableProcessedMinDurations,
1808 sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
1809
1810 info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
1811 (int64_t*)kAvailableJpegMinDurations,
1812 sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
1813
1814
1815 // android.jpeg
1816
1817 static const int32_t jpegThumbnailSizes[] = {
1818 0, 0,
1819 160, 120,
1820 320, 240
1821 };
1822 info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1823 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
1824
1825 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1826 info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1827
1828 // android.stats
1829
1830 static const uint8_t availableFaceDetectModes[] = {
1831 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1832 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1833 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
1834 };
1835
1836 info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1837 availableFaceDetectModes,
1838 sizeof(availableFaceDetectModes));
1839
1840 static const int32_t maxFaceCount = 8;
1841 info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1842 &maxFaceCount, 1);
1843
1844 static const int32_t histogramSize = 64;
1845 info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1846 &histogramSize, 1);
1847
1848 static const int32_t maxHistogramCount = 1000;
1849 info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1850 &maxHistogramCount, 1);
1851
1852 static const int32_t sharpnessMapSize[2] = {64, 64};
1853 info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1854 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
1855
1856 static const int32_t maxSharpnessMapValue = 1000;
1857 info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1858 &maxSharpnessMapValue, 1);
1859 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
1860 info.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
1861
1862 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
1863 info.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
1864 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
1865 info.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
1866 // android.control
1867
1868 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
1869 info.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
1870
1871 static const uint8_t availableSceneModes[] = {
1872 // ANDROID_CONTROL_SCENE_MODE_DISABLED,
1873 ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY
1874 };
1875 info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1876 availableSceneModes, sizeof(availableSceneModes));
1877
1878 static const uint8_t availableEffects[] = {
1879 ANDROID_CONTROL_EFFECT_MODE_OFF
1880 };
1881 info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1882 availableEffects, sizeof(availableEffects));
1883
1884 static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
1885 info.update(ANDROID_CONTROL_MAX_REGIONS,
1886 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
1887
1888 static const uint8_t availableAeModes[] = {
1889 ANDROID_CONTROL_AE_MODE_OFF,
1890 ANDROID_CONTROL_AE_MODE_ON
1891 };
1892 info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1893 availableAeModes, sizeof(availableAeModes));
1894
1895
1896 static const int32_t availableTargetFpsRanges[] = {
1897 5, 15, 15, 15, 5, 30, 30, 30,
1898 };
1899 info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1900 availableTargetFpsRanges,
1901 sizeof(availableTargetFpsRanges)/sizeof(int32_t));
1902
1903 uint8_t awbModes[maxCount];
1904 count = s->getAWB(awbModes, maxCount);
1905 if (count < 0) {
1906 static const uint8_t availableAwbModes[] = {
1907 ANDROID_CONTROL_AWB_MODE_OFF,
1908 ANDROID_CONTROL_AWB_MODE_AUTO,
1909 ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1910 ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1911 ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1912 ANDROID_CONTROL_AWB_MODE_SHADE
1913 };
1914 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1915 availableAwbModes, sizeof(availableAwbModes));
1916 } else {
1917 DBG_LOGB("getAWB %d ",count);
1918 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1919 awbModes, count);
1920 }
1921
1922 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1923 info.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
1924
1925 static const uint8_t availableAfModesFront[] = {
1926 ANDROID_CONTROL_AF_MODE_OFF
1927 };
1928
1929 if (mFacingBack) {
1930 uint8_t afMode[maxCount];
1931 count = s->getAutoFocus(afMode, maxCount);
1932 if (count < 0) {
1933 static const uint8_t availableAfModesBack[] = {
1934 ANDROID_CONTROL_AF_MODE_OFF,
1935 //ANDROID_CONTROL_AF_MODE_AUTO,
1936 //ANDROID_CONTROL_AF_MODE_MACRO,
1937 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
1938 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
1939 };
1940
1941 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1942 availableAfModesBack, sizeof(availableAfModesBack));
1943 } else {
1944 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1945 afMode, count);
1946 }
1947 } else {
1948 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1949 availableAfModesFront, sizeof(availableAfModesFront));
1950 }
1951
1952 uint8_t antiBanding[maxCount];
1953 count = s->getAntiBanding(antiBanding, maxCount);
1954 if (count < 0) {
1955 static const uint8_t availableAntibanding[] = {
1956 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
1957 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
1958 };
1959 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1960 availableAntibanding, sizeof(availableAntibanding));
1961 } else {
1962 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1963 antiBanding, count);
1964 }
1965
1966 camera_metadata_rational step;
1967 int maxExp, minExp, def, ret;
1968 ret = s->getExposure(&maxExp, &minExp, &def, &step);
1969 if (ret < 0) {
1970 static const int32_t aeExpCompensation = 0;
1971 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
1972
1973 static const camera_metadata_rational exposureCompensationStep = {
1974 1, 3
1975 };
1976 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1977 &exposureCompensationStep, 1);
1978
1979 int32_t exposureCompensationRange[] = {0, 0};
1980 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1981 exposureCompensationRange,
1982 sizeof(exposureCompensationRange)/sizeof(int32_t));
1983 } else {
1984 DBG_LOGB("exposure compensation support:(%d, %d)\n", minExp, maxExp);
1985 int32_t exposureCompensationRange[] = {minExp, maxExp};
1986 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1987 exposureCompensationRange,
1988 sizeof(exposureCompensationRange)/sizeof(int32_t));
1989 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1990 &step, 1);
1991 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &def, 1);
1992 }
1993
1994 ret = s->getZoom(&mZoomMin, &mZoomMax, &mZoomStep);
1995 if (ret < 0) {
1996 float maxZoom = 1.0;
1997 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1998 &maxZoom, 1);
1999 } else {
2000 float maxZoom = mZoomMax / mZoomMin;
2001 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2002 &maxZoom, 1);
2003 }
2004
2005 static const uint8_t availableVstabModes[] = {
2006 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
2007 };
2008 info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2009 availableVstabModes, sizeof(availableVstabModes));
2010
2011 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
2012 info.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
2013 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2014 info.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
2015 // android.info
2016 const uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2017 //mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
2018 // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2019 info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2020 &supportedHardwareLevel,
2021 /*count*/1);
2022
2023 int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
2024 info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1);
2025
2026 uint8_t len[] = {1};
2027 info.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
2028
2029 uint8_t maxlen[] = {2};
2030 info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
2031 uint8_t cap[] = {
2032 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
2033 };
2034 info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2035 (uint8_t *)cap, sizeof(cap)/sizeof(cap[0]));
2036
2037
2038 int32_t partialResultCount = 1;
2039 info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,&partialResultCount,1);
2040 int32_t maxNumOutputStreams[3] = {0,2,1};
2041 info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,maxNumOutputStreams,3);
2042 uint8_t aberrationMode[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
2043 info.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
2044 aberrationMode, 1);
2045 info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
2046 aberrationMode, 1);
2047
2048 getAvailableChKeys(&info, supportedHardwareLevel);
2049
2050 if (mCameraInfo != NULL) {
2051 CAMHAL_LOGDA("mCameraInfo is not null, mem leak?");
2052 }
2053 mCameraInfo = info.release();
2054 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
2055
2056 if (duration != NULL) {
2057 delete [] duration;
2058 }
2059
2060 s->shutDown();
2061 s.clear();
2062 mPlugged = true;
2063
2064 return OK;
2065}
2066
2067status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
2068 /**
2069 * Extract top-level 3A controls
2070 */
2071 status_t res;
2072
2073 bool facePriority = false;
2074
2075 camera_metadata_entry e;
2076
2077 e = settings.find(ANDROID_CONTROL_MODE);
2078 if (e.count == 0) {
2079 ALOGE("%s: No control mode entry!", __FUNCTION__);
2080 return BAD_VALUE;
2081 }
2082 uint8_t controlMode = e.data.u8[0];
2083
2084 e = settings.find(ANDROID_CONTROL_SCENE_MODE);
2085 if (e.count == 0) {
2086 ALOGE("%s: No scene mode entry!", __FUNCTION__);
2087 return BAD_VALUE;
2088 }
2089 uint8_t sceneMode = e.data.u8[0];
2090
2091 if (controlMode == ANDROID_CONTROL_MODE_OFF) {
2092 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2093 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2094 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2095 update3A(settings);
2096 return OK;
2097 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2098 switch(sceneMode) {
2099 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
2100 mFacePriority = true;
2101 break;
2102 default:
2103 ALOGE("%s: Emulator doesn't support scene mode %d",
2104 __FUNCTION__, sceneMode);
2105 return BAD_VALUE;
2106 }
2107 } else {
2108 mFacePriority = false;
2109 }
2110
2111 // controlMode == AUTO or sceneMode = FACE_PRIORITY
2112 // Process individual 3A controls
2113
2114 res = doFakeAE(settings);
2115 if (res != OK) return res;
2116
2117 res = doFakeAF(settings);
2118 if (res != OK) return res;
2119
2120 res = doFakeAWB(settings);
2121 if (res != OK) return res;
2122
2123 update3A(settings);
2124 return OK;
2125}
2126
2127status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
2128 camera_metadata_entry e;
2129
2130 e = settings.find(ANDROID_CONTROL_AE_MODE);
2131 if (e.count == 0) {
2132 ALOGE("%s: No AE mode entry!", __FUNCTION__);
2133 return BAD_VALUE;
2134 }
2135 uint8_t aeMode = e.data.u8[0];
2136
2137 switch (aeMode) {
2138 case ANDROID_CONTROL_AE_MODE_OFF:
2139 // AE is OFF
2140 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2141 return OK;
2142 case ANDROID_CONTROL_AE_MODE_ON:
2143 // OK for AUTO modes
2144 break;
2145 default:
2146 ALOGE("%s: Emulator doesn't support AE mode %d",
2147 __FUNCTION__, aeMode);
2148 return BAD_VALUE;
2149 }
2150
2151 e = settings.find(ANDROID_CONTROL_AE_LOCK);
2152 if (e.count == 0) {
2153 ALOGE("%s: No AE lock entry!", __FUNCTION__);
2154 return BAD_VALUE;
2155 }
2156 bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
2157
2158 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
2159 bool precaptureTrigger = false;
2160 if (e.count != 0) {
2161 precaptureTrigger =
2162 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
2163 }
2164
2165 if (precaptureTrigger) {
2166 ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
2167 } else if (e.count > 0) {
2168 ALOGV("%s: Pre capture trigger was present? %zu",
2169 __FUNCTION__,
2170 e.count);
2171 }
2172
2173 if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2174 // Run precapture sequence
2175 if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2176 mAeCounter = 0;
2177 }
2178
2179 if (mFacePriority) {
2180 mAeTargetExposureTime = kFacePriorityExposureTime;
2181 } else {
2182 mAeTargetExposureTime = kNormalExposureTime;
2183 }
2184
2185 if (mAeCounter > kPrecaptureMinFrames &&
2186 (mAeTargetExposureTime - mAeCurrentExposureTime) <
2187 mAeTargetExposureTime / 10) {
2188 // Done with precapture
2189 mAeCounter = 0;
2190 mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
2191 ANDROID_CONTROL_AE_STATE_CONVERGED;
2192 } else {
2193 // Converge some more
2194 mAeCurrentExposureTime +=
2195 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2196 kExposureTrackRate;
2197 mAeCounter++;
2198 mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
2199 }
2200
2201 } else if (!aeLocked) {
2202 // Run standard occasional AE scan
2203 switch (mAeState) {
2204 case ANDROID_CONTROL_AE_STATE_CONVERGED:
2205 case ANDROID_CONTROL_AE_STATE_INACTIVE:
2206 mAeCounter++;
2207 if (mAeCounter > kStableAeMaxFrames) {
2208 mAeTargetExposureTime =
2209 mFacePriority ? kFacePriorityExposureTime :
2210 kNormalExposureTime;
2211 float exposureStep = ((double)rand() / RAND_MAX) *
2212 (kExposureWanderMax - kExposureWanderMin) +
2213 kExposureWanderMin;
2214 mAeTargetExposureTime *= std::pow(2, exposureStep);
2215 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2216 }
2217 break;
2218 case ANDROID_CONTROL_AE_STATE_SEARCHING:
2219 mAeCurrentExposureTime +=
2220 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2221 kExposureTrackRate;
2222 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
2223 mAeTargetExposureTime / 10) {
2224 // Close enough
2225 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2226 mAeCounter = 0;
2227 }
2228 break;
2229 case ANDROID_CONTROL_AE_STATE_LOCKED:
2230 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2231 mAeCounter = 0;
2232 break;
2233 default:
2234 ALOGE("%s: Emulator in unexpected AE state %d",
2235 __FUNCTION__, mAeState);
2236 return INVALID_OPERATION;
2237 }
2238 } else {
2239 // AE is locked
2240 mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2241 }
2242
2243 return OK;
2244}
2245
/*
 * Simulate auto-focus for one request.
 *
 * Reads the AF mode and (optional) AF trigger from @settings and advances
 * the fake AF state machine by at most one transition per frame. Focusing
 * always "succeeds" (randomly lands in FOCUSED or NOT_FOCUSED). For
 * front-facing cameras AF is forced OFF. Back-facing hardware AF is also
 * forwarded to the sensor via setAutoFocuas().
 *
 * Returns OK, or BAD_VALUE for missing/inconsistent/unsupported controls.
 */
status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AF_MODE);
    if (e.count == 0) {
        ALOGE("%s: No AF mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t afMode = e.data.u8[0];

    e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
    typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
    af_trigger_t afTrigger;
    // If we have an afTrigger, afTriggerId should be set too
    if (e.count != 0) {
        afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);

        e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);

        if (e.count == 0) {
            ALOGE("%s: When android.control.afTrigger is set "
                    " in the request, afTriggerId needs to be set as well",
                    __FUNCTION__);
            return BAD_VALUE;
        }

        mAfTriggerId = e.data.i32[0];

        ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
        ALOGV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
        ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
    } else {
        afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    }
    // Front camera: no AF hardware is simulated, force mode OFF.
    if (!mFacingBack) {
        afMode = ANDROID_CONTROL_AF_MODE_OFF;
    }

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_OFF:
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_MACRO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            if (!mFacingBack) {
                // Unreachable in practice: afMode was forced OFF above for
                // front cameras, but kept as a defensive check.
                ALOGE("%s: Front camera doesn't support AF mode %d",
                        __FUNCTION__, afMode);
                return BAD_VALUE;
            }
            // Forward the requested mode to the (fake) sensor driver.
            // NOTE: "Focuas" is the project's existing (misspelled) API name.
            mSensor->setAutoFocuas(afMode);
            // OK, handle transitions lower on
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }
#if 0
    // Disabled: would forward the requested AF region to the sensor.
    e = settings.find(ANDROID_CONTROL_AF_REGIONS);
    if (e.count == 0) {
        ALOGE("%s:Get ANDROID_CONTROL_AF_REGIONS failed\n", __FUNCTION__);
        return BAD_VALUE;
    }
    int32_t x0 = e.data.i32[0];
    int32_t y0 = e.data.i32[1];
    int32_t x1 = e.data.i32[2];
    int32_t y1 = e.data.i32[3];
    mSensor->setFocuasArea(x0, y0, x1, y1);
    DBG_LOGB(" x0:%d, y0:%d,x1:%d,y1:%d,\n", x0, y0, x1, y1);
#endif


    bool afModeChanged = mAfMode != afMode;
    mAfMode = afMode;

    /**
     * Simulate AF triggers. Transition at most 1 state per frame.
     * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
     */

    bool afTriggerStart = false;
    bool afTriggerCancel = false;
    switch (afTrigger) {
        case ANDROID_CONTROL_AF_TRIGGER_IDLE:
            break;
        case ANDROID_CONTROL_AF_TRIGGER_START:
            afTriggerStart = true;
            break;
        case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
            afTriggerCancel = true;
            // Cancel trigger always transitions into INACTIVE
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

            ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);

            // Stay in 'inactive' until at least next frame
            return OK;
        default:
            ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
            return BAD_VALUE;
    }

    // If we get down here, we're either in an autofocus mode
    // or in a continuous focus mode (and no other modes)

    // Remember the previous state so the transition can be logged below.
    int oldAfState = mAfState;
    switch (mAfState) {
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                        break;
                }
            } else {
                // At least one frame stays in INACTIVE
                if (!afModeChanged) {
                    switch (afMode) {
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                            // fall-through
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                            mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
                            break;
                    }
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
            /**
             * When the AF trigger is activated, the algorithm should finish
             * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
             * or AF_NOT_FOCUSED as appropriate
             */
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            /**
             * When the AF trigger is not involved, the AF algorithm should
             * start in INACTIVE state, and then transition into PASSIVE_SCAN
             * and PASSIVE_FOCUSED states
             */
            else if (!afTriggerCancel) {
                // Randomly transition to passive focus
                if (rand() % 3 == 0) {
                    mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
            }

            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            // TODO: initiate passive scan (PASSIVE_SCAN)
            break;
        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
            // Simulate AF sweep completing instantaneously

            // Randomly transition to focused or not focused
            if (rand() % 3) {
                mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
            } else {
                mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
            }
            break;
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        default:
            ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
    }

    // Log the transition (verbose builds only).
    {
        char afStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                oldAfState,
                afStateString,
                sizeof(afStateString));

        char afNewStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                mAfState,
                afNewStateString,
                sizeof(afNewStateString));
        ALOGVV("%s: AF state transitioned from %s to %s",
                __FUNCTION__, afStateString, afNewStateString);
    }


    return OK;
}
2485
2486status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2487 camera_metadata_entry e;
2488
2489 e = settings.find(ANDROID_CONTROL_AWB_MODE);
2490 if (e.count == 0) {
2491 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2492 return BAD_VALUE;
2493 }
2494 uint8_t awbMode = e.data.u8[0];
2495 //DBG_LOGB(" awbMode%d\n", awbMode);
2496
2497 // TODO: Add white balance simulation
2498
2499 switch (awbMode) {
2500 case ANDROID_CONTROL_AWB_MODE_OFF:
2501 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2502 return OK;
2503 case ANDROID_CONTROL_AWB_MODE_AUTO:
2504 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2505 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2506 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2507 case ANDROID_CONTROL_AWB_MODE_SHADE:
2508 mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; //add for cts
2509 return mSensor->setAWB(awbMode);
2510 // OK
2511 break;
2512 default:
2513 ALOGE("%s: Emulator doesn't support AWB mode %d",
2514 __FUNCTION__, awbMode);
2515 return BAD_VALUE;
2516 }
2517
2518 return OK;
2519}
2520
2521
2522void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2523 if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
2524 settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
2525 &mAeCurrentExposureTime, 1);
2526 settings.update(ANDROID_SENSOR_SENSITIVITY,
2527 &mAeCurrentSensitivity, 1);
2528 }
2529
2530 settings.update(ANDROID_CONTROL_AE_STATE,
2531 &mAeState, 1);
2532 settings.update(ANDROID_CONTROL_AF_STATE,
2533 &mAfState, 1);
2534 settings.update(ANDROID_CONTROL_AWB_STATE,
2535 &mAwbState, 1);
2536 /**
2537 * TODO: Trigger IDs need a think-through
2538 */
2539 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
2540 &mAfTriggerId, 1);
2541}
2542
2543void EmulatedFakeCamera3::signalReadoutIdle() {
2544 Mutex::Autolock l(mLock);
2545 // Need to chek isIdle again because waiting on mLock may have allowed
2546 // something to be placed in the in-flight queue.
2547 if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2548 ALOGV("Now idle");
2549 mStatus = STATUS_READY;
2550 }
2551}
2552
2553void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2554 nsecs_t timestamp) {
2555 switch(e) {
2556 case Sensor::SensorListener::EXPOSURE_START: {
2557 ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
2558 __FUNCTION__, frameNumber, timestamp);
2559 // Trigger shutter notify to framework
2560 camera3_notify_msg_t msg;
2561 msg.type = CAMERA3_MSG_SHUTTER;
2562 msg.message.shutter.frame_number = frameNumber;
2563 msg.message.shutter.timestamp = timestamp;
2564 sendNotify(&msg);
2565 break;
2566 }
2567 default:
2568 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
2569 e, timestamp);
2570 break;
2571 }
2572}
2573
2574EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
2575 mParent(parent), mJpegWaiting(false) {
2576}
2577
2578EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2579 for (List<Request>::iterator i = mInFlightQueue.begin();
2580 i != mInFlightQueue.end(); i++) {
2581 delete i->buffers;
2582 delete i->sensorBuffers;
2583 }
2584}
2585
2586void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
2587 Mutex::Autolock l(mLock);
2588
2589 mInFlightQueue.push_back(r);
2590 mInFlightSignal.signal();
2591}
2592
2593bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
2594 Mutex::Autolock l(mLock);
2595 return mInFlightQueue.empty() && !mThreadActive;
2596}
2597
2598status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2599 status_t res;
2600 Mutex::Autolock l(mLock);
2601 int loopCount = 0;
2602 while (mInFlightQueue.size() >= kMaxQueueSize) {
2603 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2604 if (res != OK && res != TIMED_OUT) {
2605 ALOGE("%s: Error waiting for in-flight queue to shrink",
2606 __FUNCTION__);
2607 return INVALID_OPERATION;
2608 }
2609 if (loopCount == kMaxWaitLoops) {
2610 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2611 __FUNCTION__);
2612 return TIMED_OUT;
2613 }
2614 loopCount++;
2615 }
2616 return OK;
2617}
2618
/*
 * One iteration of the readout thread.
 *
 * Dequeues the next in-flight request (if none is being worked on),
 * waits for the sensor to deliver its frame, sends any BLOB (JPEG) buffer
 * to the async compressor, and returns all remaining buffers plus the
 * result metadata to the framework.
 *
 * Returns true to keep the thread running, false on a fatal wait error.
 */
bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
    status_t res;

    ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);

    // First wait for a request from the in-flight queue

    if (mCurrentRequest.settings.isEmpty()) {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
            if (res == TIMED_OUT) {
                ALOGVV("%s: ReadoutThread: Timed out waiting for request",
                        __FUNCTION__);
                // No work yet; let the thread loop around and wait again.
                return true;
            } else if (res != NO_ERROR) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                return false;
            }
        }
        // Take ownership of the head request's data, then drop it from the
        // queue and wake any producer blocked in waitForReadout().
        mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
        mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
        mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
        mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
        mInFlightQueue.erase(mInFlightQueue.begin());
        mInFlightSignal.signal();
        mThreadActive = true;
        ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
                mCurrentRequest.frameNumber);
    }

    // Then wait for it to be delivered from the sensor
    ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
            __FUNCTION__);

    nsecs_t captureTime;
    bool gotFrame =
            mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
    if (!gotFrame) {
        // Keep mCurrentRequest and retry on the next loop iteration.
        ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
                __FUNCTION__);
        return true;
    }

    ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
            mCurrentRequest.frameNumber, captureTime);

    // Check if we need to JPEG encode a buffer, and send it for async
    // compression if so. Otherwise prepare the buffer for return.
    bool needJpeg = false;
    HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
    while(buf != mCurrentRequest.buffers->end()) {
        bool goodBuffer = true;
        if ( buf->stream->format ==
                HAL_PIXEL_FORMAT_BLOB) {
            Mutex::Autolock jl(mJpegLock);
            if (mJpegWaiting) {

                // This shouldn't happen, because processCaptureRequest should
                // be stalling until JPEG compressor is free.
                //
                ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
                goodBuffer = false;
            }
            if (goodBuffer) {
                // Compressor takes ownership of sensorBuffers here
                res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
                        this);
                goodBuffer = (res == OK);
            }
            if (goodBuffer) {
                needJpeg = true;

                // Stash the HAL buffer; it will be returned to the framework
                // from onJpegDone() once compression finishes.
                mJpegHalBuffer = *buf;
                mJpegFrameNumber = mCurrentRequest.frameNumber;
                mJpegWaiting = true;

                // sensorBuffers now belong to the compressor; the JPEG HAL
                // buffer is removed from this result's buffer list.
                mCurrentRequest.sensorBuffers = NULL;
                buf = mCurrentRequest.buffers->erase(buf);

                continue;
            }
            ALOGE("%s: Error compressing output buffer: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            // fallthrough for cleanup
        }
        // Non-JPEG (or failed-JPEG) buffer: unlock and mark for return.
        GraphicBufferMapper::get().unlock(*(buf->buffer));

        buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
                CAMERA3_BUFFER_STATUS_ERROR;
        buf->acquire_fence = -1;
        buf->release_fence = -1;

        ++buf;
    } // end while

    // Construct result for all completed buffers and results

    camera3_capture_result result;

    mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
            &captureTime, 1);

    memset(&result, 0, sizeof(result));
    result.frame_number = mCurrentRequest.frameNumber;
    result.result = mCurrentRequest.settings.getAndLock();
    result.num_output_buffers = mCurrentRequest.buffers->size();
    result.output_buffers = mCurrentRequest.buffers->array();
    result.partial_result = 1;

    // Go idle if queue is empty, before sending result
    bool signalIdle = false;
    {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            mThreadActive = false;
            signalIdle = true;
        }
    }
    // Note: signalReadoutIdle() takes the parent lock, so it is called
    // outside our own mLock scope above.
    if (signalIdle) mParent->signalReadoutIdle();

    // Send it off to the framework
    ALOGVV("%s: ReadoutThread: Send result to framework",
            __FUNCTION__);
    mParent->sendCaptureResult(&result);

    // Clean up
    mCurrentRequest.settings.unlock(result.result);

    delete mCurrentRequest.buffers;
    mCurrentRequest.buffers = NULL;
    if (!needJpeg) {
        // When a JPEG is pending, sensorBuffers were handed to the
        // compressor (or already cleared); only delete them otherwise.
        delete mCurrentRequest.sensorBuffers;
        mCurrentRequest.sensorBuffers = NULL;
    }
    mCurrentRequest.settings.clear();

    return true;
}
2759
2760void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2761 const StreamBuffer &jpegBuffer, bool success) {
2762 Mutex::Autolock jl(mJpegLock);
2763
2764 GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));
2765
2766 mJpegHalBuffer.status = success ?
2767 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2768 mJpegHalBuffer.acquire_fence = -1;
2769 mJpegHalBuffer.release_fence = -1;
2770 mJpegWaiting = false;
2771
2772 camera3_capture_result result;
2773 result.frame_number = mJpegFrameNumber;
2774 result.result = NULL;
2775 result.num_output_buffers = 1;
2776 result.output_buffers = &mJpegHalBuffer;
2777 result.partial_result = 1;
2778
2779 if (!success) {
2780 ALOGE("%s: Compression failure, returning error state buffer to"
2781 " framework", __FUNCTION__);
2782 } else {
2783 DBG_LOGB("%s: Compression complete, returning buffer to framework",
2784 __FUNCTION__);
2785 }
2786
2787 mParent->sendCaptureResult(&result);
2788}
2789
2790void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
2791 const StreamBuffer &inputBuffer) {
2792 // Should never get here, since the input buffer has to be returned
2793 // by end of processCaptureRequest
2794 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2795}
2796
2797
2798}; // namespace android
2799