path: root/v3/EmulatedFakeCamera3.cpp (plain)
blob: 2967de38a205c50614a07645ebd9a65dc3dba814
1/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/*
18 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22#include <inttypes.h>
23
24//#define LOG_NDEBUG 0
25//#define LOG_NNDEBUG 0
26#define LOG_TAG "EmulatedCamera_FakeCamera3"
27#include <utils/Log.h>
28
29#include "EmulatedFakeCamera3.h"
30#include "EmulatedCameraFactory.h"
31#include <ui/Fence.h>
32#include <ui/Rect.h>
33#include <ui/GraphicBufferMapper.h>
34#include <sys/types.h>
35
36#include <cutils/properties.h>
37#include "fake-pipeline2/Sensor.h"
38#include "fake-pipeline2/JpegCompressor.h"
39#include <cmath>
40#include <gralloc_priv.h>
41#include <binder/IPCThreadState.h>
42
43#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
44#define ALOGVV ALOGV
45#else
46#define ALOGVV(...) ((void)0)
47#endif
48
49namespace android {
50
51/**
52 * Constants for camera capabilities
53 */
54
55const int64_t USEC = 1000LL;
56const int64_t MSEC = USEC * 1000LL;
57const int64_t SEC = MSEC * 1000LL;
58
59
60const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
61 //HAL_PIXEL_FORMAT_RAW_SENSOR,
62 HAL_PIXEL_FORMAT_BLOB,
63 //HAL_PIXEL_FORMAT_RGBA_8888,
64 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
65 // These are handled by YCbCr_420_888
66 HAL_PIXEL_FORMAT_YV12,
67 HAL_PIXEL_FORMAT_YCrCb_420_SP,
68 //HAL_PIXEL_FORMAT_YCbCr_422_I,
69 HAL_PIXEL_FORMAT_YCbCr_420_888
70};
71
72const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
73 640, 480
74 // Sensor::kResolution[0], Sensor::kResolution[1]
75};
76
77const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
78 (const uint64_t)Sensor::kFrameDurationRange[0]
79};
80
81const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[6] = {
82 640, 480, 320, 240,// 1280, 720
83 // Sensor::kResolution[0], Sensor::kResolution[1]
84};
85
86const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
87 640, 480, 320, 240
88 // Sensor::kResolution[0], Sensor::kResolution[1]
89};
90
91const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
92 (const uint64_t)Sensor::kFrameDurationRange[0]
93};
94
95const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
 96 1280, 720
97 // Sensor::kResolution[0], Sensor::kResolution[1]
98};
99
100const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
101 640, 480
102 // Sensor::kResolution[0], Sensor::kResolution[1]
103};
104
105
106const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
107 (const uint64_t)Sensor::kFrameDurationRange[0]
108};
109
110/**
111 * 3A constants
112 */
113
114// Default exposure and gain targets for different scenarios
115const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
116const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
117const int EmulatedFakeCamera3::kNormalSensitivity = 100;
118const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
119const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
120const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
121const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
122const float EmulatedFakeCamera3::kExposureWanderMin = -2;
123const float EmulatedFakeCamera3::kExposureWanderMax = 1;
124
125/**
126 * Camera device lifecycle methods
127 */
128static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
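// Scan picSizes (groups of four ints: format, width, height, in/out flag, as
// laid out in getStreamConfigurationp()) and return the largest resolution
// advertised for HAL_PIXEL_FORMAT_BLOB, i.e. the maximum JPEG size.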
129jpegsize EmulatedFakeCamera3::getMaxJpegResolution(uint32_t picSizes[],int count) {
130 uint32_t maxJpegWidth = 0, maxJpegHeight = 0;
131 jpegsize maxJpegResolution;
132 for (int i=0; i < count; i+= 4) {
133 uint32_t width = picSizes[i+1];
134 uint32_t height = picSizes[i+2];
135 if (picSizes[i+0] == HAL_PIXEL_FORMAT_BLOB &&
136 (width * height > maxJpegWidth * maxJpegHeight)) {
137 maxJpegWidth = width;
138 maxJpegHeight = height;
139 }
140 }
141 maxJpegResolution.width = maxJpegWidth;
142 maxJpegResolution.height = maxJpegHeight;
143 return maxJpegResolution;
144}
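// Estimate the JPEG buffer size for a given resolution by scaling
// JpegCompressor::kMaxJpegSize linearly with the pixel count relative to the
// maximum JPEG resolution, clamped to [kMinJpegBufferSize, kMaxJpegSize].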
145ssize_t EmulatedFakeCamera3::getJpegBufferSize(int width, int height) {
146 if (maxJpegResolution.width == 0) {
147 return BAD_VALUE;
148 }
149 ssize_t maxJpegBufferSize = JpegCompressor::kMaxJpegSize;
150
151 // Calculate final jpeg buffer size for the given resolution.
152 float scaleFactor = ((float) (width * height)) /
153 (maxJpegResolution.width * maxJpegResolution.height);
154 ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize;
 155 // Bound the buffer size to [kMinJpegBufferSize, maxJpegBufferSize].
156 if (jpegBufferSize > maxJpegBufferSize) {
157 jpegBufferSize = maxJpegBufferSize;
158 } else if (jpegBufferSize < kMinJpegBufferSize) {
159 jpegBufferSize = kMinJpegBufferSize;
160 }
161 return jpegBufferSize;
162}
163
164EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, struct hw_module_t* module) :
165 EmulatedCamera3(cameraId, module) {
166 ALOGI("Constructing emulated fake camera 3 cameraID:%d", mCameraID);
167
168 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
169 mDefaultTemplates[i] = NULL;
170 }
171
172 /**
173 * Front cameras = limited mode
174 * Back cameras = full mode
175 */
176 //TODO limited or full mode, read this from camera driver
177 //mFullMode = facingBack;
178 mCameraStatus = CAMERA_INIT;
179 mSupportCap = 0;
180 mSupportRotate = 0;
181 mFullMode = 0;
182
183 gLoadXml.parseXMLFile();
184}
185
186EmulatedFakeCamera3::~EmulatedFakeCamera3() {
187 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
188 if (mDefaultTemplates[i] != NULL) {
189 free_camera_metadata(mDefaultTemplates[i]);
190 }
191 }
192
193 if (mCameraInfo != NULL) {
194 CAMHAL_LOGIA("free mCameraInfo");
195 free_camera_metadata(mCameraInfo);
196 mCameraInfo = NULL;
197 }
198}
199
200status_t EmulatedFakeCamera3::Initialize() {
201 DBG_LOGB("mCameraID=%d,mStatus=%d,ddd\n", mCameraID, mStatus);
202 status_t res;
203
204#ifdef HAVE_VERSION_INFO
205 CAMHAL_LOGIB("\n--------------------------------\n"
206 "author:aml.sh multi-media team\n"
207 "branch name: %s\n"
208 "git version: %s \n"
209 "last changed: %s\n"
210 "build-time: %s\n"
211 "build-name: %s\n"
212 "uncommitted-file-num:%d\n"
213 "ssh user@%s, cd %s\n"
214 "hostname %s\n"
215 "--------------------------------\n",
216 CAMHAL_BRANCH_NAME,
217 CAMHAL_GIT_VERSION,
218 CAMHAL_LAST_CHANGED,
219 CAMHAL_BUILD_TIME,
220 CAMHAL_BUILD_NAME,
221 CAMHAL_GIT_UNCOMMIT_FILE_NUM,
222 CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
223 );
224#endif
225
226
227 if (mStatus != STATUS_ERROR) {
228 ALOGE("%s: Already initialized!", __FUNCTION__);
229 return INVALID_OPERATION;
230 }
231
232 res = constructStaticInfo();
233 if (res != OK) {
234 ALOGE("%s: Unable to allocate static info: %s (%d)",
235 __FUNCTION__, strerror(-res), res);
236 return res;
237 }
238
239 return EmulatedCamera3::Initialize();
240}
241
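// Open the camera: start the sensor, probe its ioctl capabilities (e.g.
// rotation support), start the readout thread and JPEG compressor, and reset
// the fake 3A state before delegating to EmulatedCamera3::connectCamera().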
242status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
243 ALOGV("%s: E", __FUNCTION__);
244 DBG_LOGA("ddd");
245 Mutex::Autolock l(mLock);
246 status_t res;
247
248 if ((mStatus != STATUS_CLOSED) || !mPlugged) {
249 ALOGE("%s: Can't connect in state %d, mPlugged=%d",
250 __FUNCTION__, mStatus, mPlugged);
251 return INVALID_OPERATION;
252 }
253
254 mSensor = new Sensor();
255 mSensor->setSensorListener(this);
256
257 res = mSensor->startUp(mCameraID);
258 DBG_LOGB("mSensor startUp, mCameraID=%d\n", mCameraID);
259 if (res != NO_ERROR) return res;
260
261 mSupportCap = mSensor->IoctlStateProbe();
262 if (mSupportCap & IOCTL_MASK_ROTATE) {
263 mSupportRotate = true;
264 }
265
266 mReadoutThread = new ReadoutThread(this);
267 mJpegCompressor = new JpegCompressor();
268
269 res = mReadoutThread->setJpegCompressorListener(this);
270 if (res != NO_ERROR) {
271 return res;
272 }
273 res = mReadoutThread->startJpegCompressor(this);
274 if (res != NO_ERROR) {
275 return res;
276 }
277
278 res = mReadoutThread->run("EmuCam3::readoutThread");
279 if (res != NO_ERROR) return res;
280
281 // Initialize fake 3A
282
283 mControlMode = ANDROID_CONTROL_MODE_AUTO;
284 mFacePriority = false;
285 mAeMode = ANDROID_CONTROL_AE_MODE_ON;
286 mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
287 mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
288 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;//ANDROID_CONTROL_AE_STATE_INACTIVE;
289 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
290 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
291 mAfTriggerId = 0;
292 mAeCurrentExposureTime = kNormalExposureTime;
293 mAeCurrentSensitivity = kNormalSensitivity;
294
295 return EmulatedCamera3::connectCamera(device);
296}
297
298status_t EmulatedFakeCamera3::plugCamera() {
299 {
300 Mutex::Autolock l(mLock);
301
302 if (!mPlugged) {
303 CAMHAL_LOGIB("%s: Plugged back in", __FUNCTION__);
304 mPlugged = true;
305 }
306 }
307
308 return NO_ERROR;
309}
310
311status_t EmulatedFakeCamera3::unplugCamera() {
312 {
313 Mutex::Autolock l(mLock);
314
315 if (mPlugged) {
316 CAMHAL_LOGIB("%s: Unplugged camera", __FUNCTION__);
317 mPlugged = false;
318 }
319 }
 320 return NO_ERROR;
321}
322
323camera_device_status_t EmulatedFakeCamera3::getHotplugStatus() {
324 Mutex::Autolock l(mLock);
325 return mPlugged ?
326 CAMERA_DEVICE_STATUS_PRESENT :
327 CAMERA_DEVICE_STATUS_NOT_PRESENT;
328}
329
330bool EmulatedFakeCamera3::getCameraStatus()
331{
332 CAMHAL_LOGVB("%s, mCameraStatus = %d",__FUNCTION__,mCameraStatus);
333 bool ret = false;
334 if (mStatus == STATUS_CLOSED) {
335 ret = true;
336 } else {
337 ret = false;
338 }
339 return ret;
340}
341
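// Close the camera: stop the sensor, shut down the JPEG compressor and the
// readout thread, then free per-stream private data before delegating to
// EmulatedCamera3::closeCamera().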
342status_t EmulatedFakeCamera3::closeCamera() {
343 DBG_LOGB("%s, %d\n", __FUNCTION__, __LINE__);
344
345 status_t res;
346 {
347 Mutex::Autolock l(mLock);
348 if (mStatus == STATUS_CLOSED) return OK;
349 }
350 mSensor->sendExitSingalToSensor();
351 res = mSensor->shutDown();
352 if (res != NO_ERROR) {
353 ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
354 return res;
355 }
356 mSensor.clear();
357
358 {
359 Mutex::Autolock l(mLock);
360 res = mReadoutThread->shutdownJpegCompressor(this);
361 if (res != OK) {
362 ALOGE("%s: Unable to shut down JpegCompressor: %d", __FUNCTION__, res);
363 return res;
364 }
365 mReadoutThread->sendExitReadoutThreadSignal();
366 mReadoutThread->requestExit();
367 }
368 mReadoutThread->join();
 369 DBG_LOGA("Successfully exited ReadoutThread");
370 {
371 Mutex::Autolock l(mLock);
372 // Clear out private stream information
373 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
374 PrivateStreamInfo *privStream =
375 static_cast<PrivateStreamInfo*>((*s)->priv);
376 delete privStream;
377 (*s)->priv = NULL;
378 }
379 mStreams.clear();
380 mReadoutThread.clear();
381 }
382
383 return EmulatedCamera3::closeCamera();
384}
385
386status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
387 char property[PROPERTY_VALUE_MAX];
388 char* tempApkName = gLoadXml.getApkPackageName(IPCThreadState::self()->getCallingPid());
389 List_Or * temp=new List_Or();
390 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
391 if (mSensorType == SENSOR_USB) {
392 if (mFacingBack) {
393 property_get("hw.camera.orientation.back", property, "0");
394 } else {
395 property_get("hw.camera.orientation.front", property, "0");
396 }
397 int32_t orientation = atoi(property);
398
399 if (gLoadXml.findApkCp(tempApkName, temp)) {
400 orientation = atoi(temp->pro);
401 }
402 if (temp != NULL) {
403 delete temp;
404 temp = NULL;
405 }
406
407 property_get("hw.camera.usb.orientation_offset", property, "0");
408 orientation += atoi(property);
409 orientation %= 360;
 410 info->orientation = orientation;
411 } else {
412 if (mFacingBack) {
413 property_get("hw.camera.orientation.back", property, "270");
414 } else {
415 property_get("hw.camera.orientation.front", property, "90");
416 }
417 info->orientation = atoi(property);
418 }
419 return EmulatedCamera3::getCameraInfo(info);
420}
421
422/**
423 * Camera3 interface methods
424 */
425
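// Copy the distinct JPEG (width, height) pairs from picSizes into
// availablejpegsize, skipping any size whose pixel count duplicates one that
// has already been recorded.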
426void EmulatedFakeCamera3::getValidJpegSize(uint32_t picSizes[], uint32_t availablejpegsize[], int count) {
427 int i,j,k;
428 bool valid = true;
429 for (i=0,j=0; i < count; i+= 4) {
430 for (k= 0; k<=j ;k+=2) {
431 if ((availablejpegsize[k]*availablejpegsize[k+1]) == (picSizes[i+1]*picSizes[i+2])) {
432
433 valid = false;
434 }
435 }
436 if (valid) {
437 availablejpegsize[j] = picSizes[i+1];
438 availablejpegsize[j+1] = picSizes[i+2];
439 j+=2;
440 }
441 valid = true;
442 }
443}
444
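// Return OK only if (width, height) matches one of the sizes stored in
// mAvailableJpegSize; the list is terminated by a zero entry.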
445status_t EmulatedFakeCamera3::checkValidJpegSize(uint32_t width, uint32_t height) {
446
447 int validsizecount = 0;
448 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
449 for (uint32_t f = 0; f < count; f+=2) {
450 if (mAvailableJpegSize[f] != 0) {
451 if ((mAvailableJpegSize[f] == width)&&(mAvailableJpegSize[f+1] == height)) {
452 validsizecount++;
453 }
454 } else {
455 break;
456 }
457 }
458 if (validsizecount == 0)
459 return BAD_VALUE;
460 return OK;
461}
462
463status_t EmulatedFakeCamera3::configureStreams(
464 camera3_stream_configuration *streamList) {
465 Mutex::Autolock l(mLock);
466 uint32_t width, height, pixelfmt;
467 bool isRestart = false;
468 DBG_LOGB("%s: %d streams", __FUNCTION__, streamList->num_streams);
469
470 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
471 ALOGE("%s: Cannot configure streams in state %d",
472 __FUNCTION__, mStatus);
473 return NO_INIT;
474 }
475
476 /**
477 * Sanity-check input list.
478 */
479 if (streamList == NULL) {
480 ALOGE("%s: NULL stream configuration", __FUNCTION__);
481 return BAD_VALUE;
482 }
483
484 if (streamList->streams == NULL) {
485 ALOGE("%s: NULL stream list", __FUNCTION__);
486 return BAD_VALUE;
487 }
488
489 if (streamList->num_streams < 1) {
490 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
491 streamList->num_streams);
492 return BAD_VALUE;
493 }
494
495 camera3_stream_t *inputStream = NULL;
496 for (size_t i = 0; i < streamList->num_streams; i++) {
497 camera3_stream_t *newStream = streamList->streams[i];
498
499 if (newStream == NULL) {
500 ALOGE("%s: Stream index %zu was NULL",
501 __FUNCTION__, i);
502 return BAD_VALUE;
503 }
504
505 if (newStream->max_buffers <= 0) {
506 isRestart = true;//mSensor->isNeedRestart(newStream->width, newStream->height, newStream->format);
507 DBG_LOGB("format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d, isRestart=%d\n",
508 newStream->format, newStream->width, newStream->height,
509 newStream->stream_type, newStream->max_buffers,
510 isRestart);
511 }
512 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
513 __FUNCTION__, newStream, i, newStream->stream_type,
514 newStream->usage,
515 newStream->format);
516
517 if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
518 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
519 if (inputStream != NULL) {
520
521 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
522 return BAD_VALUE;
523 }
524 inputStream = newStream;
525 }
526
527 bool validFormat = false;
528 for (size_t f = 0;
529 f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
530 f++) {
531 if (newStream->format == kAvailableFormats[f]) {
532 validFormat = true;
533 //HAL_PIXEL_FORMAT_YCrCb_420_SP,
534 if (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format)
535 newStream->format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
536
537 break;
538 }
539 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
540 }
541 if (!validFormat) {
542 ALOGE("%s: Unsupported stream format 0x%x requested",
543 __FUNCTION__, newStream->format);
544 return BAD_VALUE;
545 }
546
547 status_t ret = checkValidJpegSize(newStream->width, newStream->height);
548 if (ret != OK) {
549 return BAD_VALUE;
550 }
551
552 }
553 mInputStream = inputStream;
554 width = 0;
555 height = 0;
556 for (size_t i = 0; i < streamList->num_streams; i++) {
557 camera3_stream_t *newStream = streamList->streams[i];
558 DBG_LOGB("find propert width and height, format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d\n",
559 newStream->format, newStream->width, newStream->height, newStream->stream_type, newStream->max_buffers);
560 if ((HAL_PIXEL_FORMAT_BLOB != newStream->format) &&
561 (CAMERA3_STREAM_OUTPUT == newStream->stream_type)) {
562
563 if (width < newStream->width)
564 width = newStream->width;
565
566 if (height < newStream->height)
567 height = newStream->height;
568
569 pixelfmt = (uint32_t)newStream->format;
570 if (HAL_PIXEL_FORMAT_YCbCr_420_888 == pixelfmt)
571 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
572 }
573
574 }
575
576 //TODO modify this ugly code
577 if (isRestart) {
578 isRestart = mSensor->isNeedRestart(width, height, pixelfmt);
579 }
580
581 if (isRestart) {
582 mSensor->streamOff();
583 pixelfmt = mSensor->halFormatToSensorFormat(pixelfmt);
584 mSensor->setOutputFormat(width, height, pixelfmt, 0);
585 mSensor->streamOn();
586 DBG_LOGB("width=%d, height=%d, pixelfmt=%.4s\n",
587 width, height, (char*)&pixelfmt);
588 }
589
590 /**
591 * Initially mark all existing streams as not alive
592 */
593 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
594 PrivateStreamInfo *privStream =
595 static_cast<PrivateStreamInfo*>((*s)->priv);
596 privStream->alive = false;
597 }
598
599 /**
600 * Find new streams and mark still-alive ones
601 */
602 for (size_t i = 0; i < streamList->num_streams; i++) {
603 camera3_stream_t *newStream = streamList->streams[i];
604 if (newStream->priv == NULL) {
605 // New stream, construct info
606 PrivateStreamInfo *privStream = new PrivateStreamInfo();
607 privStream->alive = true;
608 privStream->registered = false;
609
610 newStream->usage =
611 mSensor->getStreamUsage(newStream->stream_type);
612
613 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
614 newStream->max_buffers = kMaxBufferCount;
615 newStream->priv = privStream;
616 mStreams.push_back(newStream);
617 } else {
618 // Existing stream, mark as still alive.
619 PrivateStreamInfo *privStream =
620 static_cast<PrivateStreamInfo*>(newStream->priv);
621 CAMHAL_LOGDA("Existing stream ?");
622 privStream->alive = true;
623 }
624 DBG_LOGB("%d, newStream=%p, stream_type=%d, usage=%x, priv=%p, w*h=%dx%d\n",
625 i, newStream, newStream->stream_type, newStream->usage, newStream->priv, newStream->width, newStream->height);
626 }
627
628 /**
629 * Reap the dead streams
630 */
631 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
632 PrivateStreamInfo *privStream =
633 static_cast<PrivateStreamInfo*>((*s)->priv);
634 if (!privStream->alive) {
635 DBG_LOGA("delete not alive streams");
636 (*s)->priv = NULL;
637 delete privStream;
638 s = mStreams.erase(s);
639 } else {
640 ++s;
641 }
642 }
643
644 /**
645 * Can't reuse settings across configure call
646 */
647 mPrevSettings.clear();
648
649 return OK;
650}
651
652status_t EmulatedFakeCamera3::registerStreamBuffers(
653 const camera3_stream_buffer_set *bufferSet) {
654 DBG_LOGB("%s: E", __FUNCTION__);
655 Mutex::Autolock l(mLock);
656
657 /**
658 * Sanity checks
659 */
660 DBG_LOGA("==========sanity checks\n");
661
662 // OK: register streams at any time during configure
663 // (but only once per stream)
664 if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) {
665 ALOGE("%s: Cannot register buffers in state %d",
666 __FUNCTION__, mStatus);
667 return NO_INIT;
668 }
669
670 if (bufferSet == NULL) {
671 ALOGE("%s: NULL buffer set!", __FUNCTION__);
672 return BAD_VALUE;
673 }
674
675 StreamIterator s = mStreams.begin();
676 for (; s != mStreams.end(); ++s) {
677 if (bufferSet->stream == *s) break;
678 }
679 if (s == mStreams.end()) {
680 ALOGE("%s: Trying to register buffers for a non-configured stream!",
681 __FUNCTION__);
682 return BAD_VALUE;
683 }
684
685 /**
686 * Register the buffers. This doesn't mean anything to the emulator besides
687 * marking them off as registered.
688 */
689
690 PrivateStreamInfo *privStream =
691 static_cast<PrivateStreamInfo*>((*s)->priv);
692
693#if 0
694 if (privStream->registered) {
695 ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__);
696 return BAD_VALUE;
697 }
698#endif
699
700 privStream->registered = true;
701
702 return OK;
703}
704
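// Build the default request settings for the given template type. Results are
// cached in mDefaultTemplates because the returned pointer must remain valid
// for the lifetime of the camera device; the cache is freed in the destructor.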
705const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
706 int type) {
707 DBG_LOGB("%s: E", __FUNCTION__);
708 Mutex::Autolock l(mLock);
709
710 if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
711 ALOGE("%s: Unknown request settings template: %d",
712 __FUNCTION__, type);
713 return NULL;
714 }
715
716 /**
717 * Cache is not just an optimization - pointer returned has to live at
718 * least as long as the camera device instance does.
719 */
720 if (mDefaultTemplates[type] != NULL) {
721 return mDefaultTemplates[type];
722 }
723
724 CameraMetadata settings;
725
726 /** android.request */
727 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
728 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
729
730 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
731 settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
732
733 static const int32_t id = 0;
734 settings.update(ANDROID_REQUEST_ID, &id, 1);
735
736 static const int32_t frameCount = 0;
737 settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
738
739 /** android.lens */
740
741 static const float focusDistance = 0;
742 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
743
744 static const float aperture = 2.8f;
745 settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
746
747// static const float focalLength = 5.0f;
748 static const float focalLength = 3.299999952316284f;
749 settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
750
751 static const float filterDensity = 0;
752 settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
753
754 static const uint8_t opticalStabilizationMode =
755 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
756 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
757 &opticalStabilizationMode, 1);
758
759 // FOCUS_RANGE set only in frame
760
761 /** android.sensor */
762
763 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
764 settings.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
765 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
766 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
767 static const int64_t exposureTime = 10 * MSEC;
768 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
769
770 int64_t frameDuration = mSensor->getMinFrameDuration();
771 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
772
773 static const int32_t sensitivity = 100;
774 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
775
776 static const int64_t rollingShutterSkew = 0;
777 settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
778 // TIMESTAMP set only in frame
779
780 /** android.flash */
781
782 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
783 settings.update(ANDROID_FLASH_STATE, &flashstate, 1);
784
785 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
786 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
787
788 static const uint8_t flashPower = 10;
789 settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
790
791 static const int64_t firingTime = 0;
792 settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
793
794 /** Processing block modes */
795 uint8_t hotPixelMode = 0;
796 uint8_t demosaicMode = 0;
797 uint8_t noiseMode = 0;
798 uint8_t shadingMode = 0;
799 uint8_t colorMode = 0;
800 uint8_t tonemapMode = 0;
801 uint8_t edgeMode = 0;
802 switch (type) {
803
804 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
805 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
806 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
807 // fall-through
808 case CAMERA3_TEMPLATE_STILL_CAPTURE:
809 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
810 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
811 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
812 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
813 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
814 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
815 break;
816 case CAMERA3_TEMPLATE_PREVIEW:
817 // fall-through
818 case CAMERA3_TEMPLATE_VIDEO_RECORD:
819 // fall-through
820 case CAMERA3_TEMPLATE_MANUAL:
821 // fall-through
822 default:
823 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
824 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
825 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
826 shadingMode = ANDROID_SHADING_MODE_FAST;
827 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
828 tonemapMode = ANDROID_TONEMAP_MODE_FAST;
829 edgeMode = ANDROID_EDGE_MODE_FAST;
830 break;
831 }
832 settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
833 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
834 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
835 settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
836 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
837 settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
838 settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
839
840 /** android.noise */
841 static const uint8_t noiseStrength = 5;
842 settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
843 static uint8_t availableNBModes[] = {
844 ANDROID_NOISE_REDUCTION_MODE_OFF,
845 ANDROID_NOISE_REDUCTION_MODE_FAST,
846 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
847 };
848 settings.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
 849 availableNBModes, sizeof(availableNBModes)/sizeof(availableNBModes[0]));
850
851
852 /** android.color */
853 static const float colorTransform[9] = {
854 1.0f, 0.f, 0.f,
855 0.f, 1.f, 0.f,
856 0.f, 0.f, 1.f
857 };
858 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
859
860 /** android.tonemap */
861 static const float tonemapCurve[4] = {
862 0.f, 0.f,
863 1.f, 1.f
864 };
865 settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
866 settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
867 settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
868
869 /** android.edge */
870 static const uint8_t edgeStrength = 5;
871 settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
872
873 /** android.scaler */
874 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
875 settings.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
876
877 static const int32_t cropRegion[] = {
878 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
879 };
880 settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
881
882 /** android.jpeg */
883 static const uint8_t jpegQuality = 80;
884 settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
885
886 static const int32_t thumbnailSize[2] = {
887 160, 120
888 };
889 settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
890
891 static const uint8_t thumbnailQuality = 80;
892 settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
893
894 static const double gpsCoordinates[3] = {
895 0, 0, 0
896 };
 897 settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); // default coordinates (lat, long, alt) all 0
898
899 static const uint8_t gpsProcessingMethod[32] = "None";
900 settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
901
902 static const int64_t gpsTimestamp = 0;
903 settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
904
905 static const int32_t jpegOrientation = 0;
906 settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
907
908 /** android.stats */
909
910 static const uint8_t faceDetectMode =
911 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
912 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
913
914 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
915 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
916
917 static const uint8_t sharpnessMapMode =
918 ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
919 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
920
921 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
922 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
923 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
924 settings.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
925 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
926 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
927 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
928 // sharpnessMap only in frames
929
930 /** android.control */
931
932 uint8_t controlIntent = 0;
933 uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; //default value
934 uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
935 uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
936 switch (type) {
937 case CAMERA3_TEMPLATE_PREVIEW:
938 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
939 break;
940 case CAMERA3_TEMPLATE_STILL_CAPTURE:
941 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
942 break;
943 case CAMERA3_TEMPLATE_VIDEO_RECORD:
944 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
945 break;
946 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
947 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
948 break;
949 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
950 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
951 break;
952 case CAMERA3_TEMPLATE_MANUAL:
953 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
954 controlMode = ANDROID_CONTROL_MODE_OFF;
955 aeMode = ANDROID_CONTROL_AE_MODE_OFF;
956 awbMode = ANDROID_CONTROL_AWB_MODE_OFF;
957 break;
958 default:
959 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
960 break;
961 }
962 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
963 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
964
965 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
966 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
967
968 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
969 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
970
971 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
972
973 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
974 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
975
976 static const uint8_t aePrecaptureTrigger =
977 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
978 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
979
980 static const int32_t mAfTriggerId = 0;
981 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,&mAfTriggerId, 1);
982 static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
983 settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
984
985 static const int32_t controlRegions[5] = {
986 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
987 1000
988 };
989// settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
990
991 static const int32_t aeExpCompensation = 0;
992 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
993
994 static const int32_t aeTargetFpsRange[2] = {
995 30, 30
996 };
997 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
998
999 static const uint8_t aeAntibandingMode =
1000 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
1001 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
1002
1003 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
1004
1005 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
1006 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
1007
1008// settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
1009
1010 uint8_t afMode = 0;
1011 switch (type) {
1012 case CAMERA3_TEMPLATE_PREVIEW:
1013 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1014 break;
1015 case CAMERA3_TEMPLATE_STILL_CAPTURE:
1016 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1017 break;
1018 case CAMERA3_TEMPLATE_VIDEO_RECORD:
1019 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1020 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
1021 break;
1022 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
1023 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1024 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
1025 break;
1026 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
1027 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1028 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
1029 break;
1030 case CAMERA3_TEMPLATE_MANUAL:
1031 afMode = ANDROID_CONTROL_AF_MODE_OFF;
1032 break;
1033 default:
1034 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1035 break;
1036 }
1037 settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
1038
1039 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1040 settings.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
1041
1042// settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
1043
1044 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
1045 settings.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
1046 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1047 settings.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
1048 static const uint8_t vstabMode =
1049 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
1050 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
1051
1052 // aeState, awbState, afState only in frame
1053
1054 mDefaultTemplates[type] = settings.release();
1055
1056 return mDefaultTemplates[type];
1057}
1058
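// Process one capture request: validate the request and its buffers, push the
// per-request settings (3A, crop/zoom, JPEG/EXIF parameters) down to the
// sensor, lock every output buffer for writing, wait for readout-queue space
// and the next VSync, and finally queue the request to the readout thread.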
1059status_t EmulatedFakeCamera3::processCaptureRequest(
1060 camera3_capture_request *request) {
1061 status_t res;
1062 nsecs_t exposureTime;
1063 nsecs_t frameDuration;
1064 uint32_t sensitivity;
1065 uint32_t frameNumber;
1066 bool mHaveThumbnail = false;
1067 CameraMetadata settings;
1068 Buffers *sensorBuffers = NULL;
1069 HalBufferVector *buffers = NULL;
1070 {
1071 Mutex::Autolock l(mLock);
1072
1073 /** Validation */
1074
1075 if (mStatus < STATUS_READY) {
1076 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
1077 mStatus);
1078 return INVALID_OPERATION;
1079 }
1080
1081 if (request == NULL) {
1082 ALOGE("%s: NULL request!", __FUNCTION__);
1083 return BAD_VALUE;
1084 }
1085
1086 frameNumber = request->frame_number;
1087
1088 if (request->settings == NULL && mPrevSettings.isEmpty()) {
1089 ALOGE("%s: Request %d: NULL settings for first request after"
1090 "configureStreams()", __FUNCTION__, frameNumber);
1091 return BAD_VALUE;
1092 }
1093
1094 if (request->input_buffer != NULL &&
1095 request->input_buffer->stream != mInputStream) {
1096 DBG_LOGB("%s: Request %d: Input buffer not from input stream!",
1097 __FUNCTION__, frameNumber);
1098 DBG_LOGB("%s: Bad stream %p, expected: %p",
1099 __FUNCTION__, request->input_buffer->stream,
1100 mInputStream);
1101 DBG_LOGB("%s: Bad stream type %d, expected stream type %d",
1102 __FUNCTION__, request->input_buffer->stream->stream_type,
1103 mInputStream ? mInputStream->stream_type : -1);
1104
1105 return BAD_VALUE;
1106 }
1107
1108 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1109 ALOGE("%s: Request %d: No output buffers provided!",
1110 __FUNCTION__, frameNumber);
1111 return BAD_VALUE;
1112 }
1113
1114 // Validate all buffers, starting with input buffer if it's given
1115
1116 ssize_t idx;
1117 const camera3_stream_buffer_t *b;
1118 if (request->input_buffer != NULL) {
1119 idx = -1;
1120 b = request->input_buffer;
1121 } else {
1122 idx = 0;
1123 b = request->output_buffers;
1124 }
1125 do {
1126 PrivateStreamInfo *priv =
1127 static_cast<PrivateStreamInfo*>(b->stream->priv);
1128 if (priv == NULL) {
1129 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
1130 __FUNCTION__, frameNumber, idx);
1131 return BAD_VALUE;
1132 }
1133#if 0
1134 if (!priv->alive || !priv->registered) {
1135 ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream! alive=%d, registered=%d\n",
1136 __FUNCTION__, frameNumber, idx,
1137 priv->alive, priv->registered);
1138 //return BAD_VALUE;
1139 }
1140#endif
1141 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1142 ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
1143 __FUNCTION__, frameNumber, idx);
1144 return BAD_VALUE;
1145 }
1146 if (b->release_fence != -1) {
1147 ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
1148 __FUNCTION__, frameNumber, idx);
1149 return BAD_VALUE;
1150 }
1151 if (b->buffer == NULL) {
1152 ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
1153 __FUNCTION__, frameNumber, idx);
1154 return BAD_VALUE;
1155 }
1156 idx++;
1157 b = &(request->output_buffers[idx]);
1158 } while (idx < (ssize_t)request->num_output_buffers);
1159
1160 // TODO: Validate settings parameters
1161
1162 /**
1163 * Start processing this request
1164 */
1165 mStatus = STATUS_ACTIVE;
1166
1167 camera_metadata_entry e;
1168
1169 if (request->settings == NULL) {
1170 settings.acquire(mPrevSettings);
1171 } else {
1172 settings = request->settings;
1173
1174 uint8_t antiBanding = 0;
1175 uint8_t effectMode = 0;
1176 int exposureCmp = 0;
1177
1178 e = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE);
1179 if (e.count == 0) {
1180 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1181 return BAD_VALUE;
1182 }
1183 antiBanding = e.data.u8[0];
1184 mSensor->setAntiBanding(antiBanding);
1185
1186 e = settings.find(ANDROID_CONTROL_EFFECT_MODE);
1187 if (e.count == 0) {
1188 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1189 return BAD_VALUE;
1190 }
1191 effectMode = e.data.u8[0];
1192 mSensor->setEffect(effectMode);
1193
1194 e = settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
1195 if (e.count == 0) {
1196 ALOGE("%s: No exposure entry!", __FUNCTION__);
1197 //return BAD_VALUE;
1198 } else {
1199 exposureCmp = e.data.i32[0];
1200 DBG_LOGB("set expsore compensaton %d\n", exposureCmp);
1201 mSensor->setExposure(exposureCmp);
1202 }
1203
1204 int32_t cropRegion[4];
1205 int32_t cropWidth;
1206 int32_t outputWidth = request->output_buffers[0].stream->width;
1207
1208 e = settings.find(ANDROID_SCALER_CROP_REGION);
1209 if (e.count == 0) {
1210 ALOGE("%s: No corp region entry!", __FUNCTION__);
1211 //return BAD_VALUE;
1212 } else {
1213 cropRegion[0] = e.data.i32[0];
1214 cropRegion[1] = e.data.i32[1];
1215 cropWidth = cropRegion[2] = e.data.i32[2];
1216 cropRegion[3] = e.data.i32[3];
1217 for (int i = mZoomMin; i <= mZoomMax; i += mZoomStep) {
1218 //if ( (float) i / mZoomMin >= (float) outputWidth / cropWidth) {
1219 if ( i * cropWidth >= outputWidth * mZoomMin ) {
1220 mSensor->setZoom(i);
1221 break;
1222 }
1223 }
1224 DBG_LOGB("cropRegion:%d, %d, %d, %d\n", cropRegion[0], cropRegion[1],cropRegion[2],cropRegion[3]);
1225 }
1226 }
1227
1228 uint8_t len[] = {1};
1229 settings.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
1230
1231 uint8_t maxlen[] = {0};
1232 settings.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
1233
1234 res = process3A(settings);
1235 if (res != OK) {
1236 CAMHAL_LOGDB("%s: process3A failed!", __FUNCTION__);
1237 //return res;
1238 }
1239
1240 // TODO: Handle reprocessing
1241
1242 /**
1243 * Get ready for sensor config
1244 */
1245
1246 bool needJpeg = false;
1247 ssize_t jpegbuffersize;
1248 uint32_t jpegpixelfmt;
1249
1250 exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
1251 frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
1252 sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
1253
1254 sensorBuffers = new Buffers();
1255 buffers = new HalBufferVector();
1256
1257 sensorBuffers->setCapacity(request->num_output_buffers);
1258 buffers->setCapacity(request->num_output_buffers);
1259
1260 // Process all the buffers we got for output, constructing internal buffer
1261 // structures for them, and lock them for writing.
1262 for (size_t i = 0; i < request->num_output_buffers; i++) {
1263 const camera3_stream_buffer &srcBuf = request->output_buffers[i];
1264 const private_handle_t *privBuffer =
1265 (const private_handle_t*)(*srcBuf.buffer);
1266 StreamBuffer destBuf;
1267 destBuf.streamId = kGenericStreamId;
1268 destBuf.width = srcBuf.stream->width;
1269 destBuf.height = srcBuf.stream->height;
1270 destBuf.format = privBuffer->format; // Use real private format
1271 destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc
1272 destBuf.buffer = srcBuf.buffer;
1273 destBuf.share_fd = privBuffer->share_fd;
1274
1275 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
1276 needJpeg = true;
1277 memset(&info,0,sizeof(struct ExifInfo));
1278 info.orientation = settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
1279 jpegpixelfmt = mSensor->getOutputFormat();
1280 if (!mSupportRotate) {
1281 info.mainwidth = srcBuf.stream->width;
1282 info.mainheight = srcBuf.stream->height;
1283 } else {
1284 if ((info.orientation == 90) || (info.orientation == 270)) {
1285 info.mainwidth = srcBuf.stream->height;
1286 info.mainheight = srcBuf.stream->width;
1287 } else {
1288 info.mainwidth = srcBuf.stream->width;
1289 info.mainheight = srcBuf.stream->height;
1290 }
1291 }
1292 if ((jpegpixelfmt == V4L2_PIX_FMT_MJPEG) || (jpegpixelfmt == V4L2_PIX_FMT_YUYV)) {
1293 mSensor->setOutputFormat(info.mainwidth,info.mainheight,jpegpixelfmt,1);
1294 } else {
1295 mSensor->setOutputFormat(info.mainwidth,info.mainheight,V4L2_PIX_FMT_RGB24,1);
1296 }
1297 }
1298
1299 // Wait on fence
1300 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
1301 res = bufferAcquireFence->wait(kFenceTimeoutMs);
1302 if (res == TIMED_OUT) {
1303 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
1304 __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
1305 }
1306 if (res == OK) {
1307 // Lock buffer for writing
1308 const Rect rect(destBuf.width, destBuf.height);
1309 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1310 if (privBuffer->format == HAL_PIXEL_FORMAT_YCbCr_420_888/*HAL_PIXEL_FORMAT_YCrCb_420_SP*/) {
1311 android_ycbcr ycbcr = android_ycbcr();
1312 res = GraphicBufferMapper::get().lockYCbCr(
1313 *(destBuf.buffer),
1314 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK,
1315 rect,
1316 &ycbcr);
1317 // This is only valid because we know that emulator's
1318 // YCbCr_420_888 is really contiguous NV21 under the hood
1319 destBuf.img = static_cast<uint8_t*>(ycbcr.y);
1320 } else {
1321 ALOGE("Unexpected private format for flexible YUV: 0x%x",
1322 privBuffer->format);
1323 res = INVALID_OPERATION;
1324 }
1325 } else {
1326 res = GraphicBufferMapper::get().lock(*(destBuf.buffer),
1327 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK,
1328 rect,
1329 (void**)&(destBuf.img));
1330 }
1331 if (res != OK) {
1332 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
1333 __FUNCTION__, frameNumber, i);
1334 }
1335 }
1336
1337 if (res != OK) {
1338 // Either waiting or locking failed. Unlock locked buffers and bail
1339 // out.
1340 for (size_t j = 0; j < i; j++) {
1341 GraphicBufferMapper::get().unlock(
 1342 *(request->output_buffers[j].buffer));
1343 }
1344 ALOGE("line:%d, format for this usage: %d x %d, usage %x, format=%x, returned\n",
1345 __LINE__, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format);
1346 return NO_INIT;
1347 }
1348 sensorBuffers->push_back(destBuf);
1349 buffers->push_back(srcBuf);
1350 }
1351
1352 if (needJpeg) {
1353 if (!mSupportRotate) {
1354 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1355 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1356 } else {
1357 if ((info.orientation == 90) || (info.orientation == 270)) {
1358 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1359 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1360 } else {
1361 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1362 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1363 }
1364 }
1365 if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
1366 info.latitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[0];
1367 info.longitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[1];
1368 info.altitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[2];
1369 info.has_latitude = true;
1370 info.has_longitude = true;
1371 info.has_altitude = true;
1372 } else {
1373 info.has_latitude = false;
1374 info.has_longitude = false;
1375 info.has_altitude = false;
1376 }
1377 if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
1378 uint8_t * gpsString = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
1379 memcpy(info.gpsProcessingMethod, gpsString , sizeof(info.gpsProcessingMethod)-1);
1380 info.has_gpsProcessingMethod = true;
1381 } else {
1382 info.has_gpsProcessingMethod = false;
1383 }
1384 if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
1385 info.gpsTimestamp = settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
1386 info.has_gpsTimestamp = true;
1387 } else {
1388 info.has_gpsTimestamp = false;
1389 }
1390 if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
1391 info.focallen = settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
1392 info.has_focallen = true;
1393 } else {
1394 info.has_focallen = false;
1395 }
1396 jpegbuffersize = getJpegBufferSize(info.mainwidth,info.mainheight);
1397
1398 mJpegCompressor->SetMaxJpegBufferSize(jpegbuffersize);
1399 mJpegCompressor->SetExifInfo(info);
1400 mSensor->setPictureRotate(info.orientation);
1401 if ((info.thumbwidth > 0) && (info.thumbheight > 0)) {
1402 mHaveThumbnail = true;
1403 }
1404 DBG_LOGB("%s::thumbnailSize_width=%d,thumbnailSize_height=%d,mainsize_width=%d,mainsize_height=%d,jpegOrientation=%d",__FUNCTION__,
1405 info.thumbwidth,info.thumbheight,info.mainwidth,info.mainheight,info.orientation);
1406 }
1407 /**
1408 * Wait for JPEG compressor to not be busy, if needed
1409 */
1410#if 0
1411 if (needJpeg) {
1412 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1413 if (!ready) {
1414 ALOGE("%s: Timeout waiting for JPEG compression to complete!",
1415 __FUNCTION__);
1416 return NO_INIT;
1417 }
1418 }
1419#else
1420 while (needJpeg) {
1421 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1422 if (ready) {
1423 break;
1424 }
1425 }
1426#endif
1427 }
1428 /**
1429 * Wait until the in-flight queue has room
1430 */
1431 res = mReadoutThread->waitForReadout();
1432 if (res != OK) {
1433 ALOGE("%s: Timeout waiting for previous requests to complete!",
1434 __FUNCTION__);
1435 return NO_INIT;
1436 }
1437
1438 /**
1439 * Wait until sensor's ready. This waits for lengthy amounts of time with
 1440 * mLock held, but the interface spec is that no other calls may be made to
1441 * the HAL by the framework while process_capture_request is happening.
1442 */
1443 {
1444 Mutex::Autolock l(mLock);
1445 int syncTimeoutCount = 0;
1446 while (!mSensor->waitForVSync(kSyncWaitTimeout)) {
1447 if (mStatus == STATUS_ERROR) {
1448 return NO_INIT;
1449 }
1450 if (syncTimeoutCount == kMaxSyncTimeoutCount) {
1451 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
1452 __FUNCTION__, frameNumber,
1453 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
1454 return NO_INIT;
1455 }
1456 syncTimeoutCount++;
1457 }
1458
1459 /**
1460 * Configure sensor and queue up the request to the readout thread
1461 */
1462 mSensor->setExposureTime(exposureTime);
1463 mSensor->setFrameDuration(frameDuration);
1464 mSensor->setSensitivity(sensitivity);
1465 mSensor->setDestinationBuffers(sensorBuffers);
1466 mSensor->setFrameNumber(request->frame_number);
1467
1468 ReadoutThread::Request r;
1469 r.frameNumber = request->frame_number;
1470 r.settings = settings;
1471 r.sensorBuffers = sensorBuffers;
1472 r.buffers = buffers;
1473 r.havethumbnail = mHaveThumbnail;
1474
1475 mReadoutThread->queueCaptureRequest(r);
1476 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1477
1478 // Cache the settings for next time
1479 mPrevSettings.acquire(settings);
1480 }
1481 CAMHAL_LOGDB("%s , X" , __FUNCTION__);
1482 return OK;
1483}
1484
1485/** Debug methods */
1486
1487void EmulatedFakeCamera3::dump(int fd) {
1488
1489 String8 result;
1490 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
1491 result = String8::format("%s, valid resolution\n", __FILE__);
1492
1493 for (uint32_t f = 0; f < count; f+=2) {
1494 if (mAvailableJpegSize[f] == 0)
1495 break;
1496 result.appendFormat("width: %d , height =%d\n",
1497 mAvailableJpegSize[f], mAvailableJpegSize[f+1]);
1498 }
1499 result.appendFormat("\nmZoomMin: %d , mZoomMax =%d, mZoomStep=%d\n",
1500 mZoomMin, mZoomMax, mZoomStep);
1501
1502 if (mZoomStep <= 0) {
1503 result.appendFormat("!!!!!!!!!camera apk may have no picture out\n");
1504 }
1505
1506 write(fd, result.string(), result.size());
1507
1508 if (mSensor.get() != NULL) {
1509 mSensor->dump(fd);
1510 }
1511
1512}
 1513// Flush all in-flight requests.
 1514// TODO: immediately return the buffers held by every pending request, with
 1515// the CAMERA3_BUFFER_STATUS_ERROR flag set.
1516int EmulatedFakeCamera3::flush_all_requests() {
1517 DBG_LOGA("flush all request");
1518 return 0;
1519}
1520/** Tag query methods */
1521const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) {
1522 return NULL;
1523}
1524
1525const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) {
1526 return NULL;
1527}
1528
1529int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
1530 return 0;
1531}
1532
1533/**
1534 * Private methods
1535 */
1536
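// Look up a static metadata tag and sanity-check its value count; logs an
// error when a required tag is missing or its count falls outside
// [minCount, maxCount], but still returns the (possibly empty) entry.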
1537camera_metadata_ro_entry_t EmulatedFakeCamera3::staticInfo(const CameraMetadata *info, uint32_t tag,
1538 size_t minCount, size_t maxCount, bool required) const {
1539
1540 camera_metadata_ro_entry_t entry = info->find(tag);
1541
1542 if (CC_UNLIKELY( entry.count == 0 ) && required) {
1543 const char* tagSection = get_camera_metadata_section_name(tag);
1544 if (tagSection == NULL) tagSection = "<unknown>";
1545 const char* tagName = get_camera_metadata_tag_name(tag);
1546 if (tagName == NULL) tagName = "<unknown>";
1547
1548 ALOGE("Error finding static metadata entry '%s.%s' (%x)",
1549 tagSection, tagName, tag);
1550 } else if (CC_UNLIKELY(
1551 (minCount != 0 && entry.count < minCount) ||
1552 (maxCount != 0 && entry.count > maxCount) ) ) {
1553 const char* tagSection = get_camera_metadata_section_name(tag);
1554 if (tagSection == NULL) tagSection = "<unknown>";
1555 const char* tagName = get_camera_metadata_tag_name(tag);
1556 if (tagName == NULL) tagName = "<unknown>";
1557 ALOGE("Malformed static metadata entry '%s.%s' (%x):"
1558 "Expected between %zu and %zu values, but got %zu values",
1559 tagSection, tagName, tag, minCount, maxCount, entry.count);
1560 }
1561
1562 return entry;
1563}
1564
1565//this is only for debug
1566void EmulatedFakeCamera3::getStreamConfigurationp(CameraMetadata *info) {
1567 const int STREAM_CONFIGURATION_SIZE = 4;
1568 const int STREAM_FORMAT_OFFSET = 0;
1569 const int STREAM_WIDTH_OFFSET = 1;
1570 const int STREAM_HEIGHT_OFFSET = 2;
1571 const int STREAM_IS_INPUT_OFFSET = 3;
1572
1573 camera_metadata_ro_entry_t availableStreamConfigs =
1574 staticInfo(info, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
1575 CAMHAL_LOGDB(" stream, availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1576
1577 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1578 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
1579 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
1580 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
1581 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
1582 CAMHAL_LOGDB("f=%x, w*h=%dx%d, du=%d\n", format, width, height, isInput);
1583 }
1584
1585}
1586
1587//this is only for debug
1588void EmulatedFakeCamera3::getStreamConfigurationDurations(CameraMetadata *info) {
1589 const int STREAM_CONFIGURATION_SIZE = 4;
1590 const int STREAM_FORMAT_OFFSET = 0;
1591 const int STREAM_WIDTH_OFFSET = 1;
1592 const int STREAM_HEIGHT_OFFSET = 2;
1593 const int STREAM_IS_INPUT_OFFSET = 3;
1594
1595 camera_metadata_ro_entry_t availableStreamConfigs =
1596 staticInfo(info, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
1597 CAMHAL_LOGDB("availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1598
1599 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1600 int64_t format = availableStreamConfigs.data.i64[i + STREAM_FORMAT_OFFSET];
1601 int64_t width = availableStreamConfigs.data.i64[i + STREAM_WIDTH_OFFSET];
1602 int64_t height = availableStreamConfigs.data.i64[i + STREAM_HEIGHT_OFFSET];
1603 int64_t isInput = availableStreamConfigs.data.i64[i + STREAM_IS_INPUT_OFFSET];
1604 CAMHAL_LOGDB("f=%llx, w*h=%lldx%lld, du=%lld\n", format, width, height, isInput);
1605 }
1606}
1607
1608void EmulatedFakeCamera3::updateCameraMetaData(CameraMetadata *info) {
1609
1610}
1611
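// Assemble the static camera characteristics. A temporary Sensor instance is
// started here only to probe the sensor type, facing and ioctl capabilities
// that the static metadata depends on.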
1612status_t EmulatedFakeCamera3::constructStaticInfo() {
1613
1614 status_t ret = OK;
1615 CameraMetadata info;
1616 uint32_t picSizes[64 * 8];
1617 int64_t* duration = NULL;
1618 int count, duration_count, availablejpegsize;
1619 uint8_t maxCount = 10;
1620 char property[PROPERTY_VALUE_MAX];
1621 unsigned int supportrotate;
1622 availablejpegsize = ARRAY_SIZE(mAvailableJpegSize);
1623 memset(mAvailableJpegSize,0,(sizeof(uint32_t))*availablejpegsize);
1624 sp<Sensor> s = new Sensor();
1625 ret = s->startUp(mCameraID);
1626 if (ret != OK) {
1627 DBG_LOGA("sensor start up failed");
1628 return ret;
1629 }
1630
1631 mSensorType = s->getSensorType();
1632
1633 if ( mSensorType == SENSOR_USB) {
1634 char property[PROPERTY_VALUE_MAX];
1635 property_get("rw.camera.usb.faceback", property, "false");
1636 if (strstr(property, "true"))
1637 mFacingBack = 1;
1638 else
1639 mFacingBack = 0;
1640 ALOGI("Setting usb camera cameraID:%d to back camera:%s\n",
1641 mCameraID, property);
1642 } else {
1643 if (s->mSensorFace == SENSOR_FACE_FRONT) {
1644 mFacingBack = 0;
1645 } else if (s->mSensorFace == SENSOR_FACE_BACK) {
1646 mFacingBack = 1;
1647 } else if (s->mSensorFace == SENSOR_FACE_NONE) {
1648 if (gEmulatedCameraFactory.getEmulatedCameraNum() == 1) {
1649 mFacingBack = 1;
1650 } else if ( mCameraID == 0) {
1651 mFacingBack = 1;
1652 } else {
1653 mFacingBack = 0;
1654 }
1655 }
1656
1657 ALOGI("Setting on board camera cameraID:%d to back camera:%d[0 false, 1 true]\n",
1658 mCameraID, mFacingBack);
1659 }
1660
1661 mSupportCap = s->IoctlStateProbe();
1662 if (mSupportCap & IOCTL_MASK_ROTATE) {
1663 supportrotate = true;
1664 } else {
1665 supportrotate = false;
1666 }
1667 // android.lens
1668
1669 // 5 cm min focus distance for back camera, infinity (fixed focus) for front
1670 // TODO read this ioctl from camera driver
1671 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
1672 const float minFocusDistance = 0.0;
1673 info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1674 &minFocusDistance, 1);
1675
1676 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1677 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
1678 info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1679 &minFocusDistance, 1);
1680
1681 static const float focalLength = 3.30f; // mm
1682 info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1683 &focalLength, 1);
1684 static const float aperture = 2.8f;
1685 info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1686 &aperture, 1);
1687 static const float filterDensity = 0;
1688 info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1689 &filterDensity, 1);
1690 static const uint8_t availableOpticalStabilization =
1691 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1692 info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1693 &availableOpticalStabilization, 1);
1694
1695 static const int32_t lensShadingMapSize[] = {1, 1};
1696 info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1697 sizeof(lensShadingMapSize)/sizeof(int32_t));
1698
1699 uint8_t lensFacing = mFacingBack ?
1700 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1701 info.update(ANDROID_LENS_FACING, &lensFacing, 1);
1702
1703 float lensPosition[3];
1704 if (mFacingBack) {
1705 // Back-facing camera is center-top on device
1706 lensPosition[0] = 0;
1707 lensPosition[1] = 20;
1708 lensPosition[2] = -5;
1709 } else {
1710 // Front-facing camera is center-right on device
1711 lensPosition[0] = 20;
1712 lensPosition[1] = 20;
1713 lensPosition[2] = 0;
1714 }
1715#if PLATFORM_SDK_VERSION <= 22
1716 info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
1717 sizeof(float));
1718#endif
1719 static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
1720 info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1);
1721
1722 // android.sensor
1723
1724 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1725 info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
1726 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1727 info.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
1728 info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1729 Sensor::kExposureTimeRange, 2);
1730
1731 info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1732 &Sensor::kFrameDurationRange[1], 1);
1733
1734 info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1735 Sensor::kSensitivityRange,
1736 sizeof(Sensor::kSensitivityRange)
1737 /sizeof(int32_t));
1738
1739 info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1740 &Sensor::kColorFilterArrangement, 1);
1741
1742 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1743 info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1744 sensorPhysicalSize, 2);
1745
1746 info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1747 (int32_t*)Sensor::kResolution, 2);
1748
1750
1751 info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1752 (int32_t*)&Sensor::kMaxRawValue, 1);
1753
1754 static const int32_t blackLevelPattern[4] = {
1755 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1756 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
1757 };
1758 info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1759 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
1760
1761 static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
1762 info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
1763 if (mSensorType == SENSOR_USB) {
1764 if (mFacingBack) {
1765 property_get("hw.camera.orientation.back", property, "0");
1766 } else {
1767 property_get("hw.camera.orientation.front", property, "0");
1768 }
1769 int32_t orientation = atoi(property);
1770 property_get("hw.camera.usb.orientation_offset", property, "0");
1771 orientation += atoi(property);
1772 orientation %= 360;
1773 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1774 } else {
1775 if (mFacingBack) {
1776 property_get("hw.camera.orientation.back", property, "270");
1777 const int32_t orientation = atoi(property);
1778 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1779 } else {
1780 property_get("hw.camera.orientation.front", property, "90");
1781 const int32_t orientation = atoi(property);
1782 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1783 }
1784 }
1785
1786 static const int64_t rollingShutterSkew = 0;
1787 info.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
1788
1789 //TODO: sensor color calibration fields
1790
1791 // android.flash
1792 static const uint8_t flashAvailable = 0;
1793 info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1794
1795 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
1796 info.update(ANDROID_FLASH_STATE, &flashstate, 1);
1797
1798 static const int64_t flashChargeDuration = 0;
1799 info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
1800
1801 /** android.noise */
1802 static const uint8_t availableNBModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
1803 info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &availableNBModes, 1);
1804
1805 // android.tonemap
1806
1807 static const int32_t tonemapCurvePoints = 128;
1808 info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1809
1810 // android.scaler
1811
1812 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
1813 info.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
1814
1815 info.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1816 kAvailableFormats,
1817 sizeof(kAvailableFormats)/sizeof(int32_t));
1818
1819 info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1820 (int64_t*)kAvailableRawMinDurations,
1821 sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
1822
1823 //for version 3.2 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
1824 count = sizeof(picSizes)/sizeof(picSizes[0]);
1825 count = s->getStreamConfigurations(picSizes, kAvailableFormats, count);
1826
1827 info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1828 (int32_t*)picSizes, count);
1829
1830 if (count < availablejpegsize) {
1831 availablejpegsize = count;
1832 }
1833 getValidJpegSize(picSizes,mAvailableJpegSize,availablejpegsize);
1834
1835 maxJpegResolution = getMaxJpegResolution(picSizes,count);
1836 int32_t full_size[4];
1837 // Front and back cameras report the same active array, sized from the largest JPEG resolution
1838 full_size[0] = 0;
1839 full_size[1] = 0;
1840 full_size[2] = maxJpegResolution.width;
1841 full_size[3] = maxJpegResolution.height;
1848 info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1849 (int32_t*)full_size,
1850 sizeof(full_size)/sizeof(full_size[0]));
1851 duration = new int64_t[count];
1852 if (duration == NULL) {
1853 DBG_LOGA("allocate memory for duration failed");
1854 return NO_MEMORY;
1855 } else {
1856 memset(duration,0,sizeof(int64_t)*count);
1857 }
1858 duration_count = s->getStreamConfigurationDurations(picSizes, duration , count);
1859
1860 info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1861 duration, duration_count);
1862 info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1863 duration, duration_count);
1864
1865 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1866 (int64_t*)kAvailableProcessedMinDurations,
1867 sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
1868
1869 info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
1870 (int64_t*)kAvailableJpegMinDurations,
1871 sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
1872
1873
1874 // android.jpeg
1875
1876 static const int32_t jpegThumbnailSizes[] = {
1877 0, 0,
1878 160, 120,
1879 320, 240
1880 };
1881 info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1882 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
1883
1884 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1885 info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1886
1887 // android.stats
1888
1889 static const uint8_t availableFaceDetectModes[] = {
1890 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1891 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1892 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
1893 };
1894
1895 info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1896 availableFaceDetectModes,
1897 sizeof(availableFaceDetectModes));
1898
1899 static const int32_t maxFaceCount = 8;
1900 info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1901 &maxFaceCount, 1);
1902
1903 static const int32_t histogramSize = 64;
1904 info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1905 &histogramSize, 1);
1906
1907 static const int32_t maxHistogramCount = 1000;
1908 info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1909 &maxHistogramCount, 1);
1910
1911 static const int32_t sharpnessMapSize[2] = {64, 64};
1912 info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1913 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
1914
1915 static const int32_t maxSharpnessMapValue = 1000;
1916 info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1917 &maxSharpnessMapValue, 1);
1918 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
1919 info.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
1920
1921 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
1922 info.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
1923 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
1924 info.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
1925 // android.control
1926
1927 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
1928 info.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
1929
1930 static const uint8_t availableSceneModes[] = {
1931 // ANDROID_CONTROL_SCENE_MODE_DISABLED,
1932 ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY
1933 };
1934 info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1935 availableSceneModes, sizeof(availableSceneModes));
1936
1937 static const uint8_t availableEffects[] = {
1938 ANDROID_CONTROL_EFFECT_MODE_OFF
1939 };
1940 info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1941 availableEffects, sizeof(availableEffects));
1942
1943 static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
1944 info.update(ANDROID_CONTROL_MAX_REGIONS,
1945 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
1946
1947 static const uint8_t availableAeModes[] = {
1948 ANDROID_CONTROL_AE_MODE_OFF,
1949 ANDROID_CONTROL_AE_MODE_ON
1950 };
1951 info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1952 availableAeModes, sizeof(availableAeModes));
1953
1954
1955 static const int32_t availableTargetFpsRanges[] = {
1956 5, 15, 15, 15, 5, 25, 25, 25, 5, 30, 30, 30,
1957 };
1958 info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1959 availableTargetFpsRanges,
1960 sizeof(availableTargetFpsRanges)/sizeof(int32_t));
1961
1962 uint8_t awbModes[maxCount];
1963 count = s->getAWB(awbModes, maxCount);
1964 if (count < 0) {
1965 static const uint8_t availableAwbModes[] = {
1966 ANDROID_CONTROL_AWB_MODE_OFF,
1967 ANDROID_CONTROL_AWB_MODE_AUTO,
1968 ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1969 ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1970 ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1971 ANDROID_CONTROL_AWB_MODE_SHADE
1972 };
1973 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1974 availableAwbModes, sizeof(availableAwbModes));
1975 } else {
1976 DBG_LOGB("getAWB %d ",count);
1977 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1978 awbModes, count);
1979 }
1980
1981 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1982 info.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
1983
1984 static const uint8_t availableAfModesFront[] = {
1985 ANDROID_CONTROL_AF_MODE_OFF
1986 };
1987
1988 if (mFacingBack) {
1989 uint8_t afMode[maxCount];
1990 count = s->getAutoFocus(afMode, maxCount);
1991 if (count < 0) {
1992 static const uint8_t availableAfModesBack[] = {
1993 ANDROID_CONTROL_AF_MODE_OFF,
1994 //ANDROID_CONTROL_AF_MODE_AUTO,
1995 //ANDROID_CONTROL_AF_MODE_MACRO,
1996 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
1997 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
1998 };
1999
2000 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2001 availableAfModesBack, sizeof(availableAfModesBack));
2002 } else {
2003 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2004 afMode, count);
2005 }
2006 } else {
2007 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2008 availableAfModesFront, sizeof(availableAfModesFront));
2009 }
2010
2011 uint8_t antiBanding[maxCount];
2012 count = s->getAntiBanding(antiBanding, maxCount);
2013 if (count < 0) {
2014 static const uint8_t availableAntibanding[] = {
2015 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
2016 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
2017 };
2018 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2019 availableAntibanding, sizeof(availableAntibanding));
2020 } else {
2021 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2022 antiBanding, count);
2023 }
2024
2025 camera_metadata_rational step;
2026 int maxExp, minExp, def;
2027 ret = s->getExposure(&maxExp, &minExp, &def, &step);
2028 if (ret < 0) {
2029 static const int32_t aeExpCompensation = 0;
2030 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
2031
2032 static const camera_metadata_rational exposureCompensationStep = {
2033 1, 3
2034 };
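// A step of {1, 3} means exposure compensation moves in 1/3 EV increments.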
2035 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2036 &exposureCompensationStep, 1);
2037
2038 int32_t exposureCompensationRange[] = {0, 0};
2039 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2040 exposureCompensationRange,
2041 sizeof(exposureCompensationRange)/sizeof(int32_t));
2042 } else {
2043 DBG_LOGB("exposure compensation support:(%d, %d)\n", minExp, maxExp);
2044 int32_t exposureCompensationRange[] = {minExp, maxExp};
2045 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2046 exposureCompensationRange,
2047 sizeof(exposureCompensationRange)/sizeof(int32_t));
2048 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2049 &step, 1);
2050 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &def, 1);
2051 }
2052
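// Digital zoom: if the sensor driver reports a zoom range, advertise max/min as the maximum digital zoom ratio; otherwise fall back to 1.0 (no zoom).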
2053 ret = s->getZoom(&mZoomMin, &mZoomMax, &mZoomStep);
2054 if (ret < 0) {
2055 float maxZoom = 1.0;
2056 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2057 &maxZoom, 1);
2058 } else {
2059 float maxZoom = mZoomMax / mZoomMin;
2060 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2061 &maxZoom, 1);
2062 }
2063
2064 static const uint8_t availableVstabModes[] = {
2065 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
2066 };
2067 info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2068 availableVstabModes, sizeof(availableVstabModes));
2069
2070 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
2071 info.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
2072 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2073 info.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
2074 // android.info
2075 const uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2076 //mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
2077 // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2078 info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2079 &supportedHardwareLevel,
2080 /*count*/1);
2081
2082 int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
2083 info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1);
2084
2085 uint8_t len[] = {1};
2086 info.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
2087
2088 uint8_t maxlen[] = {2};
2089 info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
2090 uint8_t cap[] = {
2091 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
2092 };
2093 info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2094 (uint8_t *)cap, sizeof(cap)/sizeof(cap[0]));
2095
2096
2097 int32_t partialResultCount = 1;
2098 info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,&partialResultCount,1);
2099 int32_t maxNumOutputStreams[3] = {0,2,1};
2100 info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,maxNumOutputStreams,3);
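// Stream count limits are ordered {RAW, processed (non-stalling), stalling (JPEG)}, so this HAL allows no RAW streams, up to 2 processed streams, and 1 JPEG stream.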
2101 uint8_t aberrationMode[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
2102 info.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
2103 aberrationMode, 1);
2104 info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
2105 aberrationMode, 1);
2106
2107 getAvailableChKeys(&info, supportedHardwareLevel);
2108
2109 if (mCameraInfo != NULL) {
2110 CAMHAL_LOGDA("mCameraInfo is not null, mem leak?");
2111 }
2112 mCameraInfo = info.release();
2113 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
2114
2115 if (duration != NULL) {
2116 delete [] duration;
2117 }
2118
2119 s->shutDown();
2120 s.clear();
2121 mPlugged = true;
2122
2123 return OK;
2124}
2125
2126status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
2127 /**
2128 * Extract top-level 3A controls
2129 */
2130 status_t res;
2131
2132 bool facePriority = false;
2133
2134 camera_metadata_entry e;
2135
2136 e = settings.find(ANDROID_CONTROL_MODE);
2137 if (e.count == 0) {
2138 ALOGE("%s: No control mode entry!", __FUNCTION__);
2139 return BAD_VALUE;
2140 }
2141 uint8_t controlMode = e.data.u8[0];
2142
2143 e = settings.find(ANDROID_CONTROL_SCENE_MODE);
2144 if (e.count == 0) {
2145 ALOGE("%s: No scene mode entry!", __FUNCTION__);
2146 return BAD_VALUE;
2147 }
2148 uint8_t sceneMode = e.data.u8[0];
2149
2150 if (controlMode == ANDROID_CONTROL_MODE_OFF) {
2151 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2152 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2153 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2154 update3A(settings);
2155 return OK;
2156 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2157 switch(sceneMode) {
2158 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
2159 mFacePriority = true;
2160 break;
2161 default:
2162 ALOGE("%s: Emulator doesn't support scene mode %d",
2163 __FUNCTION__, sceneMode);
2164 return BAD_VALUE;
2165 }
2166 } else {
2167 mFacePriority = false;
2168 }
2169
2170 // controlMode == AUTO or sceneMode == FACE_PRIORITY
2171 // Process individual 3A controls
2172
2173 res = doFakeAE(settings);
2174 if (res != OK) return res;
2175
2176 res = doFakeAF(settings);
2177 if (res != OK) return res;
2178
2179 res = doFakeAWB(settings);
2180 if (res != OK) return res;
2181
2182 update3A(settings);
2183 return OK;
2184}
2185
2186status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
2187 camera_metadata_entry e;
2188
2189 e = settings.find(ANDROID_CONTROL_AE_MODE);
2190 if (e.count == 0) {
2191 ALOGE("%s: No AE mode entry!", __FUNCTION__);
2192 return BAD_VALUE;
2193 }
2194 uint8_t aeMode = e.data.u8[0];
2195
2196 switch (aeMode) {
2197 case ANDROID_CONTROL_AE_MODE_OFF:
2198 // AE is OFF
2199 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2200 return OK;
2201 case ANDROID_CONTROL_AE_MODE_ON:
2202 // OK for AUTO modes
2203 break;
2204 default:
2205 ALOGVV("%s: Emulator doesn't support AE mode %d",
2206 __FUNCTION__, aeMode);
2207 return BAD_VALUE;
2208 }
2209
2210 e = settings.find(ANDROID_CONTROL_AE_LOCK);
2211 if (e.count == 0) {
2212 ALOGE("%s: No AE lock entry!", __FUNCTION__);
2213 return BAD_VALUE;
2214 }
2215 bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
2216
2217 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
2218 bool precaptureTrigger = false;
2219 if (e.count != 0) {
2220 precaptureTrigger =
2221 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
2222 }
2223
2224 if (precaptureTrigger) {
2225 ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
2226 } else if (e.count > 0) {
2227 ALOGV("%s: Pre capture trigger was present? %zu",
2228 __FUNCTION__,
2229 e.count);
2230 }
2231
2232 if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2233 // Run precapture sequence
2234 if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2235 mAeCounter = 0;
2236 }
2237
2238 if (mFacePriority) {
2239 mAeTargetExposureTime = kFacePriorityExposureTime;
2240 } else {
2241 mAeTargetExposureTime = kNormalExposureTime;
2242 }
2243
2244 if (mAeCounter > kPrecaptureMinFrames &&
2245 (mAeTargetExposureTime - mAeCurrentExposureTime) <
2246 mAeTargetExposureTime / 10) {
2247 // Done with precapture
2248 mAeCounter = 0;
2249 mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
2250 ANDROID_CONTROL_AE_STATE_CONVERGED;
2251 } else {
2252 // Converge some more
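// Exponential approach: each frame closes a fixed fraction (kExposureTrackRate) of the remaining gap to the target exposure.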
2253 mAeCurrentExposureTime +=
2254 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2255 kExposureTrackRate;
2256 mAeCounter++;
2257 mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
2258 }
2259
2260 } else if (!aeLocked) {
2261 // Run standard occasional AE scan
2262 switch (mAeState) {
2263 case ANDROID_CONTROL_AE_STATE_CONVERGED:
2264 case ANDROID_CONTROL_AE_STATE_INACTIVE:
2265 mAeCounter++;
2266 if (mAeCounter > kStableAeMaxFrames) {
2267 mAeTargetExposureTime =
2268 mFacePriority ? kFacePriorityExposureTime :
2269 kNormalExposureTime;
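// Wander the AE target by a random number of stops in [kExposureWanderMin, kExposureWanderMax]; scaling by 2^step keeps the walk multiplicative.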
2270 float exposureStep = ((double)rand() / RAND_MAX) *
2271 (kExposureWanderMax - kExposureWanderMin) +
2272 kExposureWanderMin;
2273 mAeTargetExposureTime *= std::pow(2, exposureStep);
2274 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2275 }
2276 break;
2277 case ANDROID_CONTROL_AE_STATE_SEARCHING:
2278 mAeCurrentExposureTime +=
2279 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2280 kExposureTrackRate;
2281 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
2282 mAeTargetExposureTime / 10) {
2283 // Close enough
2284 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2285 mAeCounter = 0;
2286 }
2287 break;
2288 case ANDROID_CONTROL_AE_STATE_LOCKED:
2289 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2290 mAeCounter = 0;
2291 break;
2292 default:
2293 ALOGE("%s: Emulator in unexpected AE state %d",
2294 __FUNCTION__, mAeState);
2295 return INVALID_OPERATION;
2296 }
2297 } else {
2298 // AE is locked
2299 mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2300 }
2301
2302 return OK;
2303}
2304
2305status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
2306 camera_metadata_entry e;
2307
2308 e = settings.find(ANDROID_CONTROL_AF_MODE);
2309 if (e.count == 0) {
2310 ALOGE("%s: No AF mode entry!", __FUNCTION__);
2311 return BAD_VALUE;
2312 }
2313 uint8_t afMode = e.data.u8[0];
2314
2315 e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
2316 typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
2317 af_trigger_t afTrigger;
2318 // If we have an afTrigger, afTriggerId should be set too
2319 if (e.count != 0) {
2320 afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
2321
2322 e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);
2323
2324 if (e.count == 0) {
2325 ALOGE("%s: When android.control.afTrigger is set "
2326 " in the request, afTriggerId needs to be set as well",
2327 __FUNCTION__);
2328 return BAD_VALUE;
2329 }
2330
2331 mAfTriggerId = e.data.i32[0];
2332
2333 ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
2334 ALOGV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
2335 ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
2336 } else {
2337 afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
2338 }
2339 if (!mFacingBack) {
2340 afMode = ANDROID_CONTROL_AF_MODE_OFF;
2341 }
2342
2343 switch (afMode) {
2344 case ANDROID_CONTROL_AF_MODE_OFF:
2345 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2346 return OK;
2347 case ANDROID_CONTROL_AF_MODE_AUTO:
2348 case ANDROID_CONTROL_AF_MODE_MACRO:
2349 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2350 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2351 if (!mFacingBack) {
2352 ALOGE("%s: Front camera doesn't support AF mode %d",
2353 __FUNCTION__, afMode);
2354 return BAD_VALUE;
2355 }
2356 mSensor->setAutoFocuas(afMode);
2357 // OK, handle transitions below
2358 break;
2359 default:
2360 ALOGE("%s: Emulator doesn't support AF mode %d",
2361 __FUNCTION__, afMode);
2362 return BAD_VALUE;
2363 }
2364#if 0
2365 e = settings.find(ANDROID_CONTROL_AF_REGIONS);
2366 if (e.count == 0) {
2367 ALOGE("%s:Get ANDROID_CONTROL_AF_REGIONS failed\n", __FUNCTION__);
2368 return BAD_VALUE;
2369 }
2370 int32_t x0 = e.data.i32[0];
2371 int32_t y0 = e.data.i32[1];
2372 int32_t x1 = e.data.i32[2];
2373 int32_t y1 = e.data.i32[3];
2374 mSensor->setFocuasArea(x0, y0, x1, y1);
2375 DBG_LOGB(" x0:%d, y0:%d,x1:%d,y1:%d,\n", x0, y0, x1, y1);
2376#endif
2377
2378
2379 bool afModeChanged = mAfMode != afMode;
2380 mAfMode = afMode;
2381
2382 /**
2383 * Simulate AF triggers. Transition at most 1 state per frame.
2384 * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
2385 */
2386
2387 bool afTriggerStart = false;
2388 bool afTriggerCancel = false;
2389 switch (afTrigger) {
2390 case ANDROID_CONTROL_AF_TRIGGER_IDLE:
2391 break;
2392 case ANDROID_CONTROL_AF_TRIGGER_START:
2393 afTriggerStart = true;
2394 break;
2395 case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
2396 afTriggerCancel = true;
2397 // Cancel trigger always transitions into INACTIVE
2398 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2399
2400 ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);
2401
2402 // Stay in 'inactive' until at least next frame
2403 return OK;
2404 default:
2405 ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
2406 return BAD_VALUE;
2407 }
2408
2409 // If we get down here, we're either in an autofocus mode
2410 // or in a continuous focus mode (and no other modes)
2411
2412 int oldAfState = mAfState;
2413 switch (mAfState) {
2414 case ANDROID_CONTROL_AF_STATE_INACTIVE:
2415 if (afTriggerStart) {
2416 switch (afMode) {
2417 case ANDROID_CONTROL_AF_MODE_AUTO:
2418 // fall-through
2419 case ANDROID_CONTROL_AF_MODE_MACRO:
2420 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2421 break;
2422 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2423 // fall-through
2424 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2425 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2426 break;
2427 }
2428 } else {
2429 // At least one frame stays in INACTIVE
2430 if (!afModeChanged) {
2431 switch (afMode) {
2432 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2433 // fall-through
2434 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2435 mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
2436 break;
2437 }
2438 }
2439 }
2440 break;
2441 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2442 /**
2443 * When the AF trigger is activated, the algorithm should finish
2444 * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
2445 * or AF_NOT_FOCUSED as appropriate
2446 */
2447 if (afTriggerStart) {
2448 // Randomly transition to focused or not focused
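// (rand() % 3 is nonzero two times out of three, so the simulated focus succeeds roughly 2/3 of the time)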
2449 if (rand() % 3) {
2450 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2451 } else {
2452 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2453 }
2454 }
2455 /**
2456 * When the AF trigger is not involved, the AF algorithm should
2457 * start in INACTIVE state, and then transition into PASSIVE_SCAN
2458 * and PASSIVE_FOCUSED states
2459 */
2460 else if (!afTriggerCancel) {
2461 // Randomly transition to passive focus
2462 if (rand() % 3 == 0) {
2463 mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
2464 }
2465 }
2466
2467 break;
2468 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2469 if (afTriggerStart) {
2470 // Randomly transition to focused or not focused
2471 if (rand() % 3) {
2472 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2473 } else {
2474 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2475 }
2476 }
2477 // TODO: initiate passive scan (PASSIVE_SCAN)
2478 break;
2479 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2480 // Simulate AF sweep completing instantaneously
2481
2482 // Randomly transition to focused or not focused
2483 if (rand() % 3) {
2484 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2485 } else {
2486 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2487 }
2488 break;
2489 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2490 if (afTriggerStart) {
2491 switch (afMode) {
2492 case ANDROID_CONTROL_AF_MODE_AUTO:
2493 // fall-through
2494 case ANDROID_CONTROL_AF_MODE_MACRO:
2495 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2496 break;
2497 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2498 // fall-through
2499 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2500 // continuous autofocus => trigger start has no effect
2501 break;
2502 }
2503 }
2504 break;
2505 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2506 if (afTriggerStart) {
2507 switch (afMode) {
2508 case ANDROID_CONTROL_AF_MODE_AUTO:
2509 // fall-through
2510 case ANDROID_CONTROL_AF_MODE_MACRO:
2511 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2512 break;
2513 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2514 // fall-through
2515 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2516 // continuous autofocus => trigger start has no effect
2517 break;
2518 }
2519 }
2520 break;
2521 default:
2522 ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
2523 }
2524
2525 {
2526 char afStateString[100] = {0,};
2527 camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
2528 oldAfState,
2529 afStateString,
2530 sizeof(afStateString));
2531
2532 char afNewStateString[100] = {0,};
2533 camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
2534 mAfState,
2535 afNewStateString,
2536 sizeof(afNewStateString));
2537 ALOGVV("%s: AF state transitioned from %s to %s",
2538 __FUNCTION__, afStateString, afNewStateString);
2539 }
2540
2541
2542 return OK;
2543}
2544
2545status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2546 camera_metadata_entry e;
2547
2548 e = settings.find(ANDROID_CONTROL_AWB_MODE);
2549 if (e.count == 0) {
2550 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2551 return BAD_VALUE;
2552 }
2553 uint8_t awbMode = e.data.u8[0];
2554 //DBG_LOGB(" awbMode%d\n", awbMode);
2555
2556 // TODO: Add white balance simulation
2557
2558 switch (awbMode) {
2559 case ANDROID_CONTROL_AWB_MODE_OFF:
2560 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2561 return OK;
2562 case ANDROID_CONTROL_AWB_MODE_AUTO:
2563 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2564 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2565 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2566 case ANDROID_CONTROL_AWB_MODE_SHADE:
2567 mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; //add for cts
2568 return mSensor->setAWB(awbMode);
2571 default:
2572 ALOGE("%s: Emulator doesn't support AWB mode %d",
2573 __FUNCTION__, awbMode);
2574 return BAD_VALUE;
2575 }
2576
2577 return OK;
2578}
2579
2580
2581void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2582 if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
2583 settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
2584 &mAeCurrentExposureTime, 1);
2585 settings.update(ANDROID_SENSOR_SENSITIVITY,
2586 &mAeCurrentSensitivity, 1);
2587 }
2588
2589 settings.update(ANDROID_CONTROL_AE_STATE,
2590 &mAeState, 1);
2591 settings.update(ANDROID_CONTROL_AF_STATE,
2592 &mAfState, 1);
2593 settings.update(ANDROID_CONTROL_AWB_STATE,
2594 &mAwbState, 1);
2595 /**
2596 * TODO: Trigger IDs need a think-through
2597 */
2598 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
2599 &mAfTriggerId, 1);
2600}
2601
2602void EmulatedFakeCamera3::signalReadoutIdle() {
2603 Mutex::Autolock l(mLock);
2604 CAMHAL_LOGDB("%s , E" , __FUNCTION__);
2605 // Need to check isIdle again because waiting on mLock may have allowed
2606 // something to be placed in the in-flight queue.
2607 if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2608 ALOGV("Now idle");
2609 mStatus = STATUS_READY;
2610 }
2611 CAMHAL_LOGDB("%s , X , mStatus = %d " , __FUNCTION__, mStatus);
2612}
2613
2614void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2615 nsecs_t timestamp) {
2616 switch(e) {
2617 case Sensor::SensorListener::EXPOSURE_START: {
2618 ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
2619 __FUNCTION__, frameNumber, timestamp);
2620 // Trigger shutter notify to framework
2621 camera3_notify_msg_t msg;
2622 msg.type = CAMERA3_MSG_SHUTTER;
2623 msg.message.shutter.frame_number = frameNumber;
2624 msg.message.shutter.timestamp = timestamp;
2625 sendNotify(&msg);
2626 break;
2627 }
2628 case Sensor::SensorListener::ERROR_CAMERA_DEVICE: {
2629 camera3_notify_msg_t msg;
2630 msg.type = CAMERA3_MSG_ERROR;
2631 msg.message.error.frame_number = frameNumber;
2632 msg.message.error.error_stream = NULL;
2633 msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
2634 sendNotify(&msg);
2635 break;
2636 }
2637 default:
2638 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
2639 e, timestamp);
2640 break;
2641 }
2642}
2643
2644EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
2645 mParent(parent), mJpegWaiting(false), mThreadActive(false) {
2646 mExitReadoutThread = false;
2647}
2648
2649EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2650 for (List<Request>::iterator i = mInFlightQueue.begin();
2651 i != mInFlightQueue.end(); i++) {
2652 delete i->buffers;
2653 delete i->sensorBuffers;
2654 }
2655}
2656
2657void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
2658 Mutex::Autolock l(mLock);
2659
2660 mInFlightQueue.push_back(r);
2661 mInFlightSignal.signal();
2662}
2663
2664bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
2665 Mutex::Autolock l(mLock);
2666 return mInFlightQueue.empty() && !mThreadActive;
2667}
2668
2669status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2670 status_t res;
2671 Mutex::Autolock l(mLock);
2672 CAMHAL_LOGDB("%s , E" , __FUNCTION__);
2673 int loopCount = 0;
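// Bounded wait: give up after roughly kMaxWaitLoops * kWaitPerLoop of waiting for the in-flight queue to drain.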
2674 while (mInFlightQueue.size() >= kMaxQueueSize) {
2675 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2676 if (res != OK && res != TIMED_OUT) {
2677 ALOGE("%s: Error waiting for in-flight queue to shrink",
2678 __FUNCTION__);
2679 return INVALID_OPERATION;
2680 }
2681 if (loopCount == kMaxWaitLoops) {
2682 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2683 __FUNCTION__);
2684 return TIMED_OUT;
2685 }
2686 loopCount++;
2687 }
2688 return OK;
2689}
2690
2691status_t EmulatedFakeCamera3::ReadoutThread::setJpegCompressorListener(EmulatedFakeCamera3 *parent) {
2692 status_t res;
2693 res = mParent->mJpegCompressor->setlistener(this);
2694 if (res != NO_ERROR) {
2695 ALOGE("%s: set JpegCompressor Listner failed",__FUNCTION__);
2696 }
2697 return res;
2698}
2699
2700status_t EmulatedFakeCamera3::ReadoutThread::startJpegCompressor(EmulatedFakeCamera3 *parent) {
2701 status_t res;
2702 res = mParent->mJpegCompressor->start();
2703 if (res != NO_ERROR) {
2704 ALOGE("%s: JpegCompressor start failed",__FUNCTION__);
2705 }
2706 return res;
2707}
2708
2709status_t EmulatedFakeCamera3::ReadoutThread::shutdownJpegCompressor(EmulatedFakeCamera3 *parent) {
2710 status_t res;
2711 res = mParent->mJpegCompressor->cancel();
2712 if (res != OK) {
2713 ALOGE("%s: JpegCompressor cancel failed",__FUNCTION__);
2714 }
2715 return res;
2716}
2717
2718void EmulatedFakeCamera3::ReadoutThread::sendExitReadoutThreadSignal(void) {
2719 mExitReadoutThread = true;
2720 mInFlightSignal.signal();
2721}
2722
2723bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
2724 status_t res;
2725 ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
2726
2727 // First wait for a request from the in-flight queue
2728 if (mExitReadoutThread) {
2729 return false;
2730 }
2731
2732 if (mCurrentRequest.settings.isEmpty()) {
2733 Mutex::Autolock l(mLock);
2734 if (mInFlightQueue.empty()) {
2735 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2736 if (res == TIMED_OUT) {
2737 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
2738 __FUNCTION__);
2739 return true;
2740 } else if (res != NO_ERROR) {
2741 ALOGE("%s: Error waiting for capture requests: %d",
2742 __FUNCTION__, res);
2743 return false;
2744 }
2745 }
2746
2747 if (mExitReadoutThread) {
2748 return false;
2749 }
2750
2751 mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
2752 mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
2753 mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
2754 mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
2755 mCurrentRequest.havethumbnail = mInFlightQueue.begin()->havethumbnail;
2756 mInFlightQueue.erase(mInFlightQueue.begin());
2757 mInFlightSignal.signal();
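// Signal so a producer blocked in waitForReadout() can queue the next request now that the queue has shrunk.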
2758 mThreadActive = true;
2759 ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
2760 mCurrentRequest.frameNumber);
2761 }
2762
2763 // Then wait for it to be delivered from the sensor
2764 ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
2765 __FUNCTION__);
2766
2767 nsecs_t captureTime;
2768 status_t gotFrame =
2769 mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
2770 if (gotFrame == 0) {
2771 ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
2772 __FUNCTION__);
2773 return true;
2774 }
2775
2776 if (gotFrame == -1) {
2777 DBG_LOGA("Sensor thread had exited , here should exit ReadoutThread Loop");
2778 return false;
2779 }
2780
2781 ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
2782 mCurrentRequest.frameNumber, captureTime);
2783
2784 // Check if we need to JPEG encode a buffer, and send it for async
2785 // compression if so. Otherwise prepare the buffer for return.
2786 bool needJpeg = false;
2787 HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
2788 while (buf != mCurrentRequest.buffers->end()) {
2789 bool goodBuffer = true;
2790 if ( buf->stream->format ==
2791 HAL_PIXEL_FORMAT_BLOB) {
2792 Mutex::Autolock jl(mJpegLock);
2793 needJpeg = true;
2794 CaptureRequest currentcapture;
2795 currentcapture.frameNumber = mCurrentRequest.frameNumber;
2796 currentcapture.sensorBuffers = mCurrentRequest.sensorBuffers;
2797 currentcapture.buf = buf;
2798 currentcapture.mNeedThumbnail = mCurrentRequest.havethumbnail;
2799 mParent->mJpegCompressor->queueRequest(currentcapture);
2800 // The JPEG compressor takes ownership of sensorBuffers and deletes them when done
2801 mCurrentRequest.sensorBuffers = NULL;
2802 buf = mCurrentRequest.buffers->erase(buf);
2803 continue;
2804 }
2805 GraphicBufferMapper::get().unlock(*(buf->buffer));
2806
2807 buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
2808 CAMERA3_BUFFER_STATUS_ERROR;
2809 buf->acquire_fence = -1;
2810 buf->release_fence = -1;
2811
2812 ++buf;
2813 } // end while
2814
2815 // Construct result for all completed buffers and results
2816
2817 camera3_capture_result result;
2818
2819 mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
2820 &captureTime, 1);
2821
2822 memset(&result, 0, sizeof(result));
2823 result.frame_number = mCurrentRequest.frameNumber;
2824 result.result = mCurrentRequest.settings.getAndLock();
2825 result.num_output_buffers = mCurrentRequest.buffers->size();
2826 result.output_buffers = mCurrentRequest.buffers->array();
2827 result.partial_result = 1;
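// partial_result is 1 because this HAL advertises ANDROID_REQUEST_PARTIAL_RESULT_COUNT of 1 (a single, complete metadata packet per capture).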
2828
2829 // Go idle if queue is empty, before sending result
2830
2831 bool signalIdle = false;
2832 {
2833 Mutex::Autolock l(mLock);
2834 if (mInFlightQueue.empty()) {
2835 mThreadActive = false;
2836 signalIdle = true;
2837 }
2838 }
2839
2840 if (signalIdle) mParent->signalReadoutIdle();
2841
2842 // Send it off to the framework
2843 ALOGVV("%s: ReadoutThread: Send result to framework",
2844 __FUNCTION__);
2845 mParent->sendCaptureResult(&result);
2846
2847 // Clean up
2848 mCurrentRequest.settings.unlock(result.result);
2849
2850 delete mCurrentRequest.buffers;
2851 mCurrentRequest.buffers = NULL;
2852 if (!needJpeg) {
2853 delete mCurrentRequest.sensorBuffers;
2854 mCurrentRequest.sensorBuffers = NULL;
2855 }
2856 mCurrentRequest.settings.clear();
2857 CAMHAL_LOGDB("%s , X " , __FUNCTION__);
2858 return true;
2859}
2860
2861void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2862 const StreamBuffer &jpegBuffer, bool success , CaptureRequest &r) {
2863 Mutex::Autolock jl(mJpegLock);
2864 GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));
2865
2866 mJpegHalBuffer = *(r.buf);
2867 mJpegHalBuffer.status = success ?
2868 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2869 mJpegHalBuffer.acquire_fence = -1;
2870 mJpegHalBuffer.release_fence = -1;
2871 mJpegWaiting = false;
2872
2873 camera3_capture_result result = {}; // zero-initialize so fields not set below (e.g. input_buffer) are not garbage
2874 result.frame_number = r.frameNumber;
2875 result.result = NULL;
2876 result.num_output_buffers = 1;
2877 result.output_buffers = &mJpegHalBuffer;
2878 result.partial_result = 1;
2879
2880 if (!success) {
2881 ALOGE("%s: Compression failure, returning error state buffer to"
2882 " framework", __FUNCTION__);
2883 } else {
2884 DBG_LOGB("%s: Compression complete, returning buffer to framework",
2885 __FUNCTION__);
2886 }
2887
2888 mParent->sendCaptureResult(&result);
2889
2890}
2891
2892void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
2893 const StreamBuffer &inputBuffer) {
2894 // Should never get here, since the input buffer has to be returned
2895 // by end of processCaptureRequest
2896 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2897}
2898
2899
2900}; // namespace android
2901