summaryrefslogtreecommitdiff
path: root/v3/EmulatedFakeCamera3.cpp (plain)
blob: 79aafa1a6f2dd835872a08517d936d1fae7fe719
1/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/*
18 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22#include <inttypes.h>
23
24#define LOG_NDEBUG 0
25//#define LOG_NNDEBUG 0
26#define LOG_TAG "EmulatedCamera_FakeCamera3"
27#include <utils/Log.h>
28
29#include "EmulatedFakeCamera3.h"
30#include "EmulatedCameraFactory.h"
31#include <ui/Fence.h>
32#include <ui/Rect.h>
33#include <ui/GraphicBufferMapper.h>
34#include <sys/types.h>
35
36#include <cutils/properties.h>
37#include "fake-pipeline2/Sensor.h"
38#include "fake-pipeline2/JpegCompressor.h"
39#include <cmath>
40#include <gralloc_priv.h>
41#include <binder/IPCThreadState.h>
42
43#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
44#define ALOGVV ALOGV
45#else
46#define ALOGVV(...) ((void)0)
47#endif
48
49namespace android {
50
51/**
52 * Constants for camera capabilities
53 */
54
// Time-unit multipliers in nanoseconds (the granularity of nsecs_t):
// 1 us = 1e3 ns, 1 ms = 1e6 ns, 1 s = 1e9 ns.
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;
58
59
// HAL pixel formats this fake camera advertises.  IMPLEMENTATION_DEFINED is
// remapped to YCrCb_420_SP (NV21) during configureStreams().
const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
        //HAL_PIXEL_FORMAT_RAW_SENSOR,
        HAL_PIXEL_FORMAT_BLOB,
        //HAL_PIXEL_FORMAT_RGBA_8888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
        // These are handled by YCbCr_420_888
        HAL_PIXEL_FORMAT_YV12,
        HAL_PIXEL_FORMAT_YCrCb_420_SP,
        //HAL_PIXEL_FORMAT_YCbCr_422_I,
        HAL_PIXEL_FORMAT_YCbCr_420_888
};

// Raw capture size (width, height pairs).
const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
    640, 480
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum frame duration for raw streams, in ns.
const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// Processed (YUV) sizes for the back camera as (w, h) pairs.
// NOTE(review): declared with 6 slots but only 4 initializers — the trailing
// two are zero-filled; looks like leftovers from the commented-out 1280x720
// pair.  Confirm consumers treat 0 as a terminator.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[6] = {
    640, 480, 320, 240,// 1280, 720
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Processed (YUV) sizes for the front camera as (w, h) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
    640, 480, 320, 240
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum frame duration for processed streams, in ns.
const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// JPEG capture size for the back camera (w, h).
const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
    1280,720
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// JPEG capture size for the front camera (w, h).
const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
    640, 480
    // Sensor::kResolution[0], Sensor::kResolution[1]
};


// Minimum frame duration for JPEG (BLOB) streams, in ns.
const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};
109
110/**
111 * 3A constants
112 */
113
114// Default exposure and gain targets for different scenarios
115const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
116const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
117const int EmulatedFakeCamera3::kNormalSensitivity = 100;
118const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
119const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
120const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
121const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
122const float EmulatedFakeCamera3::kExposureWanderMin = -2;
123const float EmulatedFakeCamera3::kExposureWanderMax = 1;
124
125/**
126 * Camera device lifecycle methods
127 */
128static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
129jpegsize EmulatedFakeCamera3::getMaxJpegResolution(uint32_t picSizes[],int count) {
130 uint32_t maxJpegWidth = 0, maxJpegHeight = 0;
131 jpegsize maxJpegResolution;
132 for (int i=0; i < count; i+= 4) {
133 uint32_t width = picSizes[i+1];
134 uint32_t height = picSizes[i+2];
135 if (picSizes[i+0] == HAL_PIXEL_FORMAT_BLOB &&
136 (width * height > maxJpegWidth * maxJpegHeight)) {
137 maxJpegWidth = width;
138 maxJpegHeight = height;
139 }
140 }
141 maxJpegResolution.width = maxJpegWidth;
142 maxJpegResolution.height = maxJpegHeight;
143 return maxJpegResolution;
144}
145ssize_t EmulatedFakeCamera3::getJpegBufferSize(int width, int height) {
146 if (maxJpegResolution.width == 0) {
147 return BAD_VALUE;
148 }
149 ssize_t maxJpegBufferSize = JpegCompressor::kMaxJpegSize;
150
151 // Calculate final jpeg buffer size for the given resolution.
152 float scaleFactor = ((float) (width * height)) /
153 (maxJpegResolution.width * maxJpegResolution.height);
154 ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize;
155 // Bound the buffer size to [MIN_JPEG_BUFFER_SIZE, maxJpegBufferSize].
156 if (jpegBufferSize > maxJpegBufferSize) {
157 jpegBufferSize = maxJpegBufferSize;
158 } else if (jpegBufferSize < kMinJpegBufferSize) {
159 jpegBufferSize = kMinJpegBufferSize;
160 }
161 return jpegBufferSize;
162}
163
/*
 * Constructs the fake camera.  Heavy setup (static metadata, sensor start)
 * is deferred to Initialize()/connectCamera(); this only establishes default
 * member state and parses the per-APK override XML.
 */
EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, struct hw_module_t* module) :
        EmulatedCamera3(cameraId, module) {
    ALOGI("Constructing emulated fake camera 3 cameraID:%d", mCameraID);

    // Request templates are built lazily by constructDefaultRequestSettings().
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
        mDefaultTemplates[i] = NULL;
    }

    /**
     * Front cameras = limited mode
     * Back cameras = full mode
     */
    //TODO limited or full mode, read this from camera driver
    //mFullMode = facingBack;
    mCameraStatus = CAMERA_INIT;
    mSupportCap = 0;     // capability bits, filled in by IoctlStateProbe()
    mSupportRotate = 0;  // set when IOCTL_MASK_ROTATE is reported
    mFullMode = 0;
    mFlushTag = false;

    // Load per-APK configuration overrides (used by getCameraInfo() for
    // orientation lookups).
    gLoadXml.parseXMLFile();
}
186
187EmulatedFakeCamera3::~EmulatedFakeCamera3() {
188 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
189 if (mDefaultTemplates[i] != NULL) {
190 free_camera_metadata(mDefaultTemplates[i]);
191 }
192 }
193
194 if (mCameraInfo != NULL) {
195 CAMHAL_LOGIA("free mCameraInfo");
196 free_camera_metadata(mCameraInfo);
197 mCameraInfo = NULL;
198 }
199}
200
/*
 * One-time device initialization: builds the static camera metadata and then
 * defers to the base class.  Must be called exactly once, while the device
 * is still in the error/uninitialized state; a repeat call returns
 * INVALID_OPERATION.
 */
status_t EmulatedFakeCamera3::Initialize() {
    DBG_LOGB("mCameraID=%d,mStatus=%d,ddd\n", mCameraID, mStatus);
    status_t res;

#ifdef HAVE_VERSION_INFO
    // Build-provenance banner, emitted only when version info was baked in.
    CAMHAL_LOGIB("\n--------------------------------\n"
                  "author:aml.sh multi-media team\n"
                  "branch name: %s\n"
                  "git version: %s \n"
                  "last changed: %s\n"
                  "build-time: %s\n"
                  "build-name: %s\n"
                  "uncommitted-file-num:%d\n"
                  "ssh user@%s, cd %s\n"
                  "hostname %s\n"
                  "--------------------------------\n",
                  CAMHAL_BRANCH_NAME,
                  CAMHAL_GIT_VERSION,
                  CAMHAL_LAST_CHANGED,
                  CAMHAL_BUILD_TIME,
                  CAMHAL_BUILD_NAME,
                  CAMHAL_GIT_UNCOMMIT_FILE_NUM,
                  CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
                  );
#endif


    // STATUS_ERROR is the pre-init state; anything else means Initialize()
    // already ran.
    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    // Build the static metadata (mCameraInfo) advertised to the framework.
    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return EmulatedCamera3::Initialize();
}
242
/*
 * Opens the device: spins up the fake sensor, the readout thread and the
 * JPEG compressor, seeds the fake 3A state, then hands off to the base
 * class to expose the hw_device_t.  Requires the device to be closed and
 * "plugged in"; startup failures are propagated unchanged.
 *
 * NOTE(review): if a startup step fails after mSensor was created, the
 * partially-started objects are left as-is and only the error is returned —
 * presumably cleanup happens on the next closeCamera(); confirm.
 */
status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGB("%s, ddd", __FUNCTION__);
    Mutex::Autolock l(mLock);
    status_t res;
    DBG_LOGB("%s , mStatus = %d" , __FUNCTION__, mStatus);

    if ((mStatus != STATUS_CLOSED) || !mPlugged) {
        ALOGE("%s: Can't connect in state %d, mPlugged=%d",
                __FUNCTION__, mStatus, mPlugged);
        return INVALID_OPERATION;
    }

    // Bring up the fake sensor first; everything else depends on it.
    mSensor = new Sensor();
    mSensor->setSensorListener(this);

    res = mSensor->startUp(mCameraID);
    DBG_LOGB("mSensor startUp, mCameraID=%d\n", mCameraID);
    if (res != NO_ERROR) return res;

    // Probe optional driver capabilities (e.g. hardware rotate).
    mSupportCap = mSensor->IoctlStateProbe();
    if (mSupportCap & IOCTL_MASK_ROTATE) {
        mSupportRotate = true;
    }

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor();

    res = mReadoutThread->setJpegCompressorListener(this);
    if (res != NO_ERROR) {
        return res;
    }
    res = mReadoutThread->startJpegCompressor(this);
    if (res != NO_ERROR) {
        return res;
    }

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A

    mControlMode = ANDROID_CONTROL_MODE_AUTO;
    mFacePriority = false;
    mAeMode = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    // AE starts converged so previews don't wait for a fake convergence pass.
    mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;//ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAfTriggerId = 0;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}
299
300status_t EmulatedFakeCamera3::plugCamera() {
301 {
302 Mutex::Autolock l(mLock);
303
304 if (!mPlugged) {
305 CAMHAL_LOGIB("%s: Plugged back in", __FUNCTION__);
306 mPlugged = true;
307 }
308 }
309
310 return NO_ERROR;
311}
312
313status_t EmulatedFakeCamera3::unplugCamera() {
314 {
315 Mutex::Autolock l(mLock);
316
317 if (mPlugged) {
318 CAMHAL_LOGIB("%s: Unplugged camera", __FUNCTION__);
319 mPlugged = false;
320 }
321 }
322 return true;
323}
324
325camera_device_status_t EmulatedFakeCamera3::getHotplugStatus() {
326 Mutex::Autolock l(mLock);
327 return mPlugged ?
328 CAMERA_DEVICE_STATUS_PRESENT :
329 CAMERA_DEVICE_STATUS_NOT_PRESENT;
330}
331
332bool EmulatedFakeCamera3::getCameraStatus()
333{
334 CAMHAL_LOGVB("%s, mCameraStatus = %d",__FUNCTION__,mCameraStatus);
335 bool ret = false;
336 if (mStatus == STATUS_CLOSED) {
337 ret = true;
338 } else {
339 ret = false;
340 }
341 return ret;
342}
343
/*
 * Tears the device down: wakes and stops the sensor, shuts down the JPEG
 * compressor and readout thread, then frees per-stream private state.
 * The shutdown order (sensor first, then compressor, then readout thread,
 * then stream bookkeeping) is deliberate; each stage logs its progress.
 * No-op if the device is already closed.
 */
status_t EmulatedFakeCamera3::closeCamera() {
    DBG_LOGB("%s, %d\n", __FUNCTION__, __LINE__);
    status_t res;
    {
        Mutex::Autolock l(mLock);
        if (mStatus == STATUS_CLOSED) return OK;
    }

    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);
    // Wake anything blocked in the readout loop / sensor wait first so the
    // shutdown calls below don't stall.
    mReadoutThread->sendFlushSingnal();
    mSensor->sendExitSingalToSensor();
    res = mSensor->shutDown();
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
        return res;
    }
    mSensor.clear();
    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);

    {
        Mutex::Autolock l(mLock);
        res = mReadoutThread->shutdownJpegCompressor(this);
        if (res != OK) {
            ALOGE("%s: Unable to shut down JpegCompressor: %d", __FUNCTION__, res);
            return res;
        }
        mReadoutThread->sendExitReadoutThreadSignal();
        mReadoutThread->requestExit();
    }
    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);

    // Join with mLock released — presumably so the exiting readout thread
    // can still take the lock while winding down; TODO confirm.
    mReadoutThread->join();
    DBG_LOGA("Sucess exit ReadOutThread");
    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = NULL;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }
    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);
    return EmulatedCamera3::closeCamera();
}
392
393status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
394 char property[PROPERTY_VALUE_MAX];
395 char* tempApkName = gLoadXml.getApkPackageName(IPCThreadState::self()->getCallingPid());
396 List_Or * temp=new List_Or();
397 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
398 if (mSensorType == SENSOR_USB) {
399 if (mFacingBack) {
400 property_get("hw.camera.orientation.back", property, "0");
401 } else {
402 property_get("hw.camera.orientation.front", property, "0");
403 }
404 int32_t orientation = atoi(property);
405
406 if (gLoadXml.findApkCp(tempApkName, temp)) {
407 orientation = atoi(temp->pro);
408 }
409 if (temp != NULL) {
410 delete temp;
411 temp = NULL;
412 }
413
414 property_get("hw.camera.usb.orientation_offset", property, "0");
415 orientation += atoi(property);
416 orientation %= 360;
417 info->orientation = orientation ;
418 } else {
419 if (mFacingBack) {
420 property_get("hw.camera.orientation.back", property, "270");
421 } else {
422 property_get("hw.camera.orientation.front", property, "90");
423 }
424 info->orientation = atoi(property);
425 }
426 return EmulatedCamera3::getCameraInfo(info);
427}
428
429/**
430 * Camera3 interface methods
431 */
432
433void EmulatedFakeCamera3::getValidJpegSize(uint32_t picSizes[], uint32_t availablejpegsize[], int count) {
434 int i,j,k;
435 bool valid = true;
436 for (i=0,j=0; i < count; i+= 4) {
437 for (k= 0; k<=j ;k+=2) {
438 if ((availablejpegsize[k]*availablejpegsize[k+1]) == (picSizes[i+1]*picSizes[i+2])) {
439
440 valid = false;
441 }
442 }
443 if (valid) {
444 availablejpegsize[j] = picSizes[i+1];
445 availablejpegsize[j+1] = picSizes[i+2];
446 j+=2;
447 }
448 valid = true;
449 }
450}
451
452status_t EmulatedFakeCamera3::checkValidJpegSize(uint32_t width, uint32_t height) {
453
454 int validsizecount = 0;
455 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
456 for (uint32_t f = 0; f < count; f+=2) {
457 if (mAvailableJpegSize[f] != 0) {
458 if ((mAvailableJpegSize[f] == width)&&(mAvailableJpegSize[f+1] == height)) {
459 validsizecount++;
460 }
461 } else {
462 break;
463 }
464 }
465 if (validsizecount == 0)
466 return BAD_VALUE;
467 return OK;
468}
469
470status_t EmulatedFakeCamera3::configureStreams(
471 camera3_stream_configuration *streamList) {
472 Mutex::Autolock l(mLock);
473 uint32_t width, height, pixelfmt;
474 bool isRestart = false;
475 mFlushTag = false;
476 DBG_LOGB("%s: %d streams", __FUNCTION__, streamList->num_streams);
477
478 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
479 ALOGE("%s: Cannot configure streams in state %d",
480 __FUNCTION__, mStatus);
481 return NO_INIT;
482 }
483
484 /**
485 * Sanity-check input list.
486 */
487 if (streamList == NULL) {
488 ALOGE("%s: NULL stream configuration", __FUNCTION__);
489 return BAD_VALUE;
490 }
491
492 if (streamList->streams == NULL) {
493 ALOGE("%s: NULL stream list", __FUNCTION__);
494 return BAD_VALUE;
495 }
496
497 if (streamList->num_streams < 1) {
498 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
499 streamList->num_streams);
500 return BAD_VALUE;
501 }
502
503 camera3_stream_t *inputStream = NULL;
504 for (size_t i = 0; i < streamList->num_streams; i++) {
505 camera3_stream_t *newStream = streamList->streams[i];
506
507 if (newStream == NULL) {
508 ALOGE("%s: Stream index %zu was NULL",
509 __FUNCTION__, i);
510 return BAD_VALUE;
511 }
512
513 if (newStream->max_buffers <= 0) {
514 isRestart = true;//mSensor->isNeedRestart(newStream->width, newStream->height, newStream->format);
515 DBG_LOGB("format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d, isRestart=%d\n",
516 newStream->format, newStream->width, newStream->height,
517 newStream->stream_type, newStream->max_buffers,
518 isRestart);
519 }
520 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
521 __FUNCTION__, newStream, i, newStream->stream_type,
522 newStream->usage,
523 newStream->format);
524
525 if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
526 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
527 if (inputStream != NULL) {
528
529 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
530 return BAD_VALUE;
531 }
532 inputStream = newStream;
533 }
534
535 bool validFormat = false;
536 for (size_t f = 0;
537 f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
538 f++) {
539 if (newStream->format == kAvailableFormats[f]) {
540 validFormat = true;
541 //HAL_PIXEL_FORMAT_YCrCb_420_SP,
542 if (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format)
543 newStream->format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
544
545 break;
546 }
547 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
548 }
549 if (!validFormat) {
550 ALOGE("%s: Unsupported stream format 0x%x requested",
551 __FUNCTION__, newStream->format);
552 return BAD_VALUE;
553 }
554
555 status_t ret = checkValidJpegSize(newStream->width, newStream->height);
556 if (ret != OK) {
557 return BAD_VALUE;
558 }
559
560 }
561 mInputStream = inputStream;
562 width = 0;
563 height = 0;
564 for (size_t i = 0; i < streamList->num_streams; i++) {
565 camera3_stream_t *newStream = streamList->streams[i];
566 DBG_LOGB("find propert width and height, format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d\n",
567 newStream->format, newStream->width, newStream->height, newStream->stream_type, newStream->max_buffers);
568 if ((HAL_PIXEL_FORMAT_BLOB != newStream->format) &&
569 (CAMERA3_STREAM_OUTPUT == newStream->stream_type)) {
570
571 if (width < newStream->width)
572 width = newStream->width;
573
574 if (height < newStream->height)
575 height = newStream->height;
576
577 pixelfmt = (uint32_t)newStream->format;
578 if (HAL_PIXEL_FORMAT_YCbCr_420_888 == pixelfmt)
579 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
580 }
581
582 }
583
584 //TODO modify this ugly code
585 if (isRestart) {
586 isRestart = mSensor->isNeedRestart(width, height, pixelfmt);
587 }
588
589 if (isRestart) {
590 mSensor->streamOff();
591 pixelfmt = mSensor->halFormatToSensorFormat(pixelfmt);
592 mSensor->setOutputFormat(width, height, pixelfmt, 0);
593 mSensor->streamOn();
594 DBG_LOGB("width=%d, height=%d, pixelfmt=%.4s\n",
595 width, height, (char*)&pixelfmt);
596 }
597
598 /**
599 * Initially mark all existing streams as not alive
600 */
601 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
602 PrivateStreamInfo *privStream =
603 static_cast<PrivateStreamInfo*>((*s)->priv);
604 privStream->alive = false;
605 }
606
607 /**
608 * Find new streams and mark still-alive ones
609 */
610 for (size_t i = 0; i < streamList->num_streams; i++) {
611 camera3_stream_t *newStream = streamList->streams[i];
612 if (newStream->priv == NULL) {
613 // New stream, construct info
614 PrivateStreamInfo *privStream = new PrivateStreamInfo();
615 privStream->alive = true;
616 privStream->registered = false;
617
618 newStream->usage =
619 mSensor->getStreamUsage(newStream->stream_type);
620
621 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
622 newStream->max_buffers = kMaxBufferCount;
623 newStream->priv = privStream;
624 mStreams.push_back(newStream);
625 } else {
626 // Existing stream, mark as still alive.
627 PrivateStreamInfo *privStream =
628 static_cast<PrivateStreamInfo*>(newStream->priv);
629 CAMHAL_LOGDA("Existing stream ?");
630 privStream->alive = true;
631 }
632 DBG_LOGB("%d, newStream=%p, stream_type=%d, usage=%x, priv=%p, w*h=%dx%d\n",
633 i, newStream, newStream->stream_type, newStream->usage, newStream->priv, newStream->width, newStream->height);
634 }
635
636 /**
637 * Reap the dead streams
638 */
639 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
640 PrivateStreamInfo *privStream =
641 static_cast<PrivateStreamInfo*>((*s)->priv);
642 if (!privStream->alive) {
643 DBG_LOGA("delete not alive streams");
644 (*s)->priv = NULL;
645 delete privStream;
646 s = mStreams.erase(s);
647 } else {
648 ++s;
649 }
650 }
651
652 /**
653 * Can't reuse settings across configure call
654 */
655 mPrevSettings.clear();
656
657 return OK;
658}
659
660status_t EmulatedFakeCamera3::registerStreamBuffers(
661 const camera3_stream_buffer_set *bufferSet) {
662 DBG_LOGB("%s: E", __FUNCTION__);
663 Mutex::Autolock l(mLock);
664
665 /**
666 * Sanity checks
667 */
668 DBG_LOGA("==========sanity checks\n");
669
670 // OK: register streams at any time during configure
671 // (but only once per stream)
672 if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) {
673 ALOGE("%s: Cannot register buffers in state %d",
674 __FUNCTION__, mStatus);
675 return NO_INIT;
676 }
677
678 if (bufferSet == NULL) {
679 ALOGE("%s: NULL buffer set!", __FUNCTION__);
680 return BAD_VALUE;
681 }
682
683 StreamIterator s = mStreams.begin();
684 for (; s != mStreams.end(); ++s) {
685 if (bufferSet->stream == *s) break;
686 }
687 if (s == mStreams.end()) {
688 ALOGE("%s: Trying to register buffers for a non-configured stream!",
689 __FUNCTION__);
690 return BAD_VALUE;
691 }
692
693 /**
694 * Register the buffers. This doesn't mean anything to the emulator besides
695 * marking them off as registered.
696 */
697
698 PrivateStreamInfo *privStream =
699 static_cast<PrivateStreamInfo*>((*s)->priv);
700
701#if 0
702 if (privStream->registered) {
703 ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__);
704 return BAD_VALUE;
705 }
706#endif
707
708 privStream->registered = true;
709
710 return OK;
711}
712
713const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
714 int type) {
715 DBG_LOGB("%s: E", __FUNCTION__);
716 Mutex::Autolock l(mLock);
717
718 if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
719 ALOGE("%s: Unknown request settings template: %d",
720 __FUNCTION__, type);
721 return NULL;
722 }
723
724 /**
725 * Cache is not just an optimization - pointer returned has to live at
726 * least as long as the camera device instance does.
727 */
728 if (mDefaultTemplates[type] != NULL) {
729 return mDefaultTemplates[type];
730 }
731
732 CameraMetadata settings;
733
734 /** android.request */
735 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
736 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
737
738 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
739 settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
740
741 static const int32_t id = 0;
742 settings.update(ANDROID_REQUEST_ID, &id, 1);
743
744 static const int32_t frameCount = 0;
745 settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
746
747 /** android.lens */
748
749 static const float focusDistance = 0;
750 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
751
752 static const float aperture = 2.8f;
753 settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
754
755// static const float focalLength = 5.0f;
756 static const float focalLength = 3.299999952316284f;
757 settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
758
759 static const float filterDensity = 0;
760 settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
761
762 static const uint8_t opticalStabilizationMode =
763 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
764 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
765 &opticalStabilizationMode, 1);
766
767 // FOCUS_RANGE set only in frame
768
769 /** android.sensor */
770
771 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
772 settings.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
773 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
774 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
775 static const int64_t exposureTime = 10 * MSEC;
776 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
777
778 int64_t frameDuration = mSensor->getMinFrameDuration();
779 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
780
781 static const int32_t sensitivity = 100;
782 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
783
784 static const int64_t rollingShutterSkew = 0;
785 settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
786 // TIMESTAMP set only in frame
787
788 /** android.flash */
789
790 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
791 settings.update(ANDROID_FLASH_STATE, &flashstate, 1);
792
793 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
794 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
795
796 static const uint8_t flashPower = 10;
797 settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
798
799 static const int64_t firingTime = 0;
800 settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
801
802 /** Processing block modes */
803 uint8_t hotPixelMode = 0;
804 uint8_t demosaicMode = 0;
805 uint8_t noiseMode = 0;
806 uint8_t shadingMode = 0;
807 uint8_t colorMode = 0;
808 uint8_t tonemapMode = 0;
809 uint8_t edgeMode = 0;
810 switch (type) {
811
812 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
813 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
814 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
815 // fall-through
816 case CAMERA3_TEMPLATE_STILL_CAPTURE:
817 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
818 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
819 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
820 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
821 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
822 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
823 break;
824 case CAMERA3_TEMPLATE_PREVIEW:
825 // fall-through
826 case CAMERA3_TEMPLATE_VIDEO_RECORD:
827 // fall-through
828 case CAMERA3_TEMPLATE_MANUAL:
829 // fall-through
830 default:
831 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
832 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
833 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
834 shadingMode = ANDROID_SHADING_MODE_FAST;
835 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
836 tonemapMode = ANDROID_TONEMAP_MODE_FAST;
837 edgeMode = ANDROID_EDGE_MODE_FAST;
838 break;
839 }
840 settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
841 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
842 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
843 settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
844 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
845 settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
846 settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
847
848 /** android.noise */
849 static const uint8_t noiseStrength = 5;
850 settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
851 static uint8_t availableNBModes[] = {
852 ANDROID_NOISE_REDUCTION_MODE_OFF,
853 ANDROID_NOISE_REDUCTION_MODE_FAST,
854 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
855 };
856 settings.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
857 availableNBModes, sizeof(availableNBModes)/sizeof(availableNBModes));
858
859
860 /** android.color */
861 static const float colorTransform[9] = {
862 1.0f, 0.f, 0.f,
863 0.f, 1.f, 0.f,
864 0.f, 0.f, 1.f
865 };
866 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
867
868 /** android.tonemap */
869 static const float tonemapCurve[4] = {
870 0.f, 0.f,
871 1.f, 1.f
872 };
873 settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
874 settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
875 settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
876
877 /** android.edge */
878 static const uint8_t edgeStrength = 5;
879 settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
880
881 /** android.scaler */
882 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
883 settings.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
884
885 static const int32_t cropRegion[] = {
886 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
887 };
888 settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
889
890 /** android.jpeg */
891 static const uint8_t jpegQuality = 80;
892 settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
893
894 static const int32_t thumbnailSize[2] = {
895 160, 120
896 };
897 settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
898
899 static const uint8_t thumbnailQuality = 80;
900 settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
901
902 static const double gpsCoordinates[3] = {
903 0, 0, 0
904 };
905 settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); //default 2 value
906
907 static const uint8_t gpsProcessingMethod[32] = "None";
908 settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
909
910 static const int64_t gpsTimestamp = 0;
911 settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
912
913 static const int32_t jpegOrientation = 0;
914 settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
915
916 /** android.stats */
917
918 static const uint8_t faceDetectMode =
919 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
920 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
921
922 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
923 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
924
925 static const uint8_t sharpnessMapMode =
926 ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
927 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
928
929 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
930 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
931 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
932 settings.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
933 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
934 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
935 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
936 // sharpnessMap only in frames
937
938 /** android.control */
939
940 uint8_t controlIntent = 0;
941 uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; //default value
942 uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
943 uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
944 switch (type) {
945 case CAMERA3_TEMPLATE_PREVIEW:
946 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
947 break;
948 case CAMERA3_TEMPLATE_STILL_CAPTURE:
949 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
950 break;
951 case CAMERA3_TEMPLATE_VIDEO_RECORD:
952 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
953 break;
954 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
955 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
956 break;
957 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
958 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
959 break;
960 case CAMERA3_TEMPLATE_MANUAL:
961 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
962 controlMode = ANDROID_CONTROL_MODE_OFF;
963 aeMode = ANDROID_CONTROL_AE_MODE_OFF;
964 awbMode = ANDROID_CONTROL_AWB_MODE_OFF;
965 break;
966 default:
967 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
968 break;
969 }
970 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
971 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
972
973 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
974 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
975
976 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
977 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
978
979 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
980
981 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
982 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
983
984 static const uint8_t aePrecaptureTrigger =
985 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
986 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
987
988 static const int32_t mAfTriggerId = 0;
989 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,&mAfTriggerId, 1);
990 static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
991 settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
992
993 static const int32_t controlRegions[5] = {
994 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
995 1000
996 };
997// settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
998
999 static const int32_t aeExpCompensation = 0;
1000 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
1001
1002 static const int32_t aeTargetFpsRange[2] = {
1003 30, 30
1004 };
1005 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
1006
1007 static const uint8_t aeAntibandingMode =
1008 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
1009 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
1010
1011 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
1012
1013 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
1014 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
1015
1016// settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
1017
1018 uint8_t afMode = 0;
1019 switch (type) {
1020 case CAMERA3_TEMPLATE_PREVIEW:
1021 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1022 break;
1023 case CAMERA3_TEMPLATE_STILL_CAPTURE:
1024 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1025 break;
1026 case CAMERA3_TEMPLATE_VIDEO_RECORD:
1027 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1028 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
1029 break;
1030 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
1031 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1032 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
1033 break;
1034 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
1035 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1036 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
1037 break;
1038 case CAMERA3_TEMPLATE_MANUAL:
1039 afMode = ANDROID_CONTROL_AF_MODE_OFF;
1040 break;
1041 default:
1042 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1043 break;
1044 }
1045 settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
1046
1047 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1048 settings.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
1049
1050// settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
1051
1052 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
1053 settings.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
1054 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1055 settings.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
1056 static const uint8_t vstabMode =
1057 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
1058 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
1059
1060 // aeState, awbState, afState only in frame
1061
1062 mDefaultTemplates[type] = settings.release();
1063
1064 return mDefaultTemplates[type];
1065}
1066
1067status_t EmulatedFakeCamera3::processCaptureRequest(
1068 camera3_capture_request *request) {
1069 status_t res;
1070 nsecs_t exposureTime;
1071 nsecs_t frameDuration;
1072 uint32_t sensitivity;
1073 uint32_t frameNumber;
1074 bool mHaveThumbnail = false;
1075 CameraMetadata settings;
1076 Buffers *sensorBuffers = NULL;
1077 HalBufferVector *buffers = NULL;
1078
1079 if (mFlushTag) {
1080 DBG_LOGA("already flush, but still send Capture Request .\n");
1081 }
1082
1083 {
1084 Mutex::Autolock l(mLock);
1085
1086 /** Validation */
1087
1088 if (mStatus < STATUS_READY) {
1089 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
1090 mStatus);
1091 return INVALID_OPERATION;
1092 }
1093
1094 if (request == NULL) {
1095 ALOGE("%s: NULL request!", __FUNCTION__);
1096 return BAD_VALUE;
1097 }
1098
1099 frameNumber = request->frame_number;
1100
1101 if (request->settings == NULL && mPrevSettings.isEmpty()) {
1102 ALOGE("%s: Request %d: NULL settings for first request after"
1103 "configureStreams()", __FUNCTION__, frameNumber);
1104 return BAD_VALUE;
1105 }
1106
1107 if (request->input_buffer != NULL &&
1108 request->input_buffer->stream != mInputStream) {
1109 DBG_LOGB("%s: Request %d: Input buffer not from input stream!",
1110 __FUNCTION__, frameNumber);
1111 DBG_LOGB("%s: Bad stream %p, expected: %p",
1112 __FUNCTION__, request->input_buffer->stream,
1113 mInputStream);
1114 DBG_LOGB("%s: Bad stream type %d, expected stream type %d",
1115 __FUNCTION__, request->input_buffer->stream->stream_type,
1116 mInputStream ? mInputStream->stream_type : -1);
1117
1118 return BAD_VALUE;
1119 }
1120
1121 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1122 ALOGE("%s: Request %d: No output buffers provided!",
1123 __FUNCTION__, frameNumber);
1124 return BAD_VALUE;
1125 }
1126
1127 // Validate all buffers, starting with input buffer if it's given
1128
1129 ssize_t idx;
1130 const camera3_stream_buffer_t *b;
1131 if (request->input_buffer != NULL) {
1132 idx = -1;
1133 b = request->input_buffer;
1134 } else {
1135 idx = 0;
1136 b = request->output_buffers;
1137 }
1138 do {
1139 PrivateStreamInfo *priv =
1140 static_cast<PrivateStreamInfo*>(b->stream->priv);
1141 if (priv == NULL) {
1142 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
1143 __FUNCTION__, frameNumber, idx);
1144 return BAD_VALUE;
1145 }
1146#if 0
1147 if (!priv->alive || !priv->registered) {
1148 ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream! alive=%d, registered=%d\n",
1149 __FUNCTION__, frameNumber, idx,
1150 priv->alive, priv->registered);
1151 //return BAD_VALUE;
1152 }
1153#endif
1154 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1155 ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
1156 __FUNCTION__, frameNumber, idx);
1157 return BAD_VALUE;
1158 }
1159 if (b->release_fence != -1) {
1160 ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
1161 __FUNCTION__, frameNumber, idx);
1162 return BAD_VALUE;
1163 }
1164 if (b->buffer == NULL) {
1165 ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
1166 __FUNCTION__, frameNumber, idx);
1167 return BAD_VALUE;
1168 }
1169 idx++;
1170 b = &(request->output_buffers[idx]);
1171 } while (idx < (ssize_t)request->num_output_buffers);
1172
1173 // TODO: Validate settings parameters
1174
1175 /**
1176 * Start processing this request
1177 */
1178 mStatus = STATUS_ACTIVE;
1179
1180 camera_metadata_entry e;
1181
1182 if (request->settings == NULL) {
1183 settings.acquire(mPrevSettings);
1184 } else {
1185 settings = request->settings;
1186
1187 uint8_t antiBanding = 0;
1188 uint8_t effectMode = 0;
1189 int exposureCmp = 0;
1190
1191 e = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE);
1192 if (e.count == 0) {
1193 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1194 return BAD_VALUE;
1195 }
1196 antiBanding = e.data.u8[0];
1197 mSensor->setAntiBanding(antiBanding);
1198
1199 e = settings.find(ANDROID_CONTROL_EFFECT_MODE);
1200 if (e.count == 0) {
1201 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1202 return BAD_VALUE;
1203 }
1204 effectMode = e.data.u8[0];
1205 mSensor->setEffect(effectMode);
1206
1207 e = settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
1208 if (e.count == 0) {
1209 ALOGE("%s: No exposure entry!", __FUNCTION__);
1210 //return BAD_VALUE;
1211 } else {
1212 exposureCmp = e.data.i32[0];
1213 DBG_LOGB("set expsore compensaton %d\n", exposureCmp);
1214 mSensor->setExposure(exposureCmp);
1215 }
1216
1217 int32_t cropRegion[4];
1218 int32_t cropWidth;
1219 int32_t outputWidth = request->output_buffers[0].stream->width;
1220
1221 e = settings.find(ANDROID_SCALER_CROP_REGION);
1222 if (e.count == 0) {
1223 ALOGE("%s: No corp region entry!", __FUNCTION__);
1224 //return BAD_VALUE;
1225 } else {
1226 cropRegion[0] = e.data.i32[0];
1227 cropRegion[1] = e.data.i32[1];
1228 cropWidth = cropRegion[2] = e.data.i32[2];
1229 cropRegion[3] = e.data.i32[3];
1230 for (int i = mZoomMin; i <= mZoomMax; i += mZoomStep) {
1231 //if ( (float) i / mZoomMin >= (float) outputWidth / cropWidth) {
1232 if ( i * cropWidth >= outputWidth * mZoomMin ) {
1233 mSensor->setZoom(i);
1234 break;
1235 }
1236 }
1237 DBG_LOGB("cropRegion:%d, %d, %d, %d\n", cropRegion[0], cropRegion[1],cropRegion[2],cropRegion[3]);
1238 }
1239 }
1240
1241 uint8_t len[] = {1};
1242 settings.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
1243
1244 uint8_t maxlen[] = {0};
1245 settings.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
1246
1247 res = process3A(settings);
1248 if (res != OK) {
1249 ALOGVV("%s: process3A failed!", __FUNCTION__);
1250 //return res;
1251 }
1252
1253 // TODO: Handle reprocessing
1254
1255 /**
1256 * Get ready for sensor config
1257 */
1258
1259 bool needJpeg = false;
1260 ssize_t jpegbuffersize;
1261 uint32_t jpegpixelfmt;
1262
1263 exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
1264 frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
1265 sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
1266
1267 sensorBuffers = new Buffers();
1268 buffers = new HalBufferVector();
1269
1270 sensorBuffers->setCapacity(request->num_output_buffers);
1271 buffers->setCapacity(request->num_output_buffers);
1272
1273 // Process all the buffers we got for output, constructing internal buffer
1274 // structures for them, and lock them for writing.
1275 for (size_t i = 0; i < request->num_output_buffers; i++) {
1276 const camera3_stream_buffer &srcBuf = request->output_buffers[i];
1277 const private_handle_t *privBuffer =
1278 (const private_handle_t*)(*srcBuf.buffer);
1279 StreamBuffer destBuf;
1280 destBuf.streamId = kGenericStreamId;
1281 destBuf.width = srcBuf.stream->width;
1282 destBuf.height = srcBuf.stream->height;
1283 destBuf.format = privBuffer->format; // Use real private format
1284 destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc
1285 destBuf.buffer = srcBuf.buffer;
1286 destBuf.share_fd = privBuffer->share_fd;
1287
1288 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
1289 needJpeg = true;
1290 memset(&info,0,sizeof(struct ExifInfo));
1291 info.orientation = settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
1292 jpegpixelfmt = mSensor->getOutputFormat();
1293 if (!mSupportRotate) {
1294 info.mainwidth = srcBuf.stream->width;
1295 info.mainheight = srcBuf.stream->height;
1296 } else {
1297 if ((info.orientation == 90) || (info.orientation == 270)) {
1298 info.mainwidth = srcBuf.stream->height;
1299 info.mainheight = srcBuf.stream->width;
1300 } else {
1301 info.mainwidth = srcBuf.stream->width;
1302 info.mainheight = srcBuf.stream->height;
1303 }
1304 }
1305 if ((jpegpixelfmt == V4L2_PIX_FMT_MJPEG) || (jpegpixelfmt == V4L2_PIX_FMT_YUYV)) {
1306 mSensor->setOutputFormat(info.mainwidth,info.mainheight,jpegpixelfmt,1);
1307 } else {
1308 mSensor->setOutputFormat(info.mainwidth,info.mainheight,V4L2_PIX_FMT_RGB24,1);
1309 }
1310 }
1311
1312 // Wait on fence
1313 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
1314 res = bufferAcquireFence->wait(kFenceTimeoutMs);
1315 if (res == TIMED_OUT) {
1316 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
1317 __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
1318 }
1319 if (res == OK) {
1320 // Lock buffer for writing
1321 const Rect rect(destBuf.width, destBuf.height);
1322 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1323 if (privBuffer->format == HAL_PIXEL_FORMAT_YCbCr_420_888/*HAL_PIXEL_FORMAT_YCrCb_420_SP*/) {
1324 android_ycbcr ycbcr = android_ycbcr();
1325 res = GraphicBufferMapper::get().lockYCbCr(
1326 *(destBuf.buffer),
1327 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK,
1328 rect,
1329 &ycbcr);
1330 // This is only valid because we know that emulator's
1331 // YCbCr_420_888 is really contiguous NV21 under the hood
1332 destBuf.img = static_cast<uint8_t*>(ycbcr.y);
1333 } else {
1334 ALOGE("Unexpected private format for flexible YUV: 0x%x",
1335 privBuffer->format);
1336 res = INVALID_OPERATION;
1337 }
1338 } else {
1339 res = GraphicBufferMapper::get().lock(*(destBuf.buffer),
1340 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK,
1341 rect,
1342 (void**)&(destBuf.img));
1343 }
1344 if (res != OK) {
1345 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
1346 __FUNCTION__, frameNumber, i);
1347 }
1348 }
1349
1350 if (res != OK) {
1351 // Either waiting or locking failed. Unlock locked buffers and bail
1352 // out.
1353 for (size_t j = 0; j < i; j++) {
1354 GraphicBufferMapper::get().unlock(
1355 *(request->output_buffers[i].buffer));
1356 }
1357 ALOGE("line:%d, format for this usage: %d x %d, usage %x, format=%x, returned\n",
1358 __LINE__, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format);
1359 return NO_INIT;
1360 }
1361 sensorBuffers->push_back(destBuf);
1362 buffers->push_back(srcBuf);
1363 }
1364
1365 if (needJpeg) {
1366 if (!mSupportRotate) {
1367 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1368 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1369 } else {
1370 if ((info.orientation == 90) || (info.orientation == 270)) {
1371 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1372 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1373 } else {
1374 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1375 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1376 }
1377 }
1378 if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
1379 info.latitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[0];
1380 info.longitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[1];
1381 info.altitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[2];
1382 info.has_latitude = true;
1383 info.has_longitude = true;
1384 info.has_altitude = true;
1385 } else {
1386 info.has_latitude = false;
1387 info.has_longitude = false;
1388 info.has_altitude = false;
1389 }
1390 if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
1391 uint8_t * gpsString = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
1392 memcpy(info.gpsProcessingMethod, gpsString , sizeof(info.gpsProcessingMethod)-1);
1393 info.has_gpsProcessingMethod = true;
1394 } else {
1395 info.has_gpsProcessingMethod = false;
1396 }
1397 if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
1398 info.gpsTimestamp = settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
1399 info.has_gpsTimestamp = true;
1400 } else {
1401 info.has_gpsTimestamp = false;
1402 }
1403 if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
1404 info.focallen = settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
1405 info.has_focallen = true;
1406 } else {
1407 info.has_focallen = false;
1408 }
1409 jpegbuffersize = getJpegBufferSize(info.mainwidth,info.mainheight);
1410
1411 mJpegCompressor->SetMaxJpegBufferSize(jpegbuffersize);
1412 mJpegCompressor->SetExifInfo(info);
1413 mSensor->setPictureRotate(info.orientation);
1414 if ((info.thumbwidth > 0) && (info.thumbheight > 0)) {
1415 mHaveThumbnail = true;
1416 }
1417 DBG_LOGB("%s::thumbnailSize_width=%d,thumbnailSize_height=%d,mainsize_width=%d,mainsize_height=%d,jpegOrientation=%d",__FUNCTION__,
1418 info.thumbwidth,info.thumbheight,info.mainwidth,info.mainheight,info.orientation);
1419 }
1420 /**
1421 * Wait for JPEG compressor to not be busy, if needed
1422 */
1423#if 0
1424 if (needJpeg) {
1425 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1426 if (!ready) {
1427 ALOGE("%s: Timeout waiting for JPEG compression to complete!",
1428 __FUNCTION__);
1429 return NO_INIT;
1430 }
1431 }
1432#else
1433 while (needJpeg) {
1434 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1435 if (ready) {
1436 break;
1437 }
1438 }
1439#endif
1440 }
1441 /**
1442 * Wait until the in-flight queue has room
1443 */
1444 res = mReadoutThread->waitForReadout();
1445 if (res != OK) {
1446 ALOGE("%s: Timeout waiting for previous requests to complete!",
1447 __FUNCTION__);
1448 return NO_INIT;
1449 }
1450
1451 /**
1452 * Wait until sensor's ready. This waits for lengthy amounts of time with
1453 * mLock held, but the interface spec is that no other calls may by done to
1454 * the HAL by the framework while process_capture_request is happening.
1455 */
1456 {
1457 Mutex::Autolock l(mLock);
1458 int syncTimeoutCount = 0;
1459 while (!mSensor->waitForVSync(kSyncWaitTimeout)) {
1460 if (mStatus == STATUS_ERROR) {
1461 return NO_INIT;
1462 }
1463 if (syncTimeoutCount == kMaxSyncTimeoutCount) {
1464 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
1465 __FUNCTION__, frameNumber,
1466 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
1467 return NO_INIT;
1468 }
1469 syncTimeoutCount++;
1470 }
1471
1472 /**
1473 * Configure sensor and queue up the request to the readout thread
1474 */
1475 mSensor->setExposureTime(exposureTime);
1476 mSensor->setFrameDuration(frameDuration);
1477 mSensor->setSensitivity(sensitivity);
1478 mSensor->setDestinationBuffers(sensorBuffers);
1479 mSensor->setFrameNumber(request->frame_number);
1480
1481 ReadoutThread::Request r;
1482 r.frameNumber = request->frame_number;
1483 r.settings = settings;
1484 r.sensorBuffers = sensorBuffers;
1485 r.buffers = buffers;
1486 r.havethumbnail = mHaveThumbnail;
1487
1488 mReadoutThread->queueCaptureRequest(r);
1489 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1490
1491 // Cache the settings for next time
1492 mPrevSettings.acquire(settings);
1493 }
1494 CAMHAL_LOGVB("%s , X" , __FUNCTION__);
1495 return OK;
1496}
1497
1498/** Debug methods */
1499
1500void EmulatedFakeCamera3::dump(int fd) {
1501
1502 String8 result;
1503 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
1504 result = String8::format("%s, valid resolution\n", __FILE__);
1505
1506 for (uint32_t f = 0; f < count; f+=2) {
1507 if (mAvailableJpegSize[f] == 0)
1508 break;
1509 result.appendFormat("width: %d , height =%d\n",
1510 mAvailableJpegSize[f], mAvailableJpegSize[f+1]);
1511 }
1512 result.appendFormat("\nmZoomMin: %d , mZoomMax =%d, mZoomStep=%d\n",
1513 mZoomMin, mZoomMax, mZoomStep);
1514
1515 if (mZoomStep <= 0) {
1516 result.appendFormat("!!!!!!!!!camera apk may have no picture out\n");
1517 }
1518
1519 write(fd, result.string(), result.size());
1520
1521 if (mSensor.get() != NULL) {
1522 mSensor->dump(fd);
1523 }
1524
1525}
1526//flush all request
1527//TODO returned buffers every request held immediately with
1528//CAMERA3_BUFFER_STATUS_ERROR flag.
// Flushes all pending capture requests.
// Sets mFlushTag (observed by processCaptureRequest()), tells the readout
// thread to flush everything it holds, then clears the flush flags on the
// readout thread and sensor — presumably re-arming them for normal
// operation afterwards (TODO confirm flag semantics in ReadoutThread/Sensor).
// Always returns 0.
int EmulatedFakeCamera3::flush_all_requests() {
    DBG_LOGA("flush all request");
    mFlushTag = true;
    mReadoutThread->flushAllRequest(true);
    mReadoutThread->setFlushFlag(false);
    mSensor->setFlushFlag(false);
    return 0;
}
1537/** Tag query methods */
1538const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) {
1539 return NULL;
1540}
1541
1542const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) {
1543 return NULL;
1544}
1545
// No vendor tags exist, so there is no meaningful type to report; always
// returns 0 regardless of the tag queried.
int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
    return 0;
}
1549
1550/**
1551 * Private methods
1552 */
1553
1554camera_metadata_ro_entry_t EmulatedFakeCamera3::staticInfo(const CameraMetadata *info, uint32_t tag,
1555 size_t minCount, size_t maxCount, bool required) const {
1556
1557 camera_metadata_ro_entry_t entry = info->find(tag);
1558
1559 if (CC_UNLIKELY( entry.count == 0 ) && required) {
1560 const char* tagSection = get_camera_metadata_section_name(tag);
1561 if (tagSection == NULL) tagSection = "<unknown>";
1562 const char* tagName = get_camera_metadata_tag_name(tag);
1563 if (tagName == NULL) tagName = "<unknown>";
1564
1565 ALOGE("Error finding static metadata entry '%s.%s' (%x)",
1566 tagSection, tagName, tag);
1567 } else if (CC_UNLIKELY(
1568 (minCount != 0 && entry.count < minCount) ||
1569 (maxCount != 0 && entry.count > maxCount) ) ) {
1570 const char* tagSection = get_camera_metadata_section_name(tag);
1571 if (tagSection == NULL) tagSection = "<unknown>";
1572 const char* tagName = get_camera_metadata_tag_name(tag);
1573 if (tagName == NULL) tagName = "<unknown>";
1574 ALOGE("Malformed static metadata entry '%s.%s' (%x):"
1575 "Expected between %zu and %zu values, but got %zu values",
1576 tagSection, tagName, tag, minCount, maxCount, entry.count);
1577 }
1578
1579 return entry;
1580}
1581
1582//this is only for debug
1583void EmulatedFakeCamera3::getStreamConfigurationp(CameraMetadata *info) {
1584 const int STREAM_CONFIGURATION_SIZE = 4;
1585 const int STREAM_FORMAT_OFFSET = 0;
1586 const int STREAM_WIDTH_OFFSET = 1;
1587 const int STREAM_HEIGHT_OFFSET = 2;
1588 const int STREAM_IS_INPUT_OFFSET = 3;
1589
1590 camera_metadata_ro_entry_t availableStreamConfigs =
1591 staticInfo(info, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
1592 CAMHAL_LOGDB(" stream, availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1593
1594 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1595 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
1596 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
1597 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
1598 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
1599 CAMHAL_LOGDB("f=%x, w*h=%dx%d, du=%d\n", format, width, height, isInput);
1600 }
1601
1602}
1603
1604//this is only for debug
1605void EmulatedFakeCamera3::getStreamConfigurationDurations(CameraMetadata *info) {
1606 const int STREAM_CONFIGURATION_SIZE = 4;
1607 const int STREAM_FORMAT_OFFSET = 0;
1608 const int STREAM_WIDTH_OFFSET = 1;
1609 const int STREAM_HEIGHT_OFFSET = 2;
1610 const int STREAM_IS_INPUT_OFFSET = 3;
1611
1612 camera_metadata_ro_entry_t availableStreamConfigs =
1613 staticInfo(info, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
1614 CAMHAL_LOGDB("availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1615
1616 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1617 int64_t format = availableStreamConfigs.data.i64[i + STREAM_FORMAT_OFFSET];
1618 int64_t width = availableStreamConfigs.data.i64[i + STREAM_WIDTH_OFFSET];
1619 int64_t height = availableStreamConfigs.data.i64[i + STREAM_HEIGHT_OFFSET];
1620 int64_t isInput = availableStreamConfigs.data.i64[i + STREAM_IS_INPUT_OFFSET];
1621 CAMHAL_LOGDB("f=%llx, w*h=%lldx%lld, du=%lld\n", format, width, height, isInput);
1622 }
1623}
1624
// Intentionally empty hook: the fake camera makes no device-specific
// adjustments to the static metadata. Kept so subclasses / future variants
// have a single place to override metadata after construction.
void EmulatedFakeCamera3::updateCameraMetaData(CameraMetadata *info) {

}
1628
1629status_t EmulatedFakeCamera3::constructStaticInfo() {
1630
1631 status_t ret = OK;
1632 CameraMetadata info;
1633 uint32_t picSizes[64 * 8];
1634 int64_t* duration = NULL;
1635 int count, duration_count, availablejpegsize;
1636 uint8_t maxCount = 10;
1637 char property[PROPERTY_VALUE_MAX];
1638 unsigned int supportrotate;
1639 availablejpegsize = ARRAY_SIZE(mAvailableJpegSize);
1640 memset(mAvailableJpegSize,0,(sizeof(uint32_t))*availablejpegsize);
1641 sp<Sensor> s = new Sensor();
1642 ret = s->startUp(mCameraID);
1643 if (ret != OK) {
1644 DBG_LOGA("sensor start up failed");
1645 return ret;
1646 }
1647
1648 mSensorType = s->getSensorType();
1649
1650 if ( mSensorType == SENSOR_USB) {
1651 char property[PROPERTY_VALUE_MAX];
1652 property_get("rw.camera.usb.faceback", property, "false");
1653 if (strstr(property, "true"))
1654 mFacingBack = 1;
1655 else
1656 mFacingBack = 0;
1657 ALOGI("Setting usb camera cameraID:%d to back camera:%s\n",
1658 mCameraID, property);
1659 } else {
1660 if (s->mSensorFace == SENSOR_FACE_FRONT) {
1661 mFacingBack = 0;
1662 } else if (s->mSensorFace == SENSOR_FACE_BACK) {
1663 mFacingBack = 1;
1664 } else if (s->mSensorFace == SENSOR_FACE_NONE) {
1665 if (gEmulatedCameraFactory.getEmulatedCameraNum() == 1) {
1666 mFacingBack = 1;
1667 } else if ( mCameraID == 0) {
1668 mFacingBack = 1;
1669 } else {
1670 mFacingBack = 0;
1671 }
1672 }
1673
1674 ALOGI("Setting on board camera cameraID:%d to back camera:%d[0 false, 1 true]\n",
1675 mCameraID, mFacingBack);
1676 }
1677
1678 mSupportCap = s->IoctlStateProbe();
1679 if (mSupportCap & IOCTL_MASK_ROTATE) {
1680 supportrotate = true;
1681 } else {
1682 supportrotate = false;
1683 }
1684 // android.lens
1685
1686 // 5 cm min focus distance for back camera, infinity (fixed focus) for front
1687 // TODO read this ioctl from camera driver
1688 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
1689 const float minFocusDistance = 0.0;
1690 info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1691 &minFocusDistance, 1);
1692
1693 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1694 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
1695 info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1696 &minFocusDistance, 1);
1697
1698 static const float focalLength = 3.30f; // mm
1699 info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1700 &focalLength, 1);
1701 static const float aperture = 2.8f;
1702 info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1703 &aperture, 1);
1704 static const float filterDensity = 0;
1705 info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1706 &filterDensity, 1);
1707 static const uint8_t availableOpticalStabilization =
1708 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1709 info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1710 &availableOpticalStabilization, 1);
1711
1712 static const int32_t lensShadingMapSize[] = {1, 1};
1713 info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1714 sizeof(lensShadingMapSize)/sizeof(int32_t));
1715
1716 uint8_t lensFacing = mFacingBack ?
1717 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1718 info.update(ANDROID_LENS_FACING, &lensFacing, 1);
1719
1720 float lensPosition[3];
1721 if (mFacingBack) {
1722 // Back-facing camera is center-top on device
1723 lensPosition[0] = 0;
1724 lensPosition[1] = 20;
1725 lensPosition[2] = -5;
1726 } else {
1727 // Front-facing camera is center-right on device
1728 lensPosition[0] = 20;
1729 lensPosition[1] = 20;
1730 lensPosition[2] = 0;
1731 }
1732#if PLATFORM_SDK_VERSION <= 22
1733 info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
1734 sizeof(float));
1735#endif
1736 static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
1737 info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1);
1738
1739 // android.sensor
1740
1741 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1742 info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
1743 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1744 info.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
1745 info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1746 Sensor::kExposureTimeRange, 2);
1747
1748 info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1749 &Sensor::kFrameDurationRange[1], 1);
1750
1751 info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1752 Sensor::kSensitivityRange,
1753 sizeof(Sensor::kSensitivityRange)
1754 /sizeof(int32_t));
1755
1756 info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1757 &Sensor::kColorFilterArrangement, 1);
1758
1759 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1760 info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1761 sensorPhysicalSize, 2);
1762
1763 info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1764 (int32_t*)Sensor::kResolution, 2);
1765
1766 //(int32_t*)Sensor::kResolution, 2);
1767
1768 info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1769 (int32_t*)&Sensor::kMaxRawValue, 1);
1770
1771 static const int32_t blackLevelPattern[4] = {
1772 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1773 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
1774 };
1775 info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1776 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
1777
1778 static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
1779 info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
1780 if (mSensorType == SENSOR_USB) {
1781 if (mFacingBack) {
1782 property_get("hw.camera.orientation.back", property, "0");
1783 } else {
1784 property_get("hw.camera.orientation.front", property, "0");
1785 }
1786 int32_t orientation = atoi(property);
1787 property_get("hw.camera.usb.orientation_offset", property, "0");
1788 orientation += atoi(property);
1789 orientation %= 360;
1790 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1791 } else {
1792 if (mFacingBack) {
1793 property_get("hw.camera.orientation.back", property, "270");
1794 const int32_t orientation = atoi(property);
1795 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1796 } else {
1797 property_get("hw.camera.orientation.front", property, "90");
1798 const int32_t orientation = atoi(property);
1799 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1800 }
1801 }
1802
1803 static const int64_t rollingShutterSkew = 0;
1804 info.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
1805
1806 //TODO: sensor color calibration fields
1807
1808 // android.flash
1809 static const uint8_t flashAvailable = 0;
1810 info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1811
1812 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
1813 info.update(ANDROID_FLASH_STATE, &flashstate, 1);
1814
1815 static const int64_t flashChargeDuration = 0;
1816 info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
1817
1818 /** android.noise */
1819 static const uint8_t availableNBModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
1820 info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &availableNBModes, 1);
1821
1822 // android.tonemap
1823
1824 static const int32_t tonemapCurvePoints = 128;
1825 info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1826
1827 // android.scaler
1828
1829 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
1830 info.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
1831
1832 info.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1833 kAvailableFormats,
1834 sizeof(kAvailableFormats)/sizeof(int32_t));
1835
1836 info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1837 (int64_t*)kAvailableRawMinDurations,
1838 sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
1839
1840 //for version 3.2 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
1841 count = sizeof(picSizes)/sizeof(picSizes[0]);
1842 count = s->getStreamConfigurations(picSizes, kAvailableFormats, count);
1843
1844 info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1845 (int32_t*)picSizes, count);
1846
1847 if (count < availablejpegsize) {
1848 availablejpegsize = count;
1849 }
1850 getValidJpegSize(picSizes,mAvailableJpegSize,availablejpegsize);
1851
1852 maxJpegResolution = getMaxJpegResolution(picSizes,count);
1853 int32_t full_size[4];
1854 if (mFacingBack) {
1855 full_size[0] = 0;
1856 full_size[1] = 0;
1857 full_size[2] = maxJpegResolution.width;
1858 full_size[3] = maxJpegResolution.height;
1859 } else {
1860 full_size[0] = 0;
1861 full_size[1] = 0;
1862 full_size[2] = maxJpegResolution.width;
1863 full_size[3] = maxJpegResolution.height;
1864 }
1865 info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1866 (int32_t*)full_size,
1867 sizeof(full_size)/sizeof(full_size[0]));
1868 duration = new int64_t[count];
1869 if (duration == NULL) {
1870 DBG_LOGA("allocate memory for duration failed");
1871 return NO_MEMORY;
1872 } else {
1873 memset(duration,0,sizeof(int64_t)*count);
1874 }
1875 duration_count = s->getStreamConfigurationDurations(picSizes, duration , count);
1876
1877 info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1878 duration, duration_count);
1879 info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1880 duration, duration_count);
1881
1882 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1883 (int64_t*)kAvailableProcessedMinDurations,
1884 sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
1885
1886 info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
1887 (int64_t*)kAvailableJpegMinDurations,
1888 sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
1889
1890
1891 // android.jpeg
1892
1893 static const int32_t jpegThumbnailSizes[] = {
1894 0, 0,
1895 160, 120,
1896 320, 240
1897 };
1898 info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1899 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
1900
1901 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1902 info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1903
1904 // android.stats
1905
1906 static const uint8_t availableFaceDetectModes[] = {
1907 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1908 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1909 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
1910 };
1911
1912 info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1913 availableFaceDetectModes,
1914 sizeof(availableFaceDetectModes));
1915
1916 static const int32_t maxFaceCount = 8;
1917 info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1918 &maxFaceCount, 1);
1919
1920 static const int32_t histogramSize = 64;
1921 info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1922 &histogramSize, 1);
1923
1924 static const int32_t maxHistogramCount = 1000;
1925 info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1926 &maxHistogramCount, 1);
1927
1928 static const int32_t sharpnessMapSize[2] = {64, 64};
1929 info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1930 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
1931
1932 static const int32_t maxSharpnessMapValue = 1000;
1933 info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1934 &maxSharpnessMapValue, 1);
1935 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
1936 info.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
1937
1938 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
1939 info.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
1940 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
1941 info.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
1942 // android.control
1943
1944 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
1945 info.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
1946
1947 static const uint8_t availableSceneModes[] = {
1948 // ANDROID_CONTROL_SCENE_MODE_DISABLED,
1949 ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY
1950 };
1951 info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1952 availableSceneModes, sizeof(availableSceneModes));
1953
1954 static const uint8_t availableEffects[] = {
1955 ANDROID_CONTROL_EFFECT_MODE_OFF
1956 };
1957 info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1958 availableEffects, sizeof(availableEffects));
1959
1960 static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
1961 info.update(ANDROID_CONTROL_MAX_REGIONS,
1962 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
1963
1964 static const uint8_t availableAeModes[] = {
1965 ANDROID_CONTROL_AE_MODE_OFF,
1966 ANDROID_CONTROL_AE_MODE_ON
1967 };
1968 info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1969 availableAeModes, sizeof(availableAeModes));
1970
1971
1972 static const int32_t availableTargetFpsRanges[] = {
1973 5, 15, 15, 15, 5, 25, 25, 25, 5, 30, 30, 30,
1974 };
1975 info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1976 availableTargetFpsRanges,
1977 sizeof(availableTargetFpsRanges)/sizeof(int32_t));
1978
1979 uint8_t awbModes[maxCount];
1980 count = s->getAWB(awbModes, maxCount);
1981 if (count < 0) {
1982 static const uint8_t availableAwbModes[] = {
1983 ANDROID_CONTROL_AWB_MODE_OFF,
1984 ANDROID_CONTROL_AWB_MODE_AUTO,
1985 ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1986 ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1987 ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1988 ANDROID_CONTROL_AWB_MODE_SHADE
1989 };
1990 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1991 availableAwbModes, sizeof(availableAwbModes));
1992 } else {
1993 DBG_LOGB("getAWB %d ",count);
1994 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1995 awbModes, count);
1996 }
1997
1998 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1999 info.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
2000
2001 static const uint8_t availableAfModesFront[] = {
2002 ANDROID_CONTROL_AF_MODE_OFF
2003 };
2004
2005 if (mFacingBack) {
2006 uint8_t afMode[maxCount];
2007 count = s->getAutoFocus(afMode, maxCount);
2008 if (count < 0) {
2009 static const uint8_t availableAfModesBack[] = {
2010 ANDROID_CONTROL_AF_MODE_OFF,
2011 //ANDROID_CONTROL_AF_MODE_AUTO,
2012 //ANDROID_CONTROL_AF_MODE_MACRO,
2013 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
2014 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
2015 };
2016
2017 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2018 availableAfModesBack, sizeof(availableAfModesBack));
2019 } else {
2020 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2021 afMode, count);
2022 }
2023 } else {
2024 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2025 availableAfModesFront, sizeof(availableAfModesFront));
2026 }
2027
2028 uint8_t antiBanding[maxCount];
2029 count = s->getAntiBanding(antiBanding, maxCount);
2030 if (count < 0) {
2031 static const uint8_t availableAntibanding[] = {
2032 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
2033 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
2034 };
2035 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2036 availableAntibanding, sizeof(availableAntibanding));
2037 } else {
2038 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2039 antiBanding, count);
2040 }
2041
2042 camera_metadata_rational step;
2043 int maxExp, minExp, def;
2044 ret = s->getExposure(&maxExp, &minExp, &def, &step);
2045 if (ret < 0) {
2046 static const int32_t aeExpCompensation = 0;
2047 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
2048
2049 static const camera_metadata_rational exposureCompensationStep = {
2050 1, 3
2051 };
2052 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2053 &exposureCompensationStep, 1);
2054
2055 int32_t exposureCompensationRange[] = {0, 0};
2056 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2057 exposureCompensationRange,
2058 sizeof(exposureCompensationRange)/sizeof(int32_t));
2059 } else {
2060 DBG_LOGB("exposure compensation support:(%d, %d)\n", minExp, maxExp);
2061 int32_t exposureCompensationRange[] = {minExp, maxExp};
2062 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2063 exposureCompensationRange,
2064 sizeof(exposureCompensationRange)/sizeof(int32_t));
2065 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2066 &step, 1);
2067 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &def, 1);
2068 }
2069
2070 ret = s->getZoom(&mZoomMin, &mZoomMax, &mZoomStep);
2071 if (ret < 0) {
2072 float maxZoom = 1.0;
2073 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2074 &maxZoom, 1);
2075 } else {
2076 float maxZoom = mZoomMax / mZoomMin;
2077 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2078 &maxZoom, 1);
2079 }
2080
2081 static const uint8_t availableVstabModes[] = {
2082 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
2083 };
2084 info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2085 availableVstabModes, sizeof(availableVstabModes));
2086
2087 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
2088 info.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
2089 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2090 info.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
2091 // android.info
2092 const uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2093 //mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
2094 // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2095 info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2096 &supportedHardwareLevel,
2097 /*count*/1);
2098
2099 int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
2100 info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1);
2101
2102 uint8_t len[] = {1};
2103 info.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
2104
2105 uint8_t maxlen[] = {2};
2106 info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
2107 uint8_t cap[] = {
2108 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
2109 };
2110 info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2111 (uint8_t *)cap, sizeof(cap)/sizeof(cap[0]));
2112
2113
2114 int32_t partialResultCount = 1;
2115 info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,&partialResultCount,1);
2116 int32_t maxNumOutputStreams[3] = {0,2,1};
2117 info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,maxNumOutputStreams,3);
2118 uint8_t aberrationMode[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
2119 info.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
2120 aberrationMode, 1);
2121 info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
2122 aberrationMode, 1);
2123
2124 getAvailableChKeys(&info, supportedHardwareLevel);
2125
2126 if (mCameraInfo != NULL) {
2127 CAMHAL_LOGDA("mCameraInfo is not null, mem leak?");
2128 }
2129 mCameraInfo = info.release();
2130 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
2131
2132 if (duration != NULL) {
2133 delete [] duration;
2134 }
2135
2136 s->shutDown();
2137 s.clear();
2138 mPlugged = true;
2139
2140 return OK;
2141}
2142
2143status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
2144 /**
2145 * Extract top-level 3A controls
2146 */
2147 status_t res;
2148
2149 bool facePriority = false;
2150
2151 camera_metadata_entry e;
2152
2153 e = settings.find(ANDROID_CONTROL_MODE);
2154 if (e.count == 0) {
2155 ALOGE("%s: No control mode entry!", __FUNCTION__);
2156 return BAD_VALUE;
2157 }
2158 uint8_t controlMode = e.data.u8[0];
2159
2160 e = settings.find(ANDROID_CONTROL_SCENE_MODE);
2161 if (e.count == 0) {
2162 ALOGE("%s: No scene mode entry!", __FUNCTION__);
2163 return BAD_VALUE;
2164 }
2165 uint8_t sceneMode = e.data.u8[0];
2166
2167 if (controlMode == ANDROID_CONTROL_MODE_OFF) {
2168 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2169 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2170 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2171 update3A(settings);
2172 return OK;
2173 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2174 switch(sceneMode) {
2175 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
2176 mFacePriority = true;
2177 break;
2178 default:
2179 ALOGE("%s: Emulator doesn't support scene mode %d",
2180 __FUNCTION__, sceneMode);
2181 return BAD_VALUE;
2182 }
2183 } else {
2184 mFacePriority = false;
2185 }
2186
2187 // controlMode == AUTO or sceneMode = FACE_PRIORITY
2188 // Process individual 3A controls
2189
2190 res = doFakeAE(settings);
2191 if (res != OK) return res;
2192
2193 res = doFakeAF(settings);
2194 if (res != OK) return res;
2195
2196 res = doFakeAWB(settings);
2197 if (res != OK) return res;
2198
2199 update3A(settings);
2200 return OK;
2201}
2202
2203status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
2204 camera_metadata_entry e;
2205
2206 e = settings.find(ANDROID_CONTROL_AE_MODE);
2207 if (e.count == 0) {
2208 ALOGE("%s: No AE mode entry!", __FUNCTION__);
2209 return BAD_VALUE;
2210 }
2211 uint8_t aeMode = e.data.u8[0];
2212
2213 switch (aeMode) {
2214 case ANDROID_CONTROL_AE_MODE_OFF:
2215 // AE is OFF
2216 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2217 return OK;
2218 case ANDROID_CONTROL_AE_MODE_ON:
2219 // OK for AUTO modes
2220 break;
2221 default:
2222 ALOGVV("%s: Emulator doesn't support AE mode %d",
2223 __FUNCTION__, aeMode);
2224 return BAD_VALUE;
2225 }
2226
2227 e = settings.find(ANDROID_CONTROL_AE_LOCK);
2228 if (e.count == 0) {
2229 ALOGE("%s: No AE lock entry!", __FUNCTION__);
2230 return BAD_VALUE;
2231 }
2232 bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
2233
2234 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
2235 bool precaptureTrigger = false;
2236 if (e.count != 0) {
2237 precaptureTrigger =
2238 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
2239 }
2240
2241 if (precaptureTrigger) {
2242 ALOGVV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
2243 } else if (e.count > 0) {
2244 ALOGVV("%s: Pre capture trigger was present? %zu",
2245 __FUNCTION__,
2246 e.count);
2247 }
2248
2249 if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2250 // Run precapture sequence
2251 if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2252 mAeCounter = 0;
2253 }
2254
2255 if (mFacePriority) {
2256 mAeTargetExposureTime = kFacePriorityExposureTime;
2257 } else {
2258 mAeTargetExposureTime = kNormalExposureTime;
2259 }
2260
2261 if (mAeCounter > kPrecaptureMinFrames &&
2262 (mAeTargetExposureTime - mAeCurrentExposureTime) <
2263 mAeTargetExposureTime / 10) {
2264 // Done with precapture
2265 mAeCounter = 0;
2266 mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
2267 ANDROID_CONTROL_AE_STATE_CONVERGED;
2268 } else {
2269 // Converge some more
2270 mAeCurrentExposureTime +=
2271 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2272 kExposureTrackRate;
2273 mAeCounter++;
2274 mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
2275 }
2276
2277 } else if (!aeLocked) {
2278 // Run standard occasional AE scan
2279 switch (mAeState) {
2280 case ANDROID_CONTROL_AE_STATE_CONVERGED:
2281 case ANDROID_CONTROL_AE_STATE_INACTIVE:
2282 mAeCounter++;
2283 if (mAeCounter > kStableAeMaxFrames) {
2284 mAeTargetExposureTime =
2285 mFacePriority ? kFacePriorityExposureTime :
2286 kNormalExposureTime;
2287 float exposureStep = ((double)rand() / RAND_MAX) *
2288 (kExposureWanderMax - kExposureWanderMin) +
2289 kExposureWanderMin;
2290 mAeTargetExposureTime *= std::pow(2, exposureStep);
2291 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2292 }
2293 break;
2294 case ANDROID_CONTROL_AE_STATE_SEARCHING:
2295 mAeCurrentExposureTime +=
2296 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2297 kExposureTrackRate;
2298 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
2299 mAeTargetExposureTime / 10) {
2300 // Close enough
2301 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2302 mAeCounter = 0;
2303 }
2304 break;
2305 case ANDROID_CONTROL_AE_STATE_LOCKED:
2306 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2307 mAeCounter = 0;
2308 break;
2309 default:
2310 ALOGE("%s: Emulator in unexpected AE state %d",
2311 __FUNCTION__, mAeState);
2312 return INVALID_OPERATION;
2313 }
2314 } else {
2315 // AE is locked
2316 mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2317 }
2318
2319 return OK;
2320}
2321
status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
    /**
     * Run the simulated auto-focus state machine for one frame.
     *
     * Reads AF mode and AF trigger from the request, forwards the mode to
     * the fake sensor for back-facing cameras, and advances mAfState by at
     * most one transition per frame. Focusing outcomes are randomized.
     *
     * @param settings request metadata (read-only here; state is published
     *                 later by update3A()).
     * @return OK; BAD_VALUE on missing/unsupported controls or triggers.
     */
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AF_MODE);
    if (e.count == 0) {
        ALOGE("%s: No AF mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t afMode = e.data.u8[0];

    e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
    typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
    af_trigger_t afTrigger;
    // If we have an afTrigger, afTriggerId should be set too
    if (e.count != 0) {
        afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);

        e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);

        if (e.count == 0) {
            ALOGE("%s: When android.control.afTrigger is set "
                    " in the request, afTriggerId needs to be set as well",
                    __FUNCTION__);
            return BAD_VALUE;
        }

        mAfTriggerId = e.data.i32[0];

        ALOGVV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
        ALOGVV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
        ALOGVV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
    } else {
        afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    }
    // Front camera has no focuser: force AF off regardless of the request.
    if (!mFacingBack) {
        afMode = ANDROID_CONTROL_AF_MODE_OFF;
    }

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_OFF:
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_MACRO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            if (!mFacingBack) {
                ALOGE("%s: Front camera doesn't support AF mode %d",
                        __FUNCTION__, afMode);
                return BAD_VALUE;
            }
            // Forward the mode to the fake sensor. (Method name typo
            // 'setAutoFocuas' is the project API — kept as-is.)
            mSensor->setAutoFocuas(afMode);
            // OK, handle transitions lower on
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }
#if 0
    // Disabled AF-region plumbing; left for reference.
    e = settings.find(ANDROID_CONTROL_AF_REGIONS);
    if (e.count == 0) {
        ALOGE("%s:Get ANDROID_CONTROL_AF_REGIONS failed\n", __FUNCTION__);
        return BAD_VALUE;
    }
    int32_t x0 = e.data.i32[0];
    int32_t y0 = e.data.i32[1];
    int32_t x1 = e.data.i32[2];
    int32_t y1 = e.data.i32[3];
    mSensor->setFocuasArea(x0, y0, x1, y1);
    DBG_LOGB(" x0:%d, y0:%d,x1:%d,y1:%d,\n", x0, y0, x1, y1);
#endif


    bool afModeChanged = mAfMode != afMode;
    mAfMode = afMode;

    /**
     * Simulate AF triggers. Transition at most 1 state per frame.
     * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
     */

    bool afTriggerStart = false;
    bool afTriggerCancel = false;
    switch (afTrigger) {
        case ANDROID_CONTROL_AF_TRIGGER_IDLE:
            break;
        case ANDROID_CONTROL_AF_TRIGGER_START:
            afTriggerStart = true;
            break;
        case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
            afTriggerCancel = true;
            // Cancel trigger always transitions into INACTIVE
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

            ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);

            // Stay in 'inactive' until at least next frame
            return OK;
        default:
            ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
            return BAD_VALUE;
    }

    // If we get down here, we're either in an autofocus mode
    // or in a continuous focus mode (and no other modes)

    // Remember the previous state only for the transition log at the end.
    int oldAfState = mAfState;
    switch (mAfState) {
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                        break;
                }
            } else {
                // At least one frame stays in INACTIVE
                if (!afModeChanged) {
                    switch (afMode) {
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                            // fall-through
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                            mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
                            break;
                    }
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
            /**
             * When the AF trigger is activated, the algorithm should finish
             * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
             * or AF_NOT_FOCUSED as appropriate
             */
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            /**
             * When the AF trigger is not involved, the AF algorithm should
             * start in INACTIVE state, and then transition into PASSIVE_SCAN
             * and PASSIVE_FOCUSED states
             */
            else if (!afTriggerCancel) {
                // Randomly transition to passive focus
                if (rand() % 3 == 0) {
                    mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
            }

            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            // TODO: initiate passive scan (PASSIVE_SCAN)
            break;
        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
            // Simulate AF sweep completing instantaneously

            // Randomly transition to focused or not focused
            if (rand() % 3) {
                mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
            } else {
                mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
            }
            break;
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        default:
            ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
    }

    // Verbose-log the old -> new state transition by name.
    {
        char afStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                oldAfState,
                afStateString,
                sizeof(afStateString));

        char afNewStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                mAfState,
                afNewStateString,
                sizeof(afNewStateString));
        ALOGVV("%s: AF state transitioned from %s to %s",
              __FUNCTION__, afStateString, afNewStateString);
    }


    return OK;
}
2561
2562status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2563 camera_metadata_entry e;
2564
2565 e = settings.find(ANDROID_CONTROL_AWB_MODE);
2566 if (e.count == 0) {
2567 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2568 return BAD_VALUE;
2569 }
2570 uint8_t awbMode = e.data.u8[0];
2571 //DBG_LOGB(" awbMode%d\n", awbMode);
2572
2573 // TODO: Add white balance simulation
2574
2575 switch (awbMode) {
2576 case ANDROID_CONTROL_AWB_MODE_OFF:
2577 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2578 return OK;
2579 case ANDROID_CONTROL_AWB_MODE_AUTO:
2580 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2581 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2582 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2583 case ANDROID_CONTROL_AWB_MODE_SHADE:
2584 mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; //add for cts
2585 return mSensor->setAWB(awbMode);
2586 // OK
2587 break;
2588 default:
2589 ALOGE("%s: Emulator doesn't support AWB mode %d",
2590 __FUNCTION__, awbMode);
2591 return BAD_VALUE;
2592 }
2593
2594 return OK;
2595}
2596
2597
void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
    // Publish the current simulated 3A state into the (result) metadata.
    if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
        // Only report the synthesized exposure values while AE is actually
        // running; when AE is inactive the request's own values stand.
        settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
                &mAeCurrentExposureTime, 1);
        settings.update(ANDROID_SENSOR_SENSITIVITY,
                &mAeCurrentSensitivity, 1);
    }

    // Mirror the three state machines into their result tags.
    settings.update(ANDROID_CONTROL_AE_STATE,
            &mAeState, 1);
    settings.update(ANDROID_CONTROL_AF_STATE,
            &mAfState, 1);
    settings.update(ANDROID_CONTROL_AWB_STATE,
            &mAwbState, 1);
    /**
     * TODO: Trigger IDs need a think-through
     */
    settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
            &mAfTriggerId, 1);
}
2618
2619void EmulatedFakeCamera3::signalReadoutIdle() {
2620 Mutex::Autolock l(mLock);
2621 CAMHAL_LOGVB("%s , E" , __FUNCTION__);
2622 // Need to chek isIdle again because waiting on mLock may have allowed
2623 // something to be placed in the in-flight queue.
2624 if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2625 ALOGV("Now idle");
2626 mStatus = STATUS_READY;
2627 }
2628 CAMHAL_LOGVB("%s , X , mStatus = %d " , __FUNCTION__, mStatus);
2629}
2630
2631void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2632 nsecs_t timestamp) {
2633 switch(e) {
2634 case Sensor::SensorListener::EXPOSURE_START: {
2635 ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
2636 __FUNCTION__, frameNumber, timestamp);
2637 // Trigger shutter notify to framework
2638 camera3_notify_msg_t msg;
2639 msg.type = CAMERA3_MSG_SHUTTER;
2640 msg.message.shutter.frame_number = frameNumber;
2641 msg.message.shutter.timestamp = timestamp;
2642 sendNotify(&msg);
2643 break;
2644 }
2645 case Sensor::SensorListener::ERROR_CAMERA_DEVICE: {
2646 camera3_notify_msg_t msg;
2647 msg.type = CAMERA3_MSG_ERROR;
2648 msg.message.error.frame_number = frameNumber;
2649 msg.message.error.error_stream = NULL;
2650 msg.message.error.error_code = 1;
2651 sendNotify(&msg);
2652 break;
2653 }
2654 default:
2655 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
2656 e, timestamp);
2657 break;
2658 }
2659}
2660
2661EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
2662 mParent(parent), mJpegWaiting(false) {
2663 mExitReadoutThread = false;
2664 mFlushFlag = false;
2665}
2666
2667EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2668 for (List<Request>::iterator i = mInFlightQueue.begin();
2669 i != mInFlightQueue.end(); i++) {
2670 delete i->buffers;
2671 delete i->sensorBuffers;
2672 }
2673}
2674
2675status_t EmulatedFakeCamera3::ReadoutThread::flushAllRequest(bool flag) {
2676 status_t res;
2677 mFlushFlag = flag;
2678 Mutex::Autolock l(mLock);
2679 CAMHAL_LOGDB("count = %d" , mInFlightQueue.size());
2680 if (mInFlightQueue.size() > 0) {
2681 mParent->mSensor->setFlushFlag(true);
2682 res = mFlush.waitRelative(mLock, kSyncWaitTimeout * 15);
2683 if (res != OK && res != TIMED_OUT) {
2684 ALOGE("%s: Error waiting for mFlush singnal : %d",
2685 __FUNCTION__, res);
2686 return INVALID_OPERATION;
2687 }
2688 DBG_LOGA("finish flush all request");
2689 }
2690 return 0;
2691}
2692
void EmulatedFakeCamera3::ReadoutThread::sendFlushSingnal(void) {
    // Wake any thread blocked in flushAllRequest() waiting on mFlush.
    // (The typo in the method name is part of the public interface and is
    // therefore kept.)
    Mutex::Autolock l(mLock);
    mFlush.signal();
}
2697
void EmulatedFakeCamera3::ReadoutThread::setFlushFlag(bool flag) {
    // NOTE(review): mFlushFlag is read by threadLoop() while holding mLock,
    // but this setter writes it without the lock — confirm that callers
    // serialize access, or this is a data race.
    mFlushFlag = flag;
}
2701
void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
    // Append a capture request to the in-flight queue and wake threadLoop(),
    // which waits on mInFlightSignal while the queue is empty.
    Mutex::Autolock l(mLock);

    mInFlightQueue.push_back(r);
    mInFlightSignal.signal();
}
2708
bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
    // Idle means: nothing queued AND no request currently being read out.
    Mutex::Autolock l(mLock);
    return mInFlightQueue.empty() && !mThreadActive;
}
2713
2714status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2715 status_t res;
2716 Mutex::Autolock l(mLock);
2717 CAMHAL_LOGVB("%s , E" , __FUNCTION__);
2718 int loopCount = 0;
2719 while (mInFlightQueue.size() >= kMaxQueueSize) {
2720 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2721 if (res != OK && res != TIMED_OUT) {
2722 ALOGE("%s: Error waiting for in-flight queue to shrink",
2723 __FUNCTION__);
2724 return INVALID_OPERATION;
2725 }
2726 if (loopCount == kMaxWaitLoops) {
2727 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2728 __FUNCTION__);
2729 return TIMED_OUT;
2730 }
2731 loopCount++;
2732 }
2733 return OK;
2734}
2735
2736status_t EmulatedFakeCamera3::ReadoutThread::setJpegCompressorListener(EmulatedFakeCamera3 *parent) {
2737 status_t res;
2738 res = mParent->mJpegCompressor->setlistener(this);
2739 if (res != NO_ERROR) {
2740 ALOGE("%s: set JpegCompressor Listner failed",__FUNCTION__);
2741 }
2742 return res;
2743}
2744
2745status_t EmulatedFakeCamera3::ReadoutThread::startJpegCompressor(EmulatedFakeCamera3 *parent) {
2746 status_t res;
2747 res = mParent->mJpegCompressor->start();
2748 if (res != NO_ERROR) {
2749 ALOGE("%s: JpegCompressor start failed",__FUNCTION__);
2750 }
2751 return res;
2752}
2753
2754status_t EmulatedFakeCamera3::ReadoutThread::shutdownJpegCompressor(EmulatedFakeCamera3 *parent) {
2755 status_t res;
2756 res = mParent->mJpegCompressor->cancel();
2757 if (res != OK) {
2758 ALOGE("%s: JpegCompressor cancel failed",__FUNCTION__);
2759 }
2760 return res;
2761}
2762
void EmulatedFakeCamera3::ReadoutThread::sendExitReadoutThreadSignal(void) {
    // Ask threadLoop() to exit at its next check, waking it if it is blocked
    // waiting for requests.
    // NOTE(review): mExitReadoutThread is written and mInFlightSignal is
    // signalled without holding mLock — confirm a waiter cannot miss the
    // wakeup between its flag check and its wait.
    mExitReadoutThread = true;
    mInFlightSignal.signal();
}
2767
bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
    /**
     * One iteration of the readout loop: pick up a queued request, wait for
     * the sensor frame, hand BLOB (JPEG) buffers to the async compressor,
     * and return all other buffers plus the result metadata to the
     * framework. Returns false to stop the thread, true to run again.
     */
    status_t res;
    ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);

    // First wait for a request from the in-flight queue
    if (mExitReadoutThread) {
        return false;
    }

    // If a flush is armed and the pipeline is completely empty, tell the
    // flusher it can proceed.
    {
        Mutex::Autolock l(mLock);
        if ((mInFlightQueue.size() == 0) && (mFlushFlag) &&
                (mCurrentRequest.settings.isEmpty())) {
            mFlush.signal();
        }
    }

    // Dequeue the next request only when the previous one has been fully
    // retired (its settings were cleared at the end of the last iteration).
    if (mCurrentRequest.settings.isEmpty()) {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
            if (res == TIMED_OUT) {
                ALOGVV("%s: ReadoutThread: Timed out waiting for request",
                        __FUNCTION__);
                return true;
            } else if (res != NO_ERROR) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                return false;
            }
        }

        if (mExitReadoutThread) {
            return false;
        }

        // NOTE(review): if the wait above returns NO_ERROR without the queue
        // having been filled (e.g. a spurious wakeup), begin() is
        // dereferenced on an empty list — confirm the signalers guarantee a
        // non-empty queue here.
        // Take ownership of the front request's buffers and metadata.
        mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
        mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
        mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
        mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
        mCurrentRequest.havethumbnail = mInFlightQueue.begin()->havethumbnail;
        mInFlightQueue.erase(mInFlightQueue.begin());
        // Wake any producer blocked in waitForReadout() on a full queue.
        mInFlightSignal.signal();
        mThreadActive = true;
        ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
                mCurrentRequest.frameNumber);
    }

    // Then wait for it to be delivered from the sensor
    ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
            __FUNCTION__);

    nsecs_t captureTime;
    // waitForNewFrame: 0 => timed out (retry), -1 => sensor thread exited.
    status_t gotFrame =
            mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
    if (gotFrame == 0) {
        ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
                __FUNCTION__);
        return true;
    }

    if (gotFrame == -1) {
        DBG_LOGA("Sensor thread had exited , here should exit ReadoutThread Loop");
        return false;
    }

    ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
            mCurrentRequest.frameNumber, captureTime);

    // Check if we need to JPEG encode a buffer, and send it for async
    // compression if so. Otherwise prepare the buffer for return.
    bool needJpeg = false;
    HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
    while (buf != mCurrentRequest.buffers->end()) {
        bool goodBuffer = true;
        if ( buf->stream->format ==
                HAL_PIXEL_FORMAT_BLOB) {
            // BLOB streams go to the async JPEG compressor; the buffer is
            // removed from this request and returned via onJpegDone().
            Mutex::Autolock jl(mJpegLock);
            needJpeg = true;
            CaptureRequest currentcapture;
            currentcapture.frameNumber = mCurrentRequest.frameNumber;
            currentcapture.sensorBuffers = mCurrentRequest.sensorBuffers;
            currentcapture.buf = buf;
            currentcapture.mNeedThumbnail = mCurrentRequest.havethumbnail;
            mParent->mJpegCompressor->queueRequest(currentcapture);
            //this sensorBuffers delete in the jpegcompress;
            mCurrentRequest.sensorBuffers = NULL;
            buf = mCurrentRequest.buffers->erase(buf);
            continue;
        }
        // Non-BLOB buffer: release the CPU mapping and mark it ready for
        // return to the framework.
        GraphicBufferMapper::get().unlock(*(buf->buffer));

        buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
                CAMERA3_BUFFER_STATUS_ERROR;
        buf->acquire_fence = -1;
        buf->release_fence = -1;

        ++buf;
    } // end while

    // Construct result for all completed buffers and results

    camera3_capture_result result;

    mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
            &captureTime, 1);

    memset(&result, 0, sizeof(result));
    result.frame_number = mCurrentRequest.frameNumber;
    result.result = mCurrentRequest.settings.getAndLock();
    result.num_output_buffers = mCurrentRequest.buffers->size();
    result.output_buffers = mCurrentRequest.buffers->array();
    result.partial_result = 1;

    // Go idle if queue is empty, before sending result

    bool signalIdle = false;
    {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            mThreadActive = false;
            signalIdle = true;
        }
    }

    // Notify the parent outside mLock to avoid holding two locks at once.
    if (signalIdle) mParent->signalReadoutIdle();

    // Send it off to the framework
    ALOGVV("%s: ReadoutThread: Send result to framework",
            __FUNCTION__);
    mParent->sendCaptureResult(&result);

    // Clean up
    mCurrentRequest.settings.unlock(result.result);

    delete mCurrentRequest.buffers;
    mCurrentRequest.buffers = NULL;
    if (!needJpeg) {
        // When a JPEG was queued, sensorBuffers ownership moved to the
        // compressor above; otherwise it is released here.
        delete mCurrentRequest.sensorBuffers;
        mCurrentRequest.sensorBuffers = NULL;
    }
    mCurrentRequest.settings.clear();
    CAMHAL_LOGVB("%s , X " , __FUNCTION__);
    return true;
}
2913
2914void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2915 const StreamBuffer &jpegBuffer, bool success , CaptureRequest &r) {
2916 Mutex::Autolock jl(mJpegLock);
2917 GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));
2918
2919 mJpegHalBuffer = *(r.buf);
2920 mJpegHalBuffer.status = success ?
2921 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2922 mJpegHalBuffer.acquire_fence = -1;
2923 mJpegHalBuffer.release_fence = -1;
2924 mJpegWaiting = false;
2925
2926 camera3_capture_result result;
2927 result.frame_number = r.frameNumber;
2928 result.result = NULL;
2929 result.num_output_buffers = 1;
2930 result.output_buffers = &mJpegHalBuffer;
2931 result.partial_result = 1;
2932
2933 if (!success) {
2934 ALOGE("%s: Compression failure, returning error state buffer to"
2935 " framework", __FUNCTION__);
2936 } else {
2937 DBG_LOGB("%s: Compression complete, returning buffer to framework",
2938 __FUNCTION__);
2939 }
2940
2941 mParent->sendCaptureResult(&result);
2942
2943}
2944
2945void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
2946 const StreamBuffer &inputBuffer) {
2947 // Should never get here, since the input buffer has to be returned
2948 // by end of processCaptureRequest
2949 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2950}
2951
2952
2953}; // namespace android
2954