summaryrefslogtreecommitdiff
path: root/v3/EmulatedFakeCamera3.cpp (plain)
blob: c9ca6c18a092fdeab2a202dd04f014d5533b0815
1/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/*
18 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22#include <inttypes.h>
23
24//#define LOG_NDEBUG 0
25//#define LOG_NNDEBUG 0
26#define LOG_TAG "EmulatedCamera_FakeCamera3"
27#include <utils/Log.h>
28
29#include "EmulatedFakeCamera3.h"
30#include "EmulatedCameraFactory.h"
31#include <ui/Fence.h>
32#include <ui/Rect.h>
33#include <ui/GraphicBufferMapper.h>
34#include <sys/types.h>
35
36#include <cutils/properties.h>
37#include "fake-pipeline2/Sensor.h"
38#include "fake-pipeline2/JpegCompressor.h"
39#include <cmath>
40#include <gralloc_priv.h>
41#include <binder/IPCThreadState.h>
42
43#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
44#define ALOGVV ALOGV
45#else
46#define ALOGVV(...) ((void)0)
47#endif
48
49namespace android {
50
51/**
52 * Constants for camera capabilities
53 */
54
55const int64_t USEC = 1000LL;
56const int64_t MSEC = USEC * 1000LL;
57const int64_t SEC = MSEC * 1000LL;
58
59
60const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
61 //HAL_PIXEL_FORMAT_RAW_SENSOR,
62 HAL_PIXEL_FORMAT_BLOB,
63 //HAL_PIXEL_FORMAT_RGBA_8888,
64 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
65 // These are handled by YCbCr_420_888
66 HAL_PIXEL_FORMAT_YV12,
67 HAL_PIXEL_FORMAT_YCrCb_420_SP,
68 //HAL_PIXEL_FORMAT_YCbCr_422_I,
69 HAL_PIXEL_FORMAT_YCbCr_420_888
70};
71
72const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
73 640, 480
74 // Sensor::kResolution[0], Sensor::kResolution[1]
75};
76
77const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
78 (const uint64_t)Sensor::kFrameDurationRange[0]
79};
80
81const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[6] = {
82 640, 480, 320, 240,// 1280, 720
83 // Sensor::kResolution[0], Sensor::kResolution[1]
84};
85
86const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
87 640, 480, 320, 240
88 // Sensor::kResolution[0], Sensor::kResolution[1]
89};
90
91const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
92 (const uint64_t)Sensor::kFrameDurationRange[0]
93};
94
95const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
96 1280,720
97 // Sensor::kResolution[0], Sensor::kResolution[1]
98};
99
100const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
101 640, 480
102 // Sensor::kResolution[0], Sensor::kResolution[1]
103};
104
105
106const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
107 (const uint64_t)Sensor::kFrameDurationRange[0]
108};
109
110/**
111 * 3A constants
112 */
113
114// Default exposure and gain targets for different scenarios
115const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
116const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
117const int EmulatedFakeCamera3::kNormalSensitivity = 100;
118const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
119const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
120const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
121const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
122const float EmulatedFakeCamera3::kExposureWanderMin = -2;
123const float EmulatedFakeCamera3::kExposureWanderMax = 1;
124
125/**
126 * Camera device lifecycle methods
127 */
128static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
129jpegsize EmulatedFakeCamera3::getMaxJpegResolution(uint32_t picSizes[],int count) {
130 uint32_t maxJpegWidth = 0, maxJpegHeight = 0;
131 jpegsize maxJpegResolution;
132 for (int i=0; i < count; i+= 4) {
133 uint32_t width = picSizes[i+1];
134 uint32_t height = picSizes[i+2];
135 if (picSizes[i+0] == HAL_PIXEL_FORMAT_BLOB &&
136 (width * height > maxJpegWidth * maxJpegHeight)) {
137 maxJpegWidth = width;
138 maxJpegHeight = height;
139 }
140 }
141 maxJpegResolution.width = maxJpegWidth;
142 maxJpegResolution.height = maxJpegHeight;
143 return maxJpegResolution;
144}
145ssize_t EmulatedFakeCamera3::getJpegBufferSize(int width, int height) {
146 if (maxJpegResolution.width == 0) {
147 return BAD_VALUE;
148 }
149 ssize_t maxJpegBufferSize = JpegCompressor::kMaxJpegSize;
150
151 // Calculate final jpeg buffer size for the given resolution.
152 float scaleFactor = ((float) (width * height)) /
153 (maxJpegResolution.width * maxJpegResolution.height);
154 ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize;
155 // Bound the buffer size to [MIN_JPEG_BUFFER_SIZE, maxJpegBufferSize].
156 if (jpegBufferSize > maxJpegBufferSize) {
157 jpegBufferSize = maxJpegBufferSize;
158 } else if (jpegBufferSize < kMinJpegBufferSize) {
159 jpegBufferSize = kMinJpegBufferSize;
160 }
161 return jpegBufferSize;
162}
163
// Construct the fake camera device. Only cheap state setup happens here;
// the sensor and worker threads are created later in connectCamera().
EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, struct hw_module_t* module) :
        EmulatedCamera3(cameraId, module) {
    ALOGI("Constructing emulated fake camera 3 cameraID:%d", mCameraID);

    // Request-template cache starts empty; entries are built lazily by
    // constructDefaultRequestSettings() and freed in the destructor.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
        mDefaultTemplates[i] = NULL;
    }

    /**
     * Front cameras = limited mode
     * Back cameras = full mode
     */
    //TODO limited or full mode, read this from camera driver
    //mFullMode = facingBack;
    mCameraStatus = CAMERA_INIT;
    // Capability bits and rotate support are probed from the sensor ioctls
    // during connectCamera(); default to "none" until then.
    mSupportCap = 0;
    mSupportRotate = 0;
    mFullMode = 0;

    // Load per-APK configuration overrides (used by getCameraInfo()).
    gLoadXml.parseXMLFile();
}
185
186EmulatedFakeCamera3::~EmulatedFakeCamera3() {
187 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
188 if (mDefaultTemplates[i] != NULL) {
189 free_camera_metadata(mDefaultTemplates[i]);
190 }
191 }
192
193 if (mCameraInfo != NULL) {
194 CAMHAL_LOGIA("free mCameraInfo");
195 free_camera_metadata(mCameraInfo);
196 mCameraInfo = NULL;
197 }
198}
199
// One-time initialization: builds the static metadata then delegates to the
// base class. Must be called exactly once, while the device is still in the
// initial STATUS_ERROR state.
status_t EmulatedFakeCamera3::Initialize() {
    DBG_LOGB("mCameraID=%d,mStatus=%d,ddd\n", mCameraID, mStatus);
    status_t res;

#ifdef HAVE_VERSION_INFO
    // Build provenance banner, compiled in only when the build system
    // provides the CAMHAL_* version macros.
    CAMHAL_LOGIB("\n--------------------------------\n"
                  "author:aml.sh multi-media team\n"
                  "branch name: %s\n"
                  "git version: %s \n"
                  "last changed: %s\n"
                  "build-time: %s\n"
                  "build-name: %s\n"
                  "uncommitted-file-num:%d\n"
                  "ssh user@%s, cd %s\n"
                  "hostname %s\n"
                  "--------------------------------\n",
                  CAMHAL_BRANCH_NAME,
                  CAMHAL_GIT_VERSION,
                  CAMHAL_LAST_CHANGED,
                  CAMHAL_BUILD_TIME,
                  CAMHAL_BUILD_NAME,
                  CAMHAL_GIT_UNCOMMIT_FILE_NUM,
                  CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
                  );
#endif


    // STATUS_ERROR is the pre-init state; any other value means Initialize()
    // already ran.
    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    // Build the static camera characteristics (mCameraInfo).
    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Base class transitions the device to STATUS_CLOSED on success.
    return EmulatedCamera3::Initialize();
}
241
// Open the device: start the sensor, create the readout thread and JPEG
// compressor, and reset the fake 3A state. Requires the device to be closed
// and (hot)plugged.
// NOTE(review): on the later error paths (JPEG compressor setup or thread
// run failing) the already-started sensor is not shut down before returning —
// confirm callers can recover from a failed connect.
status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGA("ddd");
    Mutex::Autolock l(mLock);
    status_t res;

    if ((mStatus != STATUS_CLOSED) || !mPlugged) {
        ALOGE("%s: Can't connect in state %d, mPlugged=%d",
                __FUNCTION__, mStatus, mPlugged);
        return INVALID_OPERATION;
    }

    mSensor = new Sensor();
    // Register for sensor events (e.g. frame completion callbacks).
    mSensor->setSensorListener(this);

    res = mSensor->startUp(mCameraID);
    DBG_LOGB("mSensor startUp, mCameraID=%d\n", mCameraID);
    if (res != NO_ERROR) return res;

    // Probe ioctl-backed capabilities; IOCTL_MASK_ROTATE indicates the
    // sensor can rotate frames in hardware.
    mSupportCap = mSensor->IoctlStateProbe();
    if (mSupportCap & IOCTL_MASK_ROTATE) {
        mSupportRotate = true;
    }

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor();

    res = mReadoutThread->setJpegCompressorListener(this);
    if (res != NO_ERROR) {
        return res;
    }
    res = mReadoutThread->startJpegCompressor(this);
    if (res != NO_ERROR) {
        return res;
    }

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A

    mControlMode  = ANDROID_CONTROL_MODE_AUTO;
    mFacePriority = false;
    mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
    // AE starts converged (rather than inactive) so the first results report
    // a usable exposure immediately.
    mAeState      = ANDROID_CONTROL_AE_STATE_CONVERGED;//ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAfTriggerId  = 0;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity  = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}
297
298status_t EmulatedFakeCamera3::plugCamera() {
299 {
300 Mutex::Autolock l(mLock);
301
302 if (!mPlugged) {
303 CAMHAL_LOGIB("%s: Plugged back in", __FUNCTION__);
304 mPlugged = true;
305 }
306 }
307
308 return NO_ERROR;
309}
310
311status_t EmulatedFakeCamera3::unplugCamera() {
312 {
313 Mutex::Autolock l(mLock);
314
315 if (mPlugged) {
316 CAMHAL_LOGIB("%s: Unplugged camera", __FUNCTION__);
317 mPlugged = false;
318 }
319 }
320 return true;
321}
322
323camera_device_status_t EmulatedFakeCamera3::getHotplugStatus() {
324 Mutex::Autolock l(mLock);
325 return mPlugged ?
326 CAMERA_DEVICE_STATUS_PRESENT :
327 CAMERA_DEVICE_STATUS_NOT_PRESENT;
328}
329
330bool EmulatedFakeCamera3::getCameraStatus()
331{
332 CAMHAL_LOGVB("%s, mCameraStatus = %d",__FUNCTION__,mCameraStatus);
333 bool ret = false;
334 if (mStatus == STATUS_CLOSED) {
335 ret = true;
336 } else {
337 ret = false;
338 }
339 return ret;
340}
341
// Close the device: shut the sensor down, stop the JPEG compressor, and
// tear down the readout thread and per-stream bookkeeping.
status_t EmulatedFakeCamera3::closeCamera() {
    CAMHAL_LOGVB("%s, %d\n", __FUNCTION__, __LINE__);
    status_t res;
    {
        Mutex::Autolock l(mLock);
        // Already closed: nothing to do.
        if (mStatus == STATUS_CLOSED) return OK;
        //res = mSensor->streamOff();

        res = mSensor->shutDown();
        if (res != NO_ERROR) {
            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
            return res;
        }
        mSensor.clear();

        res = mReadoutThread->shutdownJpegCompressor(this);
        if (res != OK) {
            ALOGE("%s: Unable to shut down JpegCompressor: %d", __FUNCTION__, res);
            return res;
        }

        mReadoutThread->requestExit();
    }

    // Deliberately joined outside the lock: the readout thread may need
    // mLock to finish, so holding it here could deadlock.
    mReadoutThread->join();

    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = NULL;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }

    // Base class performs the final state transition to STATUS_CLOSED.
    return EmulatedCamera3::closeCamera();
}
383
384status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
385 char property[PROPERTY_VALUE_MAX];
386 char* tempApkName = gLoadXml.getApkPackageName(IPCThreadState::self()->getCallingPid());
387 List_Or * temp=new List_Or();
388 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
389 if (mSensorType == SENSOR_USB) {
390 if (mFacingBack) {
391 property_get("hw.camera.orientation.back", property, "0");
392 } else {
393 property_get("hw.camera.orientation.front", property, "0");
394 }
395 int32_t orientation = atoi(property);
396
397 if (gLoadXml.findApkCp(tempApkName, temp)) {
398 orientation = atoi(temp->pro);
399 }
400 if (temp != NULL) {
401 delete temp;
402 temp = NULL;
403 }
404
405 property_get("hw.camera.usb.orientation_offset", property, "0");
406 orientation += atoi(property);
407 orientation %= 360;
408 info->orientation = orientation ;
409 } else {
410 if (mFacingBack) {
411 property_get("hw.camera.orientation.back", property, "270");
412 } else {
413 property_get("hw.camera.orientation.front", property, "90");
414 }
415 info->orientation = atoi(property);
416 }
417 return EmulatedCamera3::getCameraInfo(info);
418}
419
420/**
421 * Camera3 interface methods
422 */
423
424void EmulatedFakeCamera3::getValidJpegSize(uint32_t picSizes[], uint32_t availablejpegsize[], int count) {
425 int i,j,k;
426 bool valid = true;
427 for (i=0,j=0; i < count; i+= 4) {
428 for (k= 0; k<=j ;k+=2) {
429 if ((availablejpegsize[k]*availablejpegsize[k+1]) == (picSizes[i+1]*picSizes[i+2])) {
430
431 valid = false;
432 }
433 }
434 if (valid) {
435 availablejpegsize[j] = picSizes[i+1];
436 availablejpegsize[j+1] = picSizes[i+2];
437 j+=2;
438 }
439 valid = true;
440 }
441}
442
443status_t EmulatedFakeCamera3::checkValidJpegSize(uint32_t width, uint32_t height) {
444
445 int validsizecount = 0;
446 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
447 for (uint32_t f = 0; f < count; f+=2) {
448 if (mAvailableJpegSize[f] != 0) {
449 if ((mAvailableJpegSize[f] == width)&&(mAvailableJpegSize[f+1] == height)) {
450 validsizecount++;
451 }
452 } else {
453 break;
454 }
455 }
456 if (validsizecount == 0)
457 return BAD_VALUE;
458 return OK;
459}
460
461status_t EmulatedFakeCamera3::configureStreams(
462 camera3_stream_configuration *streamList) {
463 Mutex::Autolock l(mLock);
464 uint32_t width, height, pixelfmt;
465 bool isRestart = false;
466 DBG_LOGB("%s: %d streams", __FUNCTION__, streamList->num_streams);
467
468 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
469 ALOGE("%s: Cannot configure streams in state %d",
470 __FUNCTION__, mStatus);
471 return NO_INIT;
472 }
473
474 /**
475 * Sanity-check input list.
476 */
477 if (streamList == NULL) {
478 ALOGE("%s: NULL stream configuration", __FUNCTION__);
479 return BAD_VALUE;
480 }
481
482 if (streamList->streams == NULL) {
483 ALOGE("%s: NULL stream list", __FUNCTION__);
484 return BAD_VALUE;
485 }
486
487 if (streamList->num_streams < 1) {
488 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
489 streamList->num_streams);
490 return BAD_VALUE;
491 }
492
493 camera3_stream_t *inputStream = NULL;
494 for (size_t i = 0; i < streamList->num_streams; i++) {
495 camera3_stream_t *newStream = streamList->streams[i];
496
497 if (newStream == NULL) {
498 ALOGE("%s: Stream index %zu was NULL",
499 __FUNCTION__, i);
500 return BAD_VALUE;
501 }
502
503 if (newStream->max_buffers <= 0) {
504 isRestart = true;//mSensor->isNeedRestart(newStream->width, newStream->height, newStream->format);
505 DBG_LOGB("format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d, isRestart=%d\n",
506 newStream->format, newStream->width, newStream->height,
507 newStream->stream_type, newStream->max_buffers,
508 isRestart);
509 }
510 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
511 __FUNCTION__, newStream, i, newStream->stream_type,
512 newStream->usage,
513 newStream->format);
514
515 if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
516 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
517 if (inputStream != NULL) {
518
519 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
520 return BAD_VALUE;
521 }
522 inputStream = newStream;
523 }
524
525 bool validFormat = false;
526 for (size_t f = 0;
527 f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
528 f++) {
529 if (newStream->format == kAvailableFormats[f]) {
530 validFormat = true;
531 //HAL_PIXEL_FORMAT_YCrCb_420_SP,
532 if (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format)
533 newStream->format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
534
535 break;
536 }
537 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
538 }
539 if (!validFormat) {
540 ALOGE("%s: Unsupported stream format 0x%x requested",
541 __FUNCTION__, newStream->format);
542 return BAD_VALUE;
543 }
544
545 status_t ret = checkValidJpegSize(newStream->width, newStream->height);
546 if (ret != OK) {
547 return BAD_VALUE;
548 }
549
550 }
551 mInputStream = inputStream;
552 width = 0;
553 height = 0;
554 for (size_t i = 0; i < streamList->num_streams; i++) {
555 camera3_stream_t *newStream = streamList->streams[i];
556 DBG_LOGB("find propert width and height, format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d\n",
557 newStream->format, newStream->width, newStream->height, newStream->stream_type, newStream->max_buffers);
558 if ((HAL_PIXEL_FORMAT_BLOB != newStream->format) &&
559 (CAMERA3_STREAM_OUTPUT == newStream->stream_type)) {
560
561 if (width < newStream->width)
562 width = newStream->width;
563
564 if (height < newStream->height)
565 height = newStream->height;
566
567 pixelfmt = (uint32_t)newStream->format;
568 if (HAL_PIXEL_FORMAT_YCbCr_420_888 == pixelfmt)
569 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
570 }
571
572 }
573
574 //TODO modify this ugly code
575 if (isRestart) {
576 isRestart = mSensor->isNeedRestart(width, height, pixelfmt);
577 }
578
579 if (isRestart) {
580 mSensor->streamOff();
581 pixelfmt = mSensor->halFormatToSensorFormat(pixelfmt);
582 mSensor->setOutputFormat(width, height, pixelfmt, 0);
583 mSensor->streamOn();
584 DBG_LOGB("width=%d, height=%d, pixelfmt=%.4s\n",
585 width, height, (char*)&pixelfmt);
586 }
587
588 /**
589 * Initially mark all existing streams as not alive
590 */
591 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
592 PrivateStreamInfo *privStream =
593 static_cast<PrivateStreamInfo*>((*s)->priv);
594 privStream->alive = false;
595 }
596
597 /**
598 * Find new streams and mark still-alive ones
599 */
600 for (size_t i = 0; i < streamList->num_streams; i++) {
601 camera3_stream_t *newStream = streamList->streams[i];
602 if (newStream->priv == NULL) {
603 // New stream, construct info
604 PrivateStreamInfo *privStream = new PrivateStreamInfo();
605 privStream->alive = true;
606 privStream->registered = false;
607
608 newStream->usage =
609 mSensor->getStreamUsage(newStream->stream_type);
610
611 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
612 newStream->max_buffers = kMaxBufferCount;
613 newStream->priv = privStream;
614 mStreams.push_back(newStream);
615 } else {
616 // Existing stream, mark as still alive.
617 PrivateStreamInfo *privStream =
618 static_cast<PrivateStreamInfo*>(newStream->priv);
619 CAMHAL_LOGDA("Existing stream ?");
620 privStream->alive = true;
621 }
622 DBG_LOGB("%d, newStream=%p, stream_type=%d, usage=%x, priv=%p, w*h=%dx%d\n",
623 i, newStream, newStream->stream_type, newStream->usage, newStream->priv, newStream->width, newStream->height);
624 }
625
626 /**
627 * Reap the dead streams
628 */
629 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
630 PrivateStreamInfo *privStream =
631 static_cast<PrivateStreamInfo*>((*s)->priv);
632 if (!privStream->alive) {
633 DBG_LOGA("delete not alive streams");
634 (*s)->priv = NULL;
635 delete privStream;
636 s = mStreams.erase(s);
637 } else {
638 ++s;
639 }
640 }
641
642 /**
643 * Can't reuse settings across configure call
644 */
645 mPrevSettings.clear();
646
647 return OK;
648}
649
650status_t EmulatedFakeCamera3::registerStreamBuffers(
651 const camera3_stream_buffer_set *bufferSet) {
652 DBG_LOGB("%s: E", __FUNCTION__);
653 Mutex::Autolock l(mLock);
654
655 /**
656 * Sanity checks
657 */
658 DBG_LOGA("==========sanity checks\n");
659
660 // OK: register streams at any time during configure
661 // (but only once per stream)
662 if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) {
663 ALOGE("%s: Cannot register buffers in state %d",
664 __FUNCTION__, mStatus);
665 return NO_INIT;
666 }
667
668 if (bufferSet == NULL) {
669 ALOGE("%s: NULL buffer set!", __FUNCTION__);
670 return BAD_VALUE;
671 }
672
673 StreamIterator s = mStreams.begin();
674 for (; s != mStreams.end(); ++s) {
675 if (bufferSet->stream == *s) break;
676 }
677 if (s == mStreams.end()) {
678 ALOGE("%s: Trying to register buffers for a non-configured stream!",
679 __FUNCTION__);
680 return BAD_VALUE;
681 }
682
683 /**
684 * Register the buffers. This doesn't mean anything to the emulator besides
685 * marking them off as registered.
686 */
687
688 PrivateStreamInfo *privStream =
689 static_cast<PrivateStreamInfo*>((*s)->priv);
690
691#if 0
692 if (privStream->registered) {
693 ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__);
694 return BAD_VALUE;
695 }
696#endif
697
698 privStream->registered = true;
699
700 return OK;
701}
702
703const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
704 int type) {
705 DBG_LOGB("%s: E", __FUNCTION__);
706 Mutex::Autolock l(mLock);
707
708 if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
709 ALOGE("%s: Unknown request settings template: %d",
710 __FUNCTION__, type);
711 return NULL;
712 }
713
714 /**
715 * Cache is not just an optimization - pointer returned has to live at
716 * least as long as the camera device instance does.
717 */
718 if (mDefaultTemplates[type] != NULL) {
719 return mDefaultTemplates[type];
720 }
721
722 CameraMetadata settings;
723
724 /** android.request */
725 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
726 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
727
728 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
729 settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
730
731 static const int32_t id = 0;
732 settings.update(ANDROID_REQUEST_ID, &id, 1);
733
734 static const int32_t frameCount = 0;
735 settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
736
737 /** android.lens */
738
739 static const float focusDistance = 0;
740 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
741
742 static const float aperture = 2.8f;
743 settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
744
745// static const float focalLength = 5.0f;
746 static const float focalLength = 3.299999952316284f;
747 settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
748
749 static const float filterDensity = 0;
750 settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
751
752 static const uint8_t opticalStabilizationMode =
753 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
754 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
755 &opticalStabilizationMode, 1);
756
757 // FOCUS_RANGE set only in frame
758
759 /** android.sensor */
760
761 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
762 settings.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
763 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
764 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
765 static const int64_t exposureTime = 10 * MSEC;
766 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
767
768 int64_t frameDuration = mSensor->getMinFrameDuration();
769 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
770
771 static const int32_t sensitivity = 100;
772 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
773
774 static const int64_t rollingShutterSkew = 0;
775 settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
776 // TIMESTAMP set only in frame
777
778 /** android.flash */
779
780 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
781 settings.update(ANDROID_FLASH_STATE, &flashstate, 1);
782
783 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
784 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
785
786 static const uint8_t flashPower = 10;
787 settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
788
789 static const int64_t firingTime = 0;
790 settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
791
792 /** Processing block modes */
793 uint8_t hotPixelMode = 0;
794 uint8_t demosaicMode = 0;
795 uint8_t noiseMode = 0;
796 uint8_t shadingMode = 0;
797 uint8_t colorMode = 0;
798 uint8_t tonemapMode = 0;
799 uint8_t edgeMode = 0;
800 switch (type) {
801
802 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
803 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
804 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
805 // fall-through
806 case CAMERA3_TEMPLATE_STILL_CAPTURE:
807 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
808 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
809 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
810 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
811 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
812 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
813 break;
814 case CAMERA3_TEMPLATE_PREVIEW:
815 // fall-through
816 case CAMERA3_TEMPLATE_VIDEO_RECORD:
817 // fall-through
818 case CAMERA3_TEMPLATE_MANUAL:
819 // fall-through
820 default:
821 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
822 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
823 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
824 shadingMode = ANDROID_SHADING_MODE_FAST;
825 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
826 tonemapMode = ANDROID_TONEMAP_MODE_FAST;
827 edgeMode = ANDROID_EDGE_MODE_FAST;
828 break;
829 }
830 settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
831 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
832 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
833 settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
834 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
835 settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
836 settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
837
838 /** android.noise */
839 static const uint8_t noiseStrength = 5;
840 settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
841 static uint8_t availableNBModes[] = {
842 ANDROID_NOISE_REDUCTION_MODE_OFF,
843 ANDROID_NOISE_REDUCTION_MODE_FAST,
844 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
845 };
846 settings.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
847 availableNBModes, sizeof(availableNBModes)/sizeof(availableNBModes));
848
849
850 /** android.color */
851 static const float colorTransform[9] = {
852 1.0f, 0.f, 0.f,
853 0.f, 1.f, 0.f,
854 0.f, 0.f, 1.f
855 };
856 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
857
858 /** android.tonemap */
859 static const float tonemapCurve[4] = {
860 0.f, 0.f,
861 1.f, 1.f
862 };
863 settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
864 settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
865 settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
866
867 /** android.edge */
868 static const uint8_t edgeStrength = 5;
869 settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
870
871 /** android.scaler */
872 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
873 settings.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
874
875 static const int32_t cropRegion[] = {
876 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
877 };
878 settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
879
880 /** android.jpeg */
881 static const uint8_t jpegQuality = 80;
882 settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
883
884 static const int32_t thumbnailSize[2] = {
885 160, 120
886 };
887 settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
888
889 static const uint8_t thumbnailQuality = 80;
890 settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
891
892 static const double gpsCoordinates[3] = {
893 0, 0, 0
894 };
895 settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); //default 2 value
896
897 static const uint8_t gpsProcessingMethod[32] = "None";
898 settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
899
900 static const int64_t gpsTimestamp = 0;
901 settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
902
903 static const int32_t jpegOrientation = 0;
904 settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
905
906 /** android.stats */
907
908 static const uint8_t faceDetectMode =
909 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
910 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
911
912 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
913 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
914
915 static const uint8_t sharpnessMapMode =
916 ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
917 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
918
919 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
920 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
921 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
922 settings.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
923 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
924 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
925 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
926 // sharpnessMap only in frames
927
928 /** android.control */
929
930 uint8_t controlIntent = 0;
931 uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; //default value
932 uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
933 uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
934 switch (type) {
935 case CAMERA3_TEMPLATE_PREVIEW:
936 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
937 break;
938 case CAMERA3_TEMPLATE_STILL_CAPTURE:
939 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
940 break;
941 case CAMERA3_TEMPLATE_VIDEO_RECORD:
942 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
943 break;
944 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
945 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
946 break;
947 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
948 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
949 break;
950 case CAMERA3_TEMPLATE_MANUAL:
951 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
952 controlMode = ANDROID_CONTROL_MODE_OFF;
953 aeMode = ANDROID_CONTROL_AE_MODE_OFF;
954 awbMode = ANDROID_CONTROL_AWB_MODE_OFF;
955 break;
956 default:
957 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
958 break;
959 }
960 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
961 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
962
963 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
964 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
965
966 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
967 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
968
969 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
970
971 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
972 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
973
974 static const uint8_t aePrecaptureTrigger =
975 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
976 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
977
978 static const int32_t mAfTriggerId = 0;
979 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,&mAfTriggerId, 1);
980 static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
981 settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
982
983 static const int32_t controlRegions[5] = {
984 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
985 1000
986 };
987// settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
988
989 static const int32_t aeExpCompensation = 0;
990 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
991
992 static const int32_t aeTargetFpsRange[2] = {
993 30, 30
994 };
995 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
996
997 static const uint8_t aeAntibandingMode =
998 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
999 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
1000
1001 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
1002
1003 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
1004 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
1005
1006// settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
1007
1008 uint8_t afMode = 0;
1009 switch (type) {
1010 case CAMERA3_TEMPLATE_PREVIEW:
1011 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1012 break;
1013 case CAMERA3_TEMPLATE_STILL_CAPTURE:
1014 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1015 break;
1016 case CAMERA3_TEMPLATE_VIDEO_RECORD:
1017 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1018 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
1019 break;
1020 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
1021 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1022 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
1023 break;
1024 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
1025 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1026 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
1027 break;
1028 case CAMERA3_TEMPLATE_MANUAL:
1029 afMode = ANDROID_CONTROL_AF_MODE_OFF;
1030 break;
1031 default:
1032 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1033 break;
1034 }
1035 settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
1036
1037 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1038 settings.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
1039
1040// settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
1041
1042 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
1043 settings.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
1044 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1045 settings.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
1046 static const uint8_t vstabMode =
1047 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
1048 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
1049
1050 // aeState, awbState, afState only in frame
1051
1052 mDefaultTemplates[type] = settings.release();
1053
1054 return mDefaultTemplates[type];
1055}
1056
1057status_t EmulatedFakeCamera3::processCaptureRequest(
1058 camera3_capture_request *request) {
1059
1060 Mutex::Autolock l(mLock);
1061 status_t res;
1062
1063 /** Validation */
1064
1065 if (mStatus < STATUS_READY) {
1066 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
1067 mStatus);
1068 return INVALID_OPERATION;
1069 }
1070
1071 if (request == NULL) {
1072 ALOGE("%s: NULL request!", __FUNCTION__);
1073 return BAD_VALUE;
1074 }
1075
1076 uint32_t frameNumber = request->frame_number;
1077
1078 if (request->settings == NULL && mPrevSettings.isEmpty()) {
1079 ALOGE("%s: Request %d: NULL settings for first request after"
1080 "configureStreams()", __FUNCTION__, frameNumber);
1081 return BAD_VALUE;
1082 }
1083
1084 if (request->input_buffer != NULL &&
1085 request->input_buffer->stream != mInputStream) {
1086 DBG_LOGB("%s: Request %d: Input buffer not from input stream!",
1087 __FUNCTION__, frameNumber);
1088 DBG_LOGB("%s: Bad stream %p, expected: %p",
1089 __FUNCTION__, request->input_buffer->stream,
1090 mInputStream);
1091 DBG_LOGB("%s: Bad stream type %d, expected stream type %d",
1092 __FUNCTION__, request->input_buffer->stream->stream_type,
1093 mInputStream ? mInputStream->stream_type : -1);
1094
1095 return BAD_VALUE;
1096 }
1097
1098 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1099 ALOGE("%s: Request %d: No output buffers provided!",
1100 __FUNCTION__, frameNumber);
1101 return BAD_VALUE;
1102 }
1103
1104 // Validate all buffers, starting with input buffer if it's given
1105
1106 ssize_t idx;
1107 const camera3_stream_buffer_t *b;
1108 if (request->input_buffer != NULL) {
1109 idx = -1;
1110 b = request->input_buffer;
1111 } else {
1112 idx = 0;
1113 b = request->output_buffers;
1114 }
1115 do {
1116 PrivateStreamInfo *priv =
1117 static_cast<PrivateStreamInfo*>(b->stream->priv);
1118 if (priv == NULL) {
1119 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
1120 __FUNCTION__, frameNumber, idx);
1121 return BAD_VALUE;
1122 }
1123#if 0
1124 if (!priv->alive || !priv->registered) {
1125 ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream! alive=%d, registered=%d\n",
1126 __FUNCTION__, frameNumber, idx,
1127 priv->alive, priv->registered);
1128 //return BAD_VALUE;
1129 }
1130#endif
1131 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1132 ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
1133 __FUNCTION__, frameNumber, idx);
1134 return BAD_VALUE;
1135 }
1136 if (b->release_fence != -1) {
1137 ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
1138 __FUNCTION__, frameNumber, idx);
1139 return BAD_VALUE;
1140 }
1141 if (b->buffer == NULL) {
1142 ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
1143 __FUNCTION__, frameNumber, idx);
1144 return BAD_VALUE;
1145 }
1146 idx++;
1147 b = &(request->output_buffers[idx]);
1148 } while (idx < (ssize_t)request->num_output_buffers);
1149
1150 // TODO: Validate settings parameters
1151
1152 /**
1153 * Start processing this request
1154 */
1155
1156 mStatus = STATUS_ACTIVE;
1157
1158 CameraMetadata settings;
1159 camera_metadata_entry e;
1160
1161 if (request->settings == NULL) {
1162 settings.acquire(mPrevSettings);
1163 } else {
1164 settings = request->settings;
1165
1166 uint8_t antiBanding = 0;
1167 uint8_t effectMode = 0;
1168 int exposureCmp = 0;
1169
1170 e = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE);
1171 if (e.count == 0) {
1172 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1173 return BAD_VALUE;
1174 }
1175 antiBanding = e.data.u8[0];
1176 mSensor->setAntiBanding(antiBanding);
1177
1178 e = settings.find(ANDROID_CONTROL_EFFECT_MODE);
1179 if (e.count == 0) {
1180 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1181 return BAD_VALUE;
1182 }
1183 effectMode = e.data.u8[0];
1184 mSensor->setEffect(effectMode);
1185
1186
1187 e = settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
1188 if (e.count == 0) {
1189 ALOGE("%s: No exposure entry!", __FUNCTION__);
1190 //return BAD_VALUE;
1191 } else {
1192 exposureCmp = e.data.i32[0];
1193 DBG_LOGB("set expsore compensaton %d\n", exposureCmp);
1194 mSensor->setExposure(exposureCmp);
1195 }
1196
1197 int32_t cropRegion[4];
1198 int32_t cropWidth;
1199 int32_t outputWidth = request->output_buffers[0].stream->width;
1200
1201 e = settings.find(ANDROID_SCALER_CROP_REGION);
1202 if (e.count == 0) {
1203 ALOGE("%s: No corp region entry!", __FUNCTION__);
1204 //return BAD_VALUE;
1205 } else {
1206 cropRegion[0] = e.data.i32[0];
1207 cropRegion[1] = e.data.i32[1];
1208 cropWidth = cropRegion[2] = e.data.i32[2];
1209 cropRegion[3] = e.data.i32[3];
1210 for (int i = mZoomMin; i <= mZoomMax; i += mZoomStep) {
1211 //if ( (float) i / mZoomMin >= (float) outputWidth / cropWidth) {
1212 if ( i * cropWidth >= outputWidth * mZoomMin ) {
1213 mSensor->setZoom(i);
1214 break;
1215 }
1216 }
1217 DBG_LOGB("cropRegion:%d, %d, %d, %d\n", cropRegion[0], cropRegion[1],cropRegion[2],cropRegion[3]);
1218 }
1219 }
1220
1221 uint8_t len[] = {1};
1222 settings.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
1223
1224 uint8_t maxlen[] = {0};
1225 settings.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
1226
1227 res = process3A(settings);
1228 if (res != OK) {
1229 CAMHAL_LOGDB("%s: process3A failed!", __FUNCTION__);
1230 //return res;
1231 }
1232
1233 // TODO: Handle reprocessing
1234
1235 /**
1236 * Get ready for sensor config
1237 */
1238
1239 nsecs_t exposureTime;
1240 nsecs_t frameDuration;
1241 uint32_t sensitivity;
1242 bool needJpeg = false;
1243 struct ExifInfo info;
1244 ssize_t jpegbuffersize;
1245 uint32_t jpegpixelfmt;
1246 bool mHaveThumbnail = false;
1247
1248 exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
1249 frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
1250 sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
1251
1252 Buffers *sensorBuffers = new Buffers();
1253 HalBufferVector *buffers = new HalBufferVector();
1254
1255 sensorBuffers->setCapacity(request->num_output_buffers);
1256 buffers->setCapacity(request->num_output_buffers);
1257
1258 // Process all the buffers we got for output, constructing internal buffer
1259 // structures for them, and lock them for writing.
1260 for (size_t i = 0; i < request->num_output_buffers; i++) {
1261 const camera3_stream_buffer &srcBuf = request->output_buffers[i];
1262 const private_handle_t *privBuffer =
1263 (const private_handle_t*)(*srcBuf.buffer);
1264 StreamBuffer destBuf;
1265 destBuf.streamId = kGenericStreamId;
1266 destBuf.width = srcBuf.stream->width;
1267 destBuf.height = srcBuf.stream->height;
1268 destBuf.format = privBuffer->format; // Use real private format
1269 destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc
1270 destBuf.buffer = srcBuf.buffer;
1271 destBuf.share_fd = privBuffer->share_fd;
1272
1273 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
1274 needJpeg = true;
1275 memset(&info,0,sizeof(struct ExifInfo));
1276 info.orientation = settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
1277 jpegpixelfmt = mSensor->getOutputFormat();
1278 if (!mSupportRotate) {
1279 info.mainwidth = srcBuf.stream->width;
1280 info.mainheight = srcBuf.stream->height;
1281 } else {
1282 if ((info.orientation == 90) || (info.orientation == 270)) {
1283 info.mainwidth = srcBuf.stream->height;
1284 info.mainheight = srcBuf.stream->width;
1285 } else {
1286 info.mainwidth = srcBuf.stream->width;
1287 info.mainheight = srcBuf.stream->height;
1288 }
1289 }
1290 if ((jpegpixelfmt == V4L2_PIX_FMT_MJPEG)||(jpegpixelfmt == V4L2_PIX_FMT_YUYV)) {
1291 mSensor->setOutputFormat(info.mainwidth,info.mainheight,jpegpixelfmt,1);
1292 } else {
1293 mSensor->setOutputFormat(info.mainwidth,info.mainheight,V4L2_PIX_FMT_RGB24,1);
1294 }
1295 }
1296
1297 // Wait on fence
1298 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
1299 res = bufferAcquireFence->wait(kFenceTimeoutMs);
1300 if (res == TIMED_OUT) {
1301 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
1302 __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
1303 }
1304 if (res == OK) {
1305 // Lock buffer for writing
1306 const Rect rect(destBuf.width, destBuf.height);
1307 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1308 if (privBuffer->format == HAL_PIXEL_FORMAT_YCbCr_420_888/*HAL_PIXEL_FORMAT_YCrCb_420_SP*/) {
1309 android_ycbcr ycbcr = android_ycbcr();
1310 res = GraphicBufferMapper::get().lockYCbCr(
1311 *(destBuf.buffer),
1312 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK
1313 , rect,
1314 &ycbcr);
1315 // This is only valid because we know that emulator's
1316 // YCbCr_420_888 is really contiguous NV21 under the hood
1317 destBuf.img = static_cast<uint8_t*>(ycbcr.y);
1318 } else {
1319 ALOGE("Unexpected private format for flexible YUV: 0x%x",
1320 privBuffer->format);
1321 res = INVALID_OPERATION;
1322 }
1323 } else {
1324 res = GraphicBufferMapper::get().lock(*(destBuf.buffer),
1325 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK
1326 , rect,
1327 (void**)&(destBuf.img));
1328 }
1329 if (res != OK) {
1330 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
1331 __FUNCTION__, frameNumber, i);
1332 }
1333 }
1334
1335 if (res != OK) {
1336 // Either waiting or locking failed. Unlock locked buffers and bail
1337 // out.
1338 for (size_t j = 0; j < i; j++) {
1339 GraphicBufferMapper::get().unlock(
1340 *(request->output_buffers[i].buffer));
1341 }
1342 ALOGE("line:%d, format for this usage: %d x %d, usage %x, format=%x, returned\n",
1343 __LINE__, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format);
1344 return NO_INIT;
1345 }
1346
1347 sensorBuffers->push_back(destBuf);
1348 buffers->push_back(srcBuf);
1349 }
1350
1351 if (needJpeg) {
1352 if (!mSupportRotate) {
1353 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1354 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1355 } else {
1356 if ((info.orientation == 90) || (info.orientation == 270)) {
1357 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1358 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1359 } else {
1360 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1361 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1362 }
1363 }
1364 if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
1365 info.latitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[0];
1366 info.longitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[1];
1367 info.altitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[2];
1368 info.has_latitude = true;
1369 info.has_longitude = true;
1370 info.has_altitude = true;
1371 } else {
1372 info.has_latitude = false;
1373 info.has_longitude = false;
1374 info.has_altitude = false;
1375 }
1376 if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
1377 info.gpsProcessingMethod = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
1378 info.has_gpsProcessingMethod = true;
1379 } else {
1380 info.has_gpsProcessingMethod = false;
1381 }
1382 if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
1383 info.gpsTimestamp = settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
1384 info.has_gpsTimestamp = true;
1385 } else {
1386 info.has_gpsTimestamp = false;
1387 }
1388 if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
1389 info.focallen = settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
1390 info.has_focallen = true;
1391 } else {
1392 info.has_focallen = false;
1393 }
1394 jpegbuffersize = getJpegBufferSize(info.mainwidth,info.mainheight);
1395
1396 mJpegCompressor->SetMaxJpegBufferSize(jpegbuffersize);
1397 mJpegCompressor->SetExifInfo(info);
1398 mSensor->setPictureRotate(info.orientation);
1399 if ((info.thumbwidth > 0) && (info.thumbheight > 0)) {
1400 mHaveThumbnail = true;
1401 }
1402 DBG_LOGB("%s::thumbnailSize_width=%d,thumbnailSize_height=%d,mainsize_width=%d,mainsize_height=%d,jpegOrientation=%d",__FUNCTION__,
1403 info.thumbwidth,info.thumbheight,info.mainwidth,info.mainheight,info.orientation);
1404 }
1405 /**
1406 * Wait for JPEG compressor to not be busy, if needed
1407 */
1408#if 0
1409 if (needJpeg) {
1410 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1411 if (!ready) {
1412 ALOGE("%s: Timeout waiting for JPEG compression to complete!",
1413 __FUNCTION__);
1414 return NO_INIT;
1415 }
1416 }
1417#else
1418 while (needJpeg) {
1419 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1420 if (ready) {
1421 break;
1422 }
1423 }
1424#endif
1425 /**
1426 * Wait until the in-flight queue has room
1427 */
1428 res = mReadoutThread->waitForReadout();
1429 if (res != OK) {
1430 ALOGE("%s: Timeout waiting for previous requests to complete!",
1431 __FUNCTION__);
1432 return NO_INIT;
1433 }
1434
1435 /**
1436 * Wait until sensor's ready. This waits for lengthy amounts of time with
1437 * mLock held, but the interface spec is that no other calls may by done to
1438 * the HAL by the framework while process_capture_request is happening.
1439 */
1440 int syncTimeoutCount = 0;
1441 while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
1442 if (mStatus == STATUS_ERROR) {
1443 return NO_INIT;
1444 }
1445 if (syncTimeoutCount == kMaxSyncTimeoutCount) {
1446 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
1447 __FUNCTION__, frameNumber,
1448 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
1449 return NO_INIT;
1450 }
1451 syncTimeoutCount++;
1452 }
1453
1454 /**
1455 * Configure sensor and queue up the request to the readout thread
1456 */
1457 mSensor->setExposureTime(exposureTime);
1458 mSensor->setFrameDuration(frameDuration);
1459 mSensor->setSensitivity(sensitivity);
1460 mSensor->setDestinationBuffers(sensorBuffers);
1461 mSensor->setFrameNumber(request->frame_number);
1462
1463 ReadoutThread::Request r;
1464 r.frameNumber = request->frame_number;
1465 r.settings = settings;
1466 r.sensorBuffers = sensorBuffers;
1467 r.buffers = buffers;
1468 r.havethumbnail = mHaveThumbnail;
1469
1470 mReadoutThread->queueCaptureRequest(r);
1471 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1472
1473 // Cache the settings for next time
1474 mPrevSettings.acquire(settings);
1475
1476 return OK;
1477}
1478
1479/** Debug methods */
1480
1481void EmulatedFakeCamera3::dump(int fd) {
1482
1483 String8 result;
1484 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
1485 result = String8::format("%s, valid resolution\n", __FILE__);
1486
1487 for (uint32_t f = 0; f < count; f+=2) {
1488 if (mAvailableJpegSize[f] == 0)
1489 break;
1490 result.appendFormat("width: %d , height =%d\n",
1491 mAvailableJpegSize[f], mAvailableJpegSize[f+1]);
1492 }
1493 result.appendFormat("\nmZoomMin: %d , mZoomMax =%d, mZoomStep=%d\n",
1494 mZoomMin, mZoomMax, mZoomStep);
1495
1496 if (mZoomStep <= 0) {
1497 result.appendFormat("!!!!!!!!!camera apk may have no picture out\n");
1498 }
1499
1500 write(fd, result.string(), result.size());
1501
1502 if (mSensor.get() != NULL) {
1503 mSensor->dump(fd);
1504 }
1505
1506}
//flush all request
//TODO returned buffers every request held immediately with
//CAMERA3_BUFFER_STATUS_ERROR flag.
int EmulatedFakeCamera3::flush_all_requests() {
    // Stub: logs and reports success without cancelling anything in flight
    // (see TODO above).
    DBG_LOGA("flush all request");
    return 0;
}
/** Tag query methods */
const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) {
    // No vendor-specific metadata sections are defined for the fake camera.
    return NULL;
}
1518
const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) {
    // No vendor-specific metadata tags are defined for the fake camera.
    return NULL;
}
1522
int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
    // No vendor tags exist, so there is no meaningful type to report.
    return 0;
}
1526
1527/**
1528 * Private methods
1529 */
1530
1531camera_metadata_ro_entry_t EmulatedFakeCamera3::staticInfo(const CameraMetadata *info, uint32_t tag,
1532 size_t minCount, size_t maxCount, bool required) const {
1533
1534 camera_metadata_ro_entry_t entry = info->find(tag);
1535
1536 if (CC_UNLIKELY( entry.count == 0 ) && required) {
1537 const char* tagSection = get_camera_metadata_section_name(tag);
1538 if (tagSection == NULL) tagSection = "<unknown>";
1539 const char* tagName = get_camera_metadata_tag_name(tag);
1540 if (tagName == NULL) tagName = "<unknown>";
1541
1542 ALOGE("Error finding static metadata entry '%s.%s' (%x)",
1543 tagSection, tagName, tag);
1544 } else if (CC_UNLIKELY(
1545 (minCount != 0 && entry.count < minCount) ||
1546 (maxCount != 0 && entry.count > maxCount) ) ) {
1547 const char* tagSection = get_camera_metadata_section_name(tag);
1548 if (tagSection == NULL) tagSection = "<unknown>";
1549 const char* tagName = get_camera_metadata_tag_name(tag);
1550 if (tagName == NULL) tagName = "<unknown>";
1551 ALOGE("Malformed static metadata entry '%s.%s' (%x):"
1552 "Expected between %zu and %zu values, but got %zu values",
1553 tagSection, tagName, tag, minCount, maxCount, entry.count);
1554 }
1555
1556 return entry;
1557}
1558
1559//this is only for debug
1560void EmulatedFakeCamera3::getStreamConfigurationp(CameraMetadata *info) {
1561 const int STREAM_CONFIGURATION_SIZE = 4;
1562 const int STREAM_FORMAT_OFFSET = 0;
1563 const int STREAM_WIDTH_OFFSET = 1;
1564 const int STREAM_HEIGHT_OFFSET = 2;
1565 const int STREAM_IS_INPUT_OFFSET = 3;
1566
1567 camera_metadata_ro_entry_t availableStreamConfigs =
1568 staticInfo(info, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
1569 CAMHAL_LOGDB(" stream, availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1570
1571 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1572 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
1573 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
1574 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
1575 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
1576 CAMHAL_LOGDB("f=%x, w*h=%dx%d, du=%d\n", format, width, height, isInput);
1577 }
1578
1579}
1580
1581//this is only for debug
1582void EmulatedFakeCamera3::getStreamConfigurationDurations(CameraMetadata *info) {
1583 const int STREAM_CONFIGURATION_SIZE = 4;
1584 const int STREAM_FORMAT_OFFSET = 0;
1585 const int STREAM_WIDTH_OFFSET = 1;
1586 const int STREAM_HEIGHT_OFFSET = 2;
1587 const int STREAM_IS_INPUT_OFFSET = 3;
1588
1589 camera_metadata_ro_entry_t availableStreamConfigs =
1590 staticInfo(info, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
1591 CAMHAL_LOGDB("availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1592
1593 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1594 int64_t format = availableStreamConfigs.data.i64[i + STREAM_FORMAT_OFFSET];
1595 int64_t width = availableStreamConfigs.data.i64[i + STREAM_WIDTH_OFFSET];
1596 int64_t height = availableStreamConfigs.data.i64[i + STREAM_HEIGHT_OFFSET];
1597 int64_t isInput = availableStreamConfigs.data.i64[i + STREAM_IS_INPUT_OFFSET];
1598 CAMHAL_LOGDB("f=%llx, w*h=%lldx%lld, du=%lld\n", format, width, height, isInput);
1599 }
1600}
1601
// Intentionally empty: placeholder hook for future metadata adjustments.
void EmulatedFakeCamera3::updateCameraMetaData(CameraMetadata *info) {

}
1605
1606status_t EmulatedFakeCamera3::constructStaticInfo() {
1607
1608 status_t ret = OK;
1609 CameraMetadata info;
1610 uint32_t picSizes[64 * 8];
1611 int64_t* duration = NULL;
1612 int count, duration_count, availablejpegsize;
1613 uint8_t maxCount = 10;
1614 char property[PROPERTY_VALUE_MAX];
1615 unsigned int supportrotate;
1616 availablejpegsize = ARRAY_SIZE(mAvailableJpegSize);
1617 memset(mAvailableJpegSize,0,(sizeof(uint32_t))*availablejpegsize);
1618 sp<Sensor> s = new Sensor();
1619 ret = s->startUp(mCameraID);
1620 if (ret != OK) {
1621 DBG_LOGA("sensor start up failed");
1622 return ret;
1623 }
1624
1625 mSensorType = s->getSensorType();
1626
1627 if ( mSensorType == SENSOR_USB) {
1628 char property[PROPERTY_VALUE_MAX];
1629 property_get("rw.camera.usb.faceback", property, "false");
1630 if (strstr(property, "true"))
1631 mFacingBack = 1;
1632 else
1633 mFacingBack = 0;
1634 ALOGI("Setting usb camera cameraID:%d to back camera:%s\n",
1635 mCameraID, property);
1636 } else {
1637 if (s->mSensorFace == SENSOR_FACE_FRONT) {
1638 mFacingBack = 0;
1639 } else if (s->mSensorFace == SENSOR_FACE_BACK) {
1640 mFacingBack = 1;
1641 } else if (s->mSensorFace == SENSOR_FACE_NONE) {
1642 if (gEmulatedCameraFactory.getEmulatedCameraNum() == 1) {
1643 mFacingBack = 1;
1644 } else if ( mCameraID == 0) {
1645 mFacingBack = 1;
1646 } else {
1647 mFacingBack = 0;
1648 }
1649 }
1650
1651 ALOGI("Setting on board camera cameraID:%d to back camera:%d[0 false, 1 true]\n",
1652 mCameraID, mFacingBack);
1653 }
1654
1655 mSupportCap = s->IoctlStateProbe();
1656 if (mSupportCap & IOCTL_MASK_ROTATE) {
1657 supportrotate = true;
1658 } else {
1659 supportrotate = false;
1660 }
1661 // android.lens
1662
1663 // 5 cm min focus distance for back camera, infinity (fixed focus) for front
1664 // TODO read this ioctl from camera driver
1665 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
1666 const float minFocusDistance = 0.0;
1667 info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1668 &minFocusDistance, 1);
1669
1670 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1671 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
1672 info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1673 &minFocusDistance, 1);
1674
1675 static const float focalLength = 3.30f; // mm
1676 info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1677 &focalLength, 1);
1678 static const float aperture = 2.8f;
1679 info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1680 &aperture, 1);
1681 static const float filterDensity = 0;
1682 info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1683 &filterDensity, 1);
1684 static const uint8_t availableOpticalStabilization =
1685 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1686 info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1687 &availableOpticalStabilization, 1);
1688
1689 static const int32_t lensShadingMapSize[] = {1, 1};
1690 info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1691 sizeof(lensShadingMapSize)/sizeof(int32_t));
1692
1693 uint8_t lensFacing = mFacingBack ?
1694 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1695 info.update(ANDROID_LENS_FACING, &lensFacing, 1);
1696
1697 float lensPosition[3];
1698 if (mFacingBack) {
1699 // Back-facing camera is center-top on device
1700 lensPosition[0] = 0;
1701 lensPosition[1] = 20;
1702 lensPosition[2] = -5;
1703 } else {
1704 // Front-facing camera is center-right on device
1705 lensPosition[0] = 20;
1706 lensPosition[1] = 20;
1707 lensPosition[2] = 0;
1708 }
1709 info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
1710 sizeof(float));
1711 static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
1712 info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1);
1713
1714 // android.sensor
1715
1716 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1717 info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
1718 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1719 info.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
1720 info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1721 Sensor::kExposureTimeRange, 2);
1722
1723 info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1724 &Sensor::kFrameDurationRange[1], 1);
1725
1726 info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1727 Sensor::kSensitivityRange,
1728 sizeof(Sensor::kSensitivityRange)
1729 /sizeof(int32_t));
1730
1731 info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1732 &Sensor::kColorFilterArrangement, 1);
1733
1734 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1735 info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1736 sensorPhysicalSize, 2);
1737
1738 info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1739 (int32_t*)Sensor::kResolution, 2);
1740
1741 //(int32_t*)Sensor::kResolution, 2);
1742
1743 info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1744 (int32_t*)&Sensor::kMaxRawValue, 1);
1745
1746 static const int32_t blackLevelPattern[4] = {
1747 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1748 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
1749 };
1750 info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1751 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
1752
1753 static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
1754 info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
1755 if (mSensorType == SENSOR_USB) {
1756 if (mFacingBack) {
1757 property_get("hw.camera.orientation.back", property, "0");
1758 } else {
1759 property_get("hw.camera.orientation.front", property, "0");
1760 }
1761 int32_t orientation = atoi(property);
1762 property_get("hw.camera.usb.orientation_offset", property, "0");
1763 orientation += atoi(property);
1764 orientation %= 360;
1765 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1766 } else {
1767 if (mFacingBack) {
1768 property_get("hw.camera.orientation.back", property, "270");
1769 const int32_t orientation = atoi(property);
1770 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1771 } else {
1772 property_get("hw.camera.orientation.front", property, "90");
1773 const int32_t orientation = atoi(property);
1774 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1775 }
1776 }
1777
1778 static const int64_t rollingShutterSkew = 0;
1779 info.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
1780
1781 //TODO: sensor color calibration fields
1782
1783 // android.flash
1784 static const uint8_t flashAvailable = 0;
1785 info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1786
1787 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
1788 info.update(ANDROID_FLASH_STATE, &flashstate, 1);
1789
1790 static const int64_t flashChargeDuration = 0;
1791 info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
1792
1793 /** android.noise */
1794 static const uint8_t availableNBModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
1795 info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &availableNBModes, 1);
1796
1797 // android.tonemap
1798
1799 static const int32_t tonemapCurvePoints = 128;
1800 info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1801
1802 // android.scaler
1803
1804 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
1805 info.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
1806
1807 info.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1808 kAvailableFormats,
1809 sizeof(kAvailableFormats)/sizeof(int32_t));
1810
1811 info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1812 (int64_t*)kAvailableRawMinDurations,
1813 sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
1814
1815 //for version 3.2 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
1816 count = sizeof(picSizes)/sizeof(picSizes[0]);
1817 count = s->getStreamConfigurations(picSizes, kAvailableFormats, count);
1818
1819 info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1820 (int32_t*)picSizes, count);
1821
1822 if (count < availablejpegsize) {
1823 availablejpegsize = count;
1824 }
1825 getValidJpegSize(picSizes,mAvailableJpegSize,availablejpegsize);
1826
1827 maxJpegResolution = getMaxJpegResolution(picSizes,count);
1828 int32_t full_size[4];
1829 if (mFacingBack) {
1830 full_size[0] = 0;
1831 full_size[1] = 0;
1832 full_size[2] = maxJpegResolution.width;
1833 full_size[3] = maxJpegResolution.height;
1834 } else {
1835 full_size[0] = 0;
1836 full_size[1] = 0;
1837 full_size[2] = maxJpegResolution.width;
1838 full_size[3] = maxJpegResolution.height;
1839 }
1840 info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1841 (int32_t*)full_size,
1842 sizeof(full_size)/sizeof(full_size[0]));
1843 duration = new int64_t[count];
1844 if (duration == NULL) {
1845 DBG_LOGA("allocate memory for duration failed");
1846 return NO_MEMORY;
1847 } else {
1848 memset(duration,0,sizeof(int64_t)*count);
1849 }
1850 duration_count = s->getStreamConfigurationDurations(picSizes, duration , count);
1851
1852 info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1853 duration, duration_count);
1854 info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1855 duration, duration_count);
1856
1857 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1858 (int64_t*)kAvailableProcessedMinDurations,
1859 sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
1860
1861 info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
1862 (int64_t*)kAvailableJpegMinDurations,
1863 sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
1864
1865
1866 // android.jpeg
1867
1868 static const int32_t jpegThumbnailSizes[] = {
1869 0, 0,
1870 160, 120,
1871 320, 240
1872 };
1873 info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1874 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
1875
1876 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1877 info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1878
1879 // android.stats
1880
1881 static const uint8_t availableFaceDetectModes[] = {
1882 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1883 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1884 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
1885 };
1886
1887 info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1888 availableFaceDetectModes,
1889 sizeof(availableFaceDetectModes));
1890
1891 static const int32_t maxFaceCount = 8;
1892 info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1893 &maxFaceCount, 1);
1894
1895 static const int32_t histogramSize = 64;
1896 info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1897 &histogramSize, 1);
1898
1899 static const int32_t maxHistogramCount = 1000;
1900 info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1901 &maxHistogramCount, 1);
1902
1903 static const int32_t sharpnessMapSize[2] = {64, 64};
1904 info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1905 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
1906
1907 static const int32_t maxSharpnessMapValue = 1000;
1908 info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1909 &maxSharpnessMapValue, 1);
1910 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
1911 info.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
1912
1913 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
1914 info.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
1915 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
1916 info.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
1917 // android.control
1918
1919 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
1920 info.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
1921
1922 static const uint8_t availableSceneModes[] = {
1923 // ANDROID_CONTROL_SCENE_MODE_DISABLED,
1924 ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY
1925 };
1926 info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1927 availableSceneModes, sizeof(availableSceneModes));
1928
1929 static const uint8_t availableEffects[] = {
1930 ANDROID_CONTROL_EFFECT_MODE_OFF
1931 };
1932 info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1933 availableEffects, sizeof(availableEffects));
1934
1935 static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
1936 info.update(ANDROID_CONTROL_MAX_REGIONS,
1937 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
1938
1939 static const uint8_t availableAeModes[] = {
1940 ANDROID_CONTROL_AE_MODE_OFF,
1941 ANDROID_CONTROL_AE_MODE_ON
1942 };
1943 info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1944 availableAeModes, sizeof(availableAeModes));
1945
1946
1947 static const int32_t availableTargetFpsRanges[] = {
1948 5, 15, 15, 15, 5, 30, 30, 30,
1949 };
1950 info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1951 availableTargetFpsRanges,
1952 sizeof(availableTargetFpsRanges)/sizeof(int32_t));
1953
1954 uint8_t awbModes[maxCount];
1955 count = s->getAWB(awbModes, maxCount);
1956 if (count < 0) {
1957 static const uint8_t availableAwbModes[] = {
1958 ANDROID_CONTROL_AWB_MODE_OFF,
1959 ANDROID_CONTROL_AWB_MODE_AUTO,
1960 ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1961 ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1962 ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1963 ANDROID_CONTROL_AWB_MODE_SHADE
1964 };
1965 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1966 availableAwbModes, sizeof(availableAwbModes));
1967 } else {
1968 DBG_LOGB("getAWB %d ",count);
1969 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1970 awbModes, count);
1971 }
1972
1973 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1974 info.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
1975
1976 static const uint8_t availableAfModesFront[] = {
1977 ANDROID_CONTROL_AF_MODE_OFF
1978 };
1979
1980 if (mFacingBack) {
1981 uint8_t afMode[maxCount];
1982 count = s->getAutoFocus(afMode, maxCount);
1983 if (count < 0) {
1984 static const uint8_t availableAfModesBack[] = {
1985 ANDROID_CONTROL_AF_MODE_OFF,
1986 //ANDROID_CONTROL_AF_MODE_AUTO,
1987 //ANDROID_CONTROL_AF_MODE_MACRO,
1988 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
1989 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
1990 };
1991
1992 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1993 availableAfModesBack, sizeof(availableAfModesBack));
1994 } else {
1995 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1996 afMode, count);
1997 }
1998 } else {
1999 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2000 availableAfModesFront, sizeof(availableAfModesFront));
2001 }
2002
2003 uint8_t antiBanding[maxCount];
2004 count = s->getAntiBanding(antiBanding, maxCount);
2005 if (count < 0) {
2006 static const uint8_t availableAntibanding[] = {
2007 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
2008 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
2009 };
2010 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2011 availableAntibanding, sizeof(availableAntibanding));
2012 } else {
2013 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2014 antiBanding, count);
2015 }
2016
2017 camera_metadata_rational step;
2018 int maxExp, minExp, def;
2019 ret = s->getExposure(&maxExp, &minExp, &def, &step);
2020 if (ret < 0) {
2021 static const int32_t aeExpCompensation = 0;
2022 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
2023
2024 static const camera_metadata_rational exposureCompensationStep = {
2025 1, 3
2026 };
2027 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2028 &exposureCompensationStep, 1);
2029
2030 int32_t exposureCompensationRange[] = {0, 0};
2031 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2032 exposureCompensationRange,
2033 sizeof(exposureCompensationRange)/sizeof(int32_t));
2034 } else {
2035 DBG_LOGB("exposure compensation support:(%d, %d)\n", minExp, maxExp);
2036 int32_t exposureCompensationRange[] = {minExp, maxExp};
2037 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2038 exposureCompensationRange,
2039 sizeof(exposureCompensationRange)/sizeof(int32_t));
2040 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2041 &step, 1);
2042 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &def, 1);
2043 }
2044
2045 ret = s->getZoom(&mZoomMin, &mZoomMax, &mZoomStep);
2046 if (ret < 0) {
2047 float maxZoom = 1.0;
2048 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2049 &maxZoom, 1);
2050 } else {
2051 float maxZoom = mZoomMax / mZoomMin;
2052 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2053 &maxZoom, 1);
2054 }
2055
2056 static const uint8_t availableVstabModes[] = {
2057 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
2058 };
2059 info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2060 availableVstabModes, sizeof(availableVstabModes));
2061
2062 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
2063 info.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
2064 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2065 info.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
2066 // android.info
2067 const uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2068 //mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
2069 // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2070 info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2071 &supportedHardwareLevel,
2072 /*count*/1);
2073
2074 int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
2075 info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1);
2076
2077 uint8_t len[] = {1};
2078 info.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
2079
2080 uint8_t maxlen[] = {2};
2081 info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
2082 uint8_t cap[] = {
2083 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
2084 };
2085 info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2086 (uint8_t *)cap, sizeof(cap)/sizeof(cap[0]));
2087
2088
2089 int32_t partialResultCount = 1;
2090 info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,&partialResultCount,1);
2091 int32_t maxNumOutputStreams[3] = {0,2,1};
2092 info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,maxNumOutputStreams,3);
2093 uint8_t aberrationMode[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
2094 info.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
2095 aberrationMode, 1);
2096 info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
2097 aberrationMode, 1);
2098
2099 getAvailableChKeys(&info, supportedHardwareLevel);
2100
2101 if (mCameraInfo != NULL) {
2102 CAMHAL_LOGDA("mCameraInfo is not null, mem leak?");
2103 }
2104 mCameraInfo = info.release();
2105 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
2106
2107 if (duration != NULL) {
2108 delete [] duration;
2109 }
2110
2111 s->shutDown();
2112 s.clear();
2113 mPlugged = true;
2114
2115 return OK;
2116}
2117
2118status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
2119 /**
2120 * Extract top-level 3A controls
2121 */
2122 status_t res;
2123
2124 bool facePriority = false;
2125
2126 camera_metadata_entry e;
2127
2128 e = settings.find(ANDROID_CONTROL_MODE);
2129 if (e.count == 0) {
2130 ALOGE("%s: No control mode entry!", __FUNCTION__);
2131 return BAD_VALUE;
2132 }
2133 uint8_t controlMode = e.data.u8[0];
2134
2135 e = settings.find(ANDROID_CONTROL_SCENE_MODE);
2136 if (e.count == 0) {
2137 ALOGE("%s: No scene mode entry!", __FUNCTION__);
2138 return BAD_VALUE;
2139 }
2140 uint8_t sceneMode = e.data.u8[0];
2141
2142 if (controlMode == ANDROID_CONTROL_MODE_OFF) {
2143 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2144 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2145 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2146 update3A(settings);
2147 return OK;
2148 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2149 switch(sceneMode) {
2150 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
2151 mFacePriority = true;
2152 break;
2153 default:
2154 ALOGE("%s: Emulator doesn't support scene mode %d",
2155 __FUNCTION__, sceneMode);
2156 return BAD_VALUE;
2157 }
2158 } else {
2159 mFacePriority = false;
2160 }
2161
2162 // controlMode == AUTO or sceneMode = FACE_PRIORITY
2163 // Process individual 3A controls
2164
2165 res = doFakeAE(settings);
2166 if (res != OK) return res;
2167
2168 res = doFakeAF(settings);
2169 if (res != OK) return res;
2170
2171 res = doFakeAWB(settings);
2172 if (res != OK) return res;
2173
2174 update3A(settings);
2175 return OK;
2176}
2177
2178status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
2179 camera_metadata_entry e;
2180
2181 e = settings.find(ANDROID_CONTROL_AE_MODE);
2182 if (e.count == 0) {
2183 ALOGE("%s: No AE mode entry!", __FUNCTION__);
2184 return BAD_VALUE;
2185 }
2186 uint8_t aeMode = e.data.u8[0];
2187
2188 switch (aeMode) {
2189 case ANDROID_CONTROL_AE_MODE_OFF:
2190 // AE is OFF
2191 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2192 return OK;
2193 case ANDROID_CONTROL_AE_MODE_ON:
2194 // OK for AUTO modes
2195 break;
2196 default:
2197 ALOGE("%s: Emulator doesn't support AE mode %d",
2198 __FUNCTION__, aeMode);
2199 return BAD_VALUE;
2200 }
2201
2202 e = settings.find(ANDROID_CONTROL_AE_LOCK);
2203 if (e.count == 0) {
2204 ALOGE("%s: No AE lock entry!", __FUNCTION__);
2205 return BAD_VALUE;
2206 }
2207 bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
2208
2209 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
2210 bool precaptureTrigger = false;
2211 if (e.count != 0) {
2212 precaptureTrigger =
2213 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
2214 }
2215
2216 if (precaptureTrigger) {
2217 ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
2218 } else if (e.count > 0) {
2219 ALOGV("%s: Pre capture trigger was present? %zu",
2220 __FUNCTION__,
2221 e.count);
2222 }
2223
2224 if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2225 // Run precapture sequence
2226 if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2227 mAeCounter = 0;
2228 }
2229
2230 if (mFacePriority) {
2231 mAeTargetExposureTime = kFacePriorityExposureTime;
2232 } else {
2233 mAeTargetExposureTime = kNormalExposureTime;
2234 }
2235
2236 if (mAeCounter > kPrecaptureMinFrames &&
2237 (mAeTargetExposureTime - mAeCurrentExposureTime) <
2238 mAeTargetExposureTime / 10) {
2239 // Done with precapture
2240 mAeCounter = 0;
2241 mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
2242 ANDROID_CONTROL_AE_STATE_CONVERGED;
2243 } else {
2244 // Converge some more
2245 mAeCurrentExposureTime +=
2246 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2247 kExposureTrackRate;
2248 mAeCounter++;
2249 mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
2250 }
2251
2252 } else if (!aeLocked) {
2253 // Run standard occasional AE scan
2254 switch (mAeState) {
2255 case ANDROID_CONTROL_AE_STATE_CONVERGED:
2256 case ANDROID_CONTROL_AE_STATE_INACTIVE:
2257 mAeCounter++;
2258 if (mAeCounter > kStableAeMaxFrames) {
2259 mAeTargetExposureTime =
2260 mFacePriority ? kFacePriorityExposureTime :
2261 kNormalExposureTime;
2262 float exposureStep = ((double)rand() / RAND_MAX) *
2263 (kExposureWanderMax - kExposureWanderMin) +
2264 kExposureWanderMin;
2265 mAeTargetExposureTime *= std::pow(2, exposureStep);
2266 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2267 }
2268 break;
2269 case ANDROID_CONTROL_AE_STATE_SEARCHING:
2270 mAeCurrentExposureTime +=
2271 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2272 kExposureTrackRate;
2273 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
2274 mAeTargetExposureTime / 10) {
2275 // Close enough
2276 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2277 mAeCounter = 0;
2278 }
2279 break;
2280 case ANDROID_CONTROL_AE_STATE_LOCKED:
2281 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2282 mAeCounter = 0;
2283 break;
2284 default:
2285 ALOGE("%s: Emulator in unexpected AE state %d",
2286 __FUNCTION__, mAeState);
2287 return INVALID_OPERATION;
2288 }
2289 } else {
2290 // AE is locked
2291 mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2292 }
2293
2294 return OK;
2295}
2296
status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
    // Simulates the auto-focus state machine for one request. Reads the AF
    // mode and trigger from the settings, forwards the mode to the sensor,
    // and advances mAfState by at most one transition per frame; focus
    // "succeeds" pseudo-randomly (rand() % 3).
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AF_MODE);
    if (e.count == 0) {
        ALOGE("%s: No AF mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t afMode = e.data.u8[0];

    e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
    typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
    af_trigger_t afTrigger;
    // If we have an afTrigger, afTriggerId should be set too
    if (e.count != 0) {
        afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);

        e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);

        if (e.count == 0) {
            ALOGE("%s: When android.control.afTrigger is set "
                    " in the request, afTriggerId needs to be set as well",
                    __FUNCTION__);
            return BAD_VALUE;
        }

        mAfTriggerId = e.data.i32[0];

        ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
        ALOGV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
        ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
    } else {
        afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    }
    // The front camera is treated as fixed-focus: force AF off regardless
    // of what the request asked for.
    if (!mFacingBack) {
        afMode = ANDROID_CONTROL_AF_MODE_OFF;
    }

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_OFF:
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_MACRO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            if (!mFacingBack) {
                // Unreachable in practice: afMode was forced to OFF above
                // for the front camera.
                ALOGE("%s: Front camera doesn't support AF mode %d",
                        __FUNCTION__, afMode);
                return BAD_VALUE;
            }
            // NOTE(review): "setAutoFocuas" is the (misspelled) Sensor API
            // name; renaming it would require changing the Sensor interface.
            mSensor->setAutoFocuas(afMode);
            // OK, handle transitions lower on
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }
#if 0
    // Disabled: would forward the AF region rectangle to the sensor.
    e = settings.find(ANDROID_CONTROL_AF_REGIONS);
    if (e.count == 0) {
        ALOGE("%s:Get ANDROID_CONTROL_AF_REGIONS failed\n", __FUNCTION__);
        return BAD_VALUE;
    }
    int32_t x0 = e.data.i32[0];
    int32_t y0 = e.data.i32[1];
    int32_t x1 = e.data.i32[2];
    int32_t y1 = e.data.i32[3];
    mSensor->setFocuasArea(x0, y0, x1, y1);
    DBG_LOGB(" x0:%d, y0:%d,x1:%d,y1:%d,\n", x0, y0, x1, y1);
#endif


    bool afModeChanged = mAfMode != afMode;
    mAfMode = afMode;

    /**
     * Simulate AF triggers. Transition at most 1 state per frame.
     * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
     */

    bool afTriggerStart = false;
    bool afTriggerCancel = false;
    switch (afTrigger) {
        case ANDROID_CONTROL_AF_TRIGGER_IDLE:
            break;
        case ANDROID_CONTROL_AF_TRIGGER_START:
            afTriggerStart = true;
            break;
        case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
            afTriggerCancel = true;
            // Cancel trigger always transitions into INACTIVE
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

            ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);

            // Stay in 'inactive' until at least next frame
            return OK;
        default:
            ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
            return BAD_VALUE;
    }

    // If we get down here, we're either in an autofocus mode
    // or in a continuous focus mode (and no other modes)

    // Remember the previous state so the transition can be logged below.
    int oldAfState = mAfState;
    switch (mAfState) {
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                        break;
                }
            } else {
                // At least one frame stays in INACTIVE
                if (!afModeChanged) {
                    switch (afMode) {
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                            // fall-through
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                            mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
                            break;
                    }
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
            /**
             * When the AF trigger is activated, the algorithm should finish
             * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
             * or AF_NOT_FOCUSED as appropriate
             */
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            /**
             * When the AF trigger is not involved, the AF algorithm should
             * start in INACTIVE state, and then transition into PASSIVE_SCAN
             * and PASSIVE_FOCUSED states
             */
            else if (!afTriggerCancel) {
                // Randomly transition to passive focus
                if (rand() % 3 == 0) {
                    mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
            }

            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            // TODO: initiate passive scan (PASSIVE_SCAN)
            break;
        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
            // Simulate AF sweep completing instantaneously

            // Randomly transition to focused or not focused
            if (rand() % 3) {
                mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
            } else {
                mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
            }
            break;
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        default:
            ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
    }

    // Log the old -> new state transition (verbose builds only).
    {
        char afStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                oldAfState,
                afStateString,
                sizeof(afStateString));

        char afNewStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                mAfState,
                afNewStateString,
                sizeof(afNewStateString));
        ALOGVV("%s: AF state transitioned from %s to %s",
                __FUNCTION__, afStateString, afNewStateString);
    }


    return OK;
}
2536
2537status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2538 camera_metadata_entry e;
2539
2540 e = settings.find(ANDROID_CONTROL_AWB_MODE);
2541 if (e.count == 0) {
2542 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2543 return BAD_VALUE;
2544 }
2545 uint8_t awbMode = e.data.u8[0];
2546 //DBG_LOGB(" awbMode%d\n", awbMode);
2547
2548 // TODO: Add white balance simulation
2549
2550 switch (awbMode) {
2551 case ANDROID_CONTROL_AWB_MODE_OFF:
2552 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2553 return OK;
2554 case ANDROID_CONTROL_AWB_MODE_AUTO:
2555 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2556 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2557 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2558 case ANDROID_CONTROL_AWB_MODE_SHADE:
2559 mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; //add for cts
2560 return mSensor->setAWB(awbMode);
2561 // OK
2562 break;
2563 default:
2564 ALOGE("%s: Emulator doesn't support AWB mode %d",
2565 __FUNCTION__, awbMode);
2566 return BAD_VALUE;
2567 }
2568
2569 return OK;
2570}
2571
2572
2573void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2574 if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
2575 settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
2576 &mAeCurrentExposureTime, 1);
2577 settings.update(ANDROID_SENSOR_SENSITIVITY,
2578 &mAeCurrentSensitivity, 1);
2579 }
2580
2581 settings.update(ANDROID_CONTROL_AE_STATE,
2582 &mAeState, 1);
2583 settings.update(ANDROID_CONTROL_AF_STATE,
2584 &mAfState, 1);
2585 settings.update(ANDROID_CONTROL_AWB_STATE,
2586 &mAwbState, 1);
2587 /**
2588 * TODO: Trigger IDs need a think-through
2589 */
2590 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
2591 &mAfTriggerId, 1);
2592}
2593
2594void EmulatedFakeCamera3::signalReadoutIdle() {
2595 Mutex::Autolock l(mLock);
2596 // Need to chek isIdle again because waiting on mLock may have allowed
2597 // something to be placed in the in-flight queue.
2598 if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2599 ALOGV("Now idle");
2600 mStatus = STATUS_READY;
2601 }
2602}
2603
2604void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2605 nsecs_t timestamp) {
2606 switch(e) {
2607 case Sensor::SensorListener::EXPOSURE_START: {
2608 ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
2609 __FUNCTION__, frameNumber, timestamp);
2610 // Trigger shutter notify to framework
2611 camera3_notify_msg_t msg;
2612 msg.type = CAMERA3_MSG_SHUTTER;
2613 msg.message.shutter.frame_number = frameNumber;
2614 msg.message.shutter.timestamp = timestamp;
2615 sendNotify(&msg);
2616 break;
2617 }
2618 case Sensor::SensorListener::ERROR_CAMERA_DEVICE: {
2619 camera3_notify_msg_t msg;
2620 msg.type = CAMERA3_MSG_ERROR;
2621 msg.message.error.frame_number = frameNumber;
2622 msg.message.error.error_stream = NULL;
2623 msg.message.error.error_code = 1;
2624 sendNotify(&msg);
2625 break;
2626 }
2627 default:
2628 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
2629 e, timestamp);
2630 break;
2631 }
2632}
2633
2634EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
2635 mParent(parent), mJpegWaiting(false) {
2636}
2637
2638EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2639 for (List<Request>::iterator i = mInFlightQueue.begin();
2640 i != mInFlightQueue.end(); i++) {
2641 delete i->buffers;
2642 delete i->sensorBuffers;
2643 }
2644}
2645
2646void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
2647 Mutex::Autolock l(mLock);
2648
2649 mInFlightQueue.push_back(r);
2650 mInFlightSignal.signal();
2651}
2652
2653bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
2654 Mutex::Autolock l(mLock);
2655 return mInFlightQueue.empty() && !mThreadActive;
2656}
2657
2658status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2659 status_t res;
2660 Mutex::Autolock l(mLock);
2661 int loopCount = 0;
2662 while (mInFlightQueue.size() >= kMaxQueueSize) {
2663 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2664 if (res != OK && res != TIMED_OUT) {
2665 ALOGE("%s: Error waiting for in-flight queue to shrink",
2666 __FUNCTION__);
2667 return INVALID_OPERATION;
2668 }
2669 if (loopCount == kMaxWaitLoops) {
2670 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2671 __FUNCTION__);
2672 return TIMED_OUT;
2673 }
2674 loopCount++;
2675 }
2676 return OK;
2677}
2678
2679status_t EmulatedFakeCamera3::ReadoutThread::setJpegCompressorListener(EmulatedFakeCamera3 *parent) {
2680 status_t res;
2681 res = mParent->mJpegCompressor->setlistener(this);
2682 if (res != NO_ERROR) {
2683 ALOGE("%s: set JpegCompressor Listner failed",__FUNCTION__);
2684 }
2685 return res;
2686}
2687
2688status_t EmulatedFakeCamera3::ReadoutThread::startJpegCompressor(EmulatedFakeCamera3 *parent) {
2689 status_t res;
2690 res = mParent->mJpegCompressor->start();
2691 if (res != NO_ERROR) {
2692 ALOGE("%s: JpegCompressor start failed",__FUNCTION__);
2693 }
2694 return res;
2695}
2696
2697status_t EmulatedFakeCamera3::ReadoutThread::shutdownJpegCompressor(EmulatedFakeCamera3 *parent) {
2698 status_t res;
2699 res = mParent->mJpegCompressor->cancel();
2700 if (res != OK) {
2701 ALOGE("%s: JpegCompressor cancel failed",__FUNCTION__);
2702 }
2703 return res;
2704}
2705
bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
    // One iteration of the readout pipeline: dequeue a capture request,
    // wait for the matching sensor frame, hand any BLOB (JPEG) buffer to
    // the async compressor, and send the completed result to the framework.
    // Returning true keeps the thread looping; false terminates it.
    status_t res;
    ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);

    // First wait for a request from the in-flight queue

    if (mCurrentRequest.settings.isEmpty()) {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
            if (res == TIMED_OUT) {
                ALOGVV("%s: ReadoutThread: Timed out waiting for request",
                        __FUNCTION__);
                return true;
            } else if (res != NO_ERROR) {
                ALOGE("%s: Error waiting for capture requests: %d",
                        __FUNCTION__, res);
                return false;
            }
        }
        // Take ownership of the oldest queued request and mark the thread
        // busy so isIdle() reports correctly.
        mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
        mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
        mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
        mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
        mCurrentRequest.havethumbnail = mInFlightQueue.begin()->havethumbnail;
        mInFlightQueue.erase(mInFlightQueue.begin());
        mInFlightSignal.signal();
        mThreadActive = true;
        ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
                mCurrentRequest.frameNumber);
    }

    // Then wait for it to be delivered from the sensor
    ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
            __FUNCTION__);

    nsecs_t captureTime;
    bool gotFrame =
            mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
    if (!gotFrame) {
        // No frame yet; retry on the next loop iteration with the same
        // mCurrentRequest still pending.
        ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
                __FUNCTION__);
        return true;
    }

    ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
            mCurrentRequest.frameNumber, captureTime);

    // Check if we need to JPEG encode a buffer, and send it for async
    // compression if so. Otherwise prepare the buffer for return.
    bool needJpeg = false;
    HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
    while (buf != mCurrentRequest.buffers->end()) {
        bool goodBuffer = true;
        if ( buf->stream->format ==
                HAL_PIXEL_FORMAT_BLOB) {
            // BLOB buffer: queue it on the JPEG compressor; ownership of
            // sensorBuffers transfers to the compressor, and the HAL buffer
            // is removed from this result (it returns later via onJpegDone).
            Mutex::Autolock jl(mJpegLock);
            needJpeg = true;
            CaptureRequest currentcapture;
            currentcapture.frameNumber = mCurrentRequest.frameNumber;
            currentcapture.sensorBuffers = mCurrentRequest.sensorBuffers;
            currentcapture.buf = buf;
            currentcapture.mNeedThumbnail = mCurrentRequest.havethumbnail;
            mParent->mJpegCompressor->queueRequest(currentcapture);
            //this sensorBuffers delete in the jpegcompress;
            mCurrentRequest.sensorBuffers = NULL;
            buf = mCurrentRequest.buffers->erase(buf);
            continue;
        }
        // Non-BLOB buffer: unlock it and mark it ready for return.
        GraphicBufferMapper::get().unlock(*(buf->buffer));

        buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
                CAMERA3_BUFFER_STATUS_ERROR;
        buf->acquire_fence = -1;
        buf->release_fence = -1;

        ++buf;
    } // end while

    // Construct result for all completed buffers and results

    camera3_capture_result result;

    mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
            &captureTime, 1);

    memset(&result, 0, sizeof(result));
    result.frame_number = mCurrentRequest.frameNumber;
    result.result = mCurrentRequest.settings.getAndLock();
    result.num_output_buffers = mCurrentRequest.buffers->size();
    result.output_buffers = mCurrentRequest.buffers->array();
    result.partial_result = 1;

    // Go idle if queue is empty, before sending result
    bool signalIdle = false;
    {
        Mutex::Autolock l(mLock);
        if (mInFlightQueue.empty()) {
            mThreadActive = false;
            signalIdle = true;
        }
    }
    // Notify the parent outside mLock to avoid holding it across the call.
    if (signalIdle) mParent->signalReadoutIdle();

    // Send it off to the framework
    ALOGVV("%s: ReadoutThread: Send result to framework",
            __FUNCTION__);
    mParent->sendCaptureResult(&result);

    // Clean up
    mCurrentRequest.settings.unlock(result.result);

    delete mCurrentRequest.buffers;
    mCurrentRequest.buffers = NULL;
    if (!needJpeg) {
        // sensorBuffers still belongs to us only when no JPEG request took
        // ownership of it above.
        delete mCurrentRequest.sensorBuffers;
        mCurrentRequest.sensorBuffers = NULL;
    }
    mCurrentRequest.settings.clear();

    return true;
}
2828
2829void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2830 const StreamBuffer &jpegBuffer, bool success , CaptureRequest &r) {
2831 Mutex::Autolock jl(mJpegLock);
2832 GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));
2833
2834 mJpegHalBuffer = *(r.buf);
2835 mJpegHalBuffer.status = success ?
2836 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2837 mJpegHalBuffer.acquire_fence = -1;
2838 mJpegHalBuffer.release_fence = -1;
2839 mJpegWaiting = false;
2840
2841 camera3_capture_result result;
2842 result.frame_number = r.frameNumber;
2843 result.result = NULL;
2844 result.num_output_buffers = 1;
2845 result.output_buffers = &mJpegHalBuffer;
2846 result.partial_result = 1;
2847
2848 if (!success) {
2849 ALOGE("%s: Compression failure, returning error state buffer to"
2850 " framework", __FUNCTION__);
2851 } else {
2852 DBG_LOGB("%s: Compression complete, returning buffer to framework",
2853 __FUNCTION__);
2854 }
2855
2856 mParent->sendCaptureResult(&result);
2857
2858}
2859
2860void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
2861 const StreamBuffer &inputBuffer) {
2862 // Should never get here, since the input buffer has to be returned
2863 // by end of processCaptureRequest
2864 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2865}
2866
2867
2868}; // namespace android
2869