path: root/v3/EmulatedFakeCamera3.cpp (plain)
blob: 1ebcaeca16117a9df3439d71876ad75e0337dc30
1/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/*
18 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22#include <inttypes.h>
23
24//#define LOG_NDEBUG 0
25//#define LOG_NNDEBUG 0
26#define LOG_TAG "EmulatedCamera_FakeCamera3"
27#include <utils/Log.h>
28
29#include "EmulatedFakeCamera3.h"
30#include "EmulatedCameraFactory.h"
31#include <ui/Fence.h>
32#include <ui/Rect.h>
33#include <ui/GraphicBufferMapper.h>
34#include <sys/types.h>
35
36#include <cutils/properties.h>
37#include "fake-pipeline2/Sensor.h"
38#include "fake-pipeline2/JpegCompressor.h"
39#include <cmath>
40#include <gralloc_priv.h>
41
42#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
43#define ALOGVV ALOGV
44#else
45#define ALOGVV(...) ((void)0)
46#endif
47
48namespace android {
49
50/**
51 * Constants for camera capabilities
52 */
53
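// Time-unit helpers expressed in nanoseconds (the unit of nsecs_t):
// USEC = 1 microsecond, MSEC = 1 millisecond, SEC = 1 second.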
54const int64_t USEC = 1000LL;
55const int64_t MSEC = USEC * 1000LL;
56const int64_t SEC = MSEC * 1000LL;
57
58
59const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
60 //HAL_PIXEL_FORMAT_RAW_SENSOR,
61 HAL_PIXEL_FORMAT_BLOB,
62 //HAL_PIXEL_FORMAT_RGBA_8888,
63 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
64 // These are handled by YCbCr_420_888
65 HAL_PIXEL_FORMAT_YV12,
66 HAL_PIXEL_FORMAT_YCrCb_420_SP,
67 //HAL_PIXEL_FORMAT_YCbCr_422_I,
68 HAL_PIXEL_FORMAT_YCbCr_420_888
69};
70
71const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
72 640, 480
73 // Sensor::kResolution[0], Sensor::kResolution[1]
74};
75
76const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
77 (const uint64_t)Sensor::kFrameDurationRange[0]
78};
79
80const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[6] = {
81 640, 480, 320, 240,// 1280, 720
82 // Sensor::kResolution[0], Sensor::kResolution[1]
83};
84
85const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
86 640, 480, 320, 240
87 // Sensor::kResolution[0], Sensor::kResolution[1]
88};
89
90const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
91 (const uint64_t)Sensor::kFrameDurationRange[0]
92};
93
94const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
95 1280,720
96 // Sensor::kResolution[0], Sensor::kResolution[1]
97};
98
99const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
100 640, 480
101 // Sensor::kResolution[0], Sensor::kResolution[1]
102};
103
104
105const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
106 (const uint64_t)Sensor::kFrameDurationRange[0]
107};
108
109/**
110 * 3A constants
111 */
112
113// Default exposure and gain targets for different scenarios
114const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
115const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
116const int EmulatedFakeCamera3::kNormalSensitivity = 100;
117const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
118const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
119const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
120const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
121const float EmulatedFakeCamera3::kExposureWanderMin = -2;
122const float EmulatedFakeCamera3::kExposureWanderMax = 1;
123
124/**
125 * Camera device lifecycle methods
126 */
127static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
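// Scan the packed size table and return the largest JPEG (BLOB) resolution.
// Each entry appears to follow the stream-configuration layout used elsewhere
// in this file: {format, width, height, direction}, hence the stride of 4.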
128jpegsize EmulatedFakeCamera3::getMaxJpegResolution(uint32_t picSizes[],int count) {
129 uint32_t maxJpegWidth = 0, maxJpegHeight = 0;
130 jpegsize maxJpegResolution;
131 for (int i=0; i < count; i+= 4) {
132 uint32_t width = picSizes[i+1];
133 uint32_t height = picSizes[i+2];
134 if (picSizes[i+0] == HAL_PIXEL_FORMAT_BLOB &&
135 (width * height > maxJpegWidth * maxJpegHeight)) {
136 maxJpegWidth = width;
137 maxJpegHeight = height;
138 }
139 }
140 maxJpegResolution.width = maxJpegWidth;
141 maxJpegResolution.height = maxJpegHeight;
142 return maxJpegResolution;
143}
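// Estimate the JPEG buffer size for a given resolution by scaling the
// worst-case buffer (JpegCompressor::kMaxJpegSize at the largest BLOB
// resolution) linearly with pixel count, then clamping the result to
// [kMinJpegBufferSize, kMaxJpegSize].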
144ssize_t EmulatedFakeCamera3::getJpegBufferSize(int width, int height) {
145 if (maxJpegResolution.width == 0) {
146 return BAD_VALUE;
147 }
148 ssize_t maxJpegBufferSize = JpegCompressor::kMaxJpegSize;
149
150 // Calculate final jpeg buffer size for the given resolution.
151 float scaleFactor = ((float) (width * height)) /
152 (maxJpegResolution.width * maxJpegResolution.height);
153 ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize;
154 // Bound the buffer size to [MIN_JPEG_BUFFER_SIZE, maxJpegBufferSize].
155 if (jpegBufferSize > maxJpegBufferSize) {
156 jpegBufferSize = maxJpegBufferSize;
157 } else if (jpegBufferSize < kMinJpegBufferSize) {
158 jpegBufferSize = kMinJpegBufferSize;
159 }
160 return jpegBufferSize;
161}
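// Rough worked example (numbers are illustrative only): with a 1280x720
// maximum BLOB size, a 640x480 request scales the maximum JPEG buffer by
// (640*480)/(1280*720) = 1/3; the clamp above then bounds the result to
// [kMinJpegBufferSize, kMaxJpegSize], where kMinJpegBufferSize is 256 KB
// plus room for the camera3_jpeg_blob footer.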
162
163EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, struct hw_module_t* module) :
164 EmulatedCamera3(cameraId, module) {
165 ALOGI("Constructing emulated fake camera 3 cameraID:%d", mCameraID);
166
167 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
168 mDefaultTemplates[i] = NULL;
169 }
170
171 /**
172 * Front cameras = limited mode
173 * Back cameras = full mode
174 */
175 //TODO limited or full mode, read this from camera driver
176 //mFullMode = facingBack;
177 mSupportCap = 0;
178 mSupportRotate = 0;
179 mFullMode = 0;
180}
181
182EmulatedFakeCamera3::~EmulatedFakeCamera3() {
183 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
184 if (mDefaultTemplates[i] != NULL) {
185 free_camera_metadata(mDefaultTemplates[i]);
186 }
187 }
188
189 if (mCameraInfo != NULL) {
190 CAMHAL_LOGIA("free mCameraInfo");
191 free_camera_metadata(mCameraInfo);
192 mCameraInfo = NULL;
193 }
194}
195
196status_t EmulatedFakeCamera3::Initialize() {
197 DBG_LOGB("mCameraID=%d,mStatus=%d,ddd\n", mCameraID, mStatus);
198 status_t res;
199
200#ifdef HAVE_VERSION_INFO
201 CAMHAL_LOGIB("\n--------------------------------\n"
202 "author:aml.sh multi-media team\n"
203 "branch name: %s\n"
204 "git version: %s \n"
205 "last changed: %s\n"
206 "build-time: %s\n"
207 "build-name: %s\n"
208 "uncommitted-file-num:%d\n"
209 "ssh user@%s, cd %s\n"
210 "hostname %s\n"
211 "--------------------------------\n",
212 CAMHAL_BRANCH_NAME,
213 CAMHAL_GIT_VERSION,
214 CAMHAL_LAST_CHANGED,
215 CAMHAL_BUILD_TIME,
216 CAMHAL_BUILD_NAME,
217 CAMHAL_GIT_UNCOMMIT_FILE_NUM,
218 CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
219 );
220#endif
221
222
223 if (mStatus != STATUS_ERROR) {
224 ALOGE("%s: Already initialized!", __FUNCTION__);
225 return INVALID_OPERATION;
226 }
227
228 res = constructStaticInfo();
229 if (res != OK) {
230 ALOGE("%s: Unable to allocate static info: %s (%d)",
231 __FUNCTION__, strerror(-res), res);
232 return res;
233 }
234
235 return EmulatedCamera3::Initialize();
236}
237
238status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
239 ALOGV("%s: E", __FUNCTION__);
240 DBG_LOGA("ddd");
241 Mutex::Autolock l(mLock);
242 status_t res;
243
244 if ((mStatus != STATUS_CLOSED) || !mPlugged) {
245 ALOGE("%s: Can't connect in state %d, mPlugged=%d",
246 __FUNCTION__, mStatus, mPlugged);
247 return INVALID_OPERATION;
248 }
249
250 mSensor = new Sensor();
251 mSensor->setSensorListener(this);
252
253 res = mSensor->startUp(mCameraID);
254 DBG_LOGB("mSensor startUp, mCameraID=%d\n", mCameraID);
255 if (res != NO_ERROR) return res;
256
257 mSupportCap = mSensor->IoctlStateProbe();
258 if (mSupportCap & IOCTL_MASK_ROTATE) {
259 mSupportRotate = true;
260 }
261
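    // Capture pipeline helpers: the readout thread returns completed captures
    // to the framework and drives the JPEG compressor for BLOB (still-capture)
    // streams.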
262 mReadoutThread = new ReadoutThread(this);
263 mJpegCompressor = new JpegCompressor();
264
265 res = mReadoutThread->setJpegCompressorListener(this);
266 if (res != NO_ERROR) {
267 return res;
268 }
269 res = mReadoutThread->startJpegCompressor(this);
270 if (res != NO_ERROR) {
271 return res;
272 }
273
274 res = mReadoutThread->run("EmuCam3::readoutThread");
275 if (res != NO_ERROR) return res;
276
277 // Initialize fake 3A
278
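    // Note: AE starts out CONVERGED rather than INACTIVE, presumably so
    // captures can proceed immediately without waiting for a (simulated) AE scan.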
279 mControlMode = ANDROID_CONTROL_MODE_AUTO;
280 mFacePriority = false;
281 mAeMode = ANDROID_CONTROL_AE_MODE_ON;
282 mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
283 mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
284 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;//ANDROID_CONTROL_AE_STATE_INACTIVE;
285 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
286 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
287 mAfTriggerId = 0;
288 mAeCurrentExposureTime = kNormalExposureTime;
289 mAeCurrentSensitivity = kNormalSensitivity;
290
291 return EmulatedCamera3::connectCamera(device);
292}
293
294status_t EmulatedFakeCamera3::plugCamera() {
295 {
296 Mutex::Autolock l(mLock);
297
298 if (!mPlugged) {
299 CAMHAL_LOGIB("%s: Plugged back in", __FUNCTION__);
300 mPlugged = true;
301 }
302 }
303
304 return NO_ERROR;
305}
306
307status_t EmulatedFakeCamera3::unplugCamera() {
308 {
309 Mutex::Autolock l(mLock);
310
311 if (mPlugged) {
312 CAMHAL_LOGIB("%s: Unplugged camera", __FUNCTION__);
313 mPlugged = false;
314 }
315 }
316
317 return closeCamera();
318}
319
320camera_device_status_t EmulatedFakeCamera3::getHotplugStatus() {
321 Mutex::Autolock l(mLock);
322 return mPlugged ?
323 CAMERA_DEVICE_STATUS_PRESENT :
324 CAMERA_DEVICE_STATUS_NOT_PRESENT;
325}
326
327status_t EmulatedFakeCamera3::closeCamera() {
328 CAMHAL_LOGVB("%s, %d\n", __FUNCTION__, __LINE__);
329 status_t res;
330 {
331 Mutex::Autolock l(mLock);
332 if (mStatus == STATUS_CLOSED) return OK;
333 res = mSensor->streamOff();
334
335 res = mSensor->shutDown();
336 if (res != NO_ERROR) {
337 ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
338 return res;
339 }
340 mSensor.clear();
341
342 res = mReadoutThread->shutdownJpegCompressor(this);
343 if (res != OK) {
344 ALOGE("%s: Unable to shut down JpegCompressor: %d", __FUNCTION__, res);
345 return res;
346 }
347
348 mReadoutThread->requestExit();
349 }
350
351 mReadoutThread->join();
352
353 {
354 Mutex::Autolock l(mLock);
355 // Clear out private stream information
356 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
357 PrivateStreamInfo *privStream =
358 static_cast<PrivateStreamInfo*>((*s)->priv);
359 delete privStream;
360 (*s)->priv = NULL;
361 }
362 mStreams.clear();
363 mReadoutThread.clear();
364 }
365
366 return EmulatedCamera3::closeCamera();
367}
368
369status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
370 char property[PROPERTY_VALUE_MAX];
371 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
372 if (mSensorType == SENSOR_USB) {
373 if (mFacingBack) {
374 property_get("hw.camera.orientation.back", property, "0");
375 } else {
376 property_get("hw.camera.orientation.front", property, "0");
377 }
378 int32_t orientation = atoi(property);
379 property_get("hw.camera.usb.orientation_offset", property, "0");
380 orientation += atoi(property);
381 orientation %= 360;
382 info->orientation = orientation ;
383 } else {
384 if (mFacingBack) {
385 property_get("hw.camera.orientation.back", property, "270");
386 } else {
387 property_get("hw.camera.orientation.front", property, "90");
388 }
389 info->orientation = atoi(property);
390 }
391 return EmulatedCamera3::getCameraInfo(info);
392}
393
394/**
395 * Camera3 interface methods
396 */
397
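// Collect distinct sizes from the packed picSizes table into availablejpegsize
// as {width, height} pairs, skipping any entry whose pixel area duplicates one
// already recorded.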
398void EmulatedFakeCamera3::getValidJpegSize(uint32_t picSizes[], uint32_t availablejpegsize[], int count) {
399 int i,j,k;
400 bool valid = true;
401 for (i=0,j=0; i < count; i+= 4) {
403 for (k = 0; k < j; k += 2) {
403 if ((availablejpegsize[k]*availablejpegsize[k+1]) == (picSizes[i+1]*picSizes[i+2])) {
404
405 valid = false;
406 }
407 }
408 if (valid) {
409 availablejpegsize[j] = picSizes[i+1];
410 availablejpegsize[j+1] = picSizes[i+2];
411 j+=2;
412 }
413 valid = true;
414 }
415}
416
417status_t EmulatedFakeCamera3::checkValidJpegSize(uint32_t width, uint32_t height) {
418
419 int validsizecount = 0;
420 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
421 for (uint32_t f = 0; f < count; f+=2) {
422 if (mAvailableJpegSize[f] != 0) {
423 if ((mAvailableJpegSize[f] == width)&&(mAvailableJpegSize[f+1] == height)) {
424 validsizecount++;
425 }
426 } else {
427 break;
428 }
429 }
430 if (validsizecount == 0)
431 return BAD_VALUE;
432 return OK;
433}
434
435status_t EmulatedFakeCamera3::configureStreams(
436 camera3_stream_configuration *streamList) {
437 Mutex::Autolock l(mLock);
438 uint32_t width, height, pixelfmt;
439 bool isRestart = false;
440 DBG_LOGB("%s: %d streams", __FUNCTION__, streamList->num_streams);
441
442 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
443 ALOGE("%s: Cannot configure streams in state %d",
444 __FUNCTION__, mStatus);
445 return NO_INIT;
446 }
447
448 /**
449 * Sanity-check input list.
450 */
451 if (streamList == NULL) {
452 ALOGE("%s: NULL stream configuration", __FUNCTION__);
453 return BAD_VALUE;
454 }
455
456 if (streamList->streams == NULL) {
457 ALOGE("%s: NULL stream list", __FUNCTION__);
458 return BAD_VALUE;
459 }
460
461 if (streamList->num_streams < 1) {
462 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
463 streamList->num_streams);
464 return BAD_VALUE;
465 }
466
467 camera3_stream_t *inputStream = NULL;
468 for (size_t i = 0; i < streamList->num_streams; i++) {
469 camera3_stream_t *newStream = streamList->streams[i];
470
471 if (newStream == NULL) {
472 ALOGE("%s: Stream index %zu was NULL",
473 __FUNCTION__, i);
474 return BAD_VALUE;
475 }
476
477 if (newStream->max_buffers <= 0) {
478 isRestart = true;//mSensor->isNeedRestart(newStream->width, newStream->height, newStream->format);
479 DBG_LOGB("format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d, isRestart=%d\n",
480 newStream->format, newStream->width, newStream->height,
481 newStream->stream_type, newStream->max_buffers,
482 isRestart);
483 }
484 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
485 __FUNCTION__, newStream, i, newStream->stream_type,
486 newStream->usage,
487 newStream->format);
488
489 if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
490 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
491 if (inputStream != NULL) {
492
493 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
494 return BAD_VALUE;
495 }
496 inputStream = newStream;
497 }
498
499 bool validFormat = false;
500 for (size_t f = 0;
501 f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
502 f++) {
503 if (newStream->format == kAvailableFormats[f]) {
504 validFormat = true;
505 //HAL_PIXEL_FORMAT_YCrCb_420_SP,
506 if (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format)
507 newStream->format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
508
509 break;
510 }
511 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
512 }
513 if (!validFormat) {
514 ALOGE("%s: Unsupported stream format 0x%x requested",
515 __FUNCTION__, newStream->format);
516 return BAD_VALUE;
517 }
518
519 status_t ret = checkValidJpegSize(newStream->width, newStream->height);
520 if (ret != OK) {
521 return BAD_VALUE;
522 }
523
524 }
525 mInputStream = inputStream;
526 width = 0;
527 height = 0;
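    // Pick the largest non-BLOB output resolution among the requested streams;
    // if a restart is needed, the sensor is reconfigured to that single size below.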
528 for (size_t i = 0; i < streamList->num_streams; i++) {
529 camera3_stream_t *newStream = streamList->streams[i];
530 DBG_LOGB("find propert width and height, format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d\n",
531 newStream->format, newStream->width, newStream->height, newStream->stream_type, newStream->max_buffers);
532 if ((HAL_PIXEL_FORMAT_BLOB != newStream->format) &&
533 (CAMERA3_STREAM_OUTPUT == newStream->stream_type)) {
534
535 if (width < newStream->width)
536 width = newStream->width;
537
538 if (height < newStream->height)
539 height = newStream->height;
540
541 pixelfmt = (uint32_t)newStream->format;
542 if (HAL_PIXEL_FORMAT_YCbCr_420_888 == pixelfmt)
543 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
544 }
545
546 }
547
548 //TODO modify this ugly code
549 if (isRestart) {
550 isRestart = mSensor->isNeedRestart(width, height, pixelfmt);
551 }
552
553 if (isRestart) {
554 mSensor->streamOff();
555 pixelfmt = mSensor->halFormatToSensorFormat(pixelfmt);
556 mSensor->setOutputFormat(width, height, pixelfmt, 0);
557 mSensor->streamOn();
558 DBG_LOGB("width=%d, height=%d, pixelfmt=%.4s\n",
559 width, height, (char*)&pixelfmt);
560 }
561
562 /**
563 * Initially mark all existing streams as not alive
564 */
565 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
566 PrivateStreamInfo *privStream =
567 static_cast<PrivateStreamInfo*>((*s)->priv);
568 privStream->alive = false;
569 }
570
571 /**
572 * Find new streams and mark still-alive ones
573 */
574 for (size_t i = 0; i < streamList->num_streams; i++) {
575 camera3_stream_t *newStream = streamList->streams[i];
576 if (newStream->priv == NULL) {
577 // New stream, construct info
578 PrivateStreamInfo *privStream = new PrivateStreamInfo();
579 privStream->alive = true;
580 privStream->registered = false;
581
582 newStream->usage =
583 mSensor->getStreamUsage(newStream->stream_type);
584
585 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
586 newStream->max_buffers = kMaxBufferCount;
587 newStream->priv = privStream;
588 mStreams.push_back(newStream);
589 } else {
590 // Existing stream, mark as still alive.
591 PrivateStreamInfo *privStream =
592 static_cast<PrivateStreamInfo*>(newStream->priv);
593 CAMHAL_LOGDA("Existing stream ?");
594 privStream->alive = true;
595 }
596 DBG_LOGB("%d, newStream=%p, stream_type=%d, usage=%x, priv=%p, w*h=%dx%d\n",
597 i, newStream, newStream->stream_type, newStream->usage, newStream->priv, newStream->width, newStream->height);
598 }
599
600 /**
601 * Reap the dead streams
602 */
603 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
604 PrivateStreamInfo *privStream =
605 static_cast<PrivateStreamInfo*>((*s)->priv);
606 if (!privStream->alive) {
607 DBG_LOGA("delete not alive streams");
608 (*s)->priv = NULL;
609 delete privStream;
610 s = mStreams.erase(s);
611 } else {
612 ++s;
613 }
614 }
615
616 /**
617 * Can't reuse settings across configure call
618 */
619 mPrevSettings.clear();
620
621 return OK;
622}
623
624status_t EmulatedFakeCamera3::registerStreamBuffers(
625 const camera3_stream_buffer_set *bufferSet) {
626 DBG_LOGB("%s: E", __FUNCTION__);
627 Mutex::Autolock l(mLock);
628
629 /**
630 * Sanity checks
631 */
632 DBG_LOGA("==========sanity checks\n");
633
634 // OK: register streams at any time during configure
635 // (but only once per stream)
636 if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) {
637 ALOGE("%s: Cannot register buffers in state %d",
638 __FUNCTION__, mStatus);
639 return NO_INIT;
640 }
641
642 if (bufferSet == NULL) {
643 ALOGE("%s: NULL buffer set!", __FUNCTION__);
644 return BAD_VALUE;
645 }
646
647 StreamIterator s = mStreams.begin();
648 for (; s != mStreams.end(); ++s) {
649 if (bufferSet->stream == *s) break;
650 }
651 if (s == mStreams.end()) {
652 ALOGE("%s: Trying to register buffers for a non-configured stream!",
653 __FUNCTION__);
654 return BAD_VALUE;
655 }
656
657 /**
658 * Register the buffers. This doesn't mean anything to the emulator besides
659 * marking them off as registered.
660 */
661
662 PrivateStreamInfo *privStream =
663 static_cast<PrivateStreamInfo*>((*s)->priv);
664
665#if 0
666 if (privStream->registered) {
667 ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__);
668 return BAD_VALUE;
669 }
670#endif
671
672 privStream->registered = true;
673
674 return OK;
675}
676
677const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
678 int type) {
679 DBG_LOGB("%s: E", __FUNCTION__);
680 Mutex::Autolock l(mLock);
681
682 if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
683 ALOGE("%s: Unknown request settings template: %d",
684 __FUNCTION__, type);
685 return NULL;
686 }
687
688 /**
689 * Cache is not just an optimization - pointer returned has to live at
690 * least as long as the camera device instance does.
691 */
692 if (mDefaultTemplates[type] != NULL) {
693 return mDefaultTemplates[type];
694 }
695
696 CameraMetadata settings;
697
698 /** android.request */
699 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
700 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
701
702 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
703 settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
704
705 static const int32_t id = 0;
706 settings.update(ANDROID_REQUEST_ID, &id, 1);
707
708 static const int32_t frameCount = 0;
709 settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
710
711 /** android.lens */
712
713 static const float focusDistance = 0;
714 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
715
716 static const float aperture = 2.8f;
717 settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
718
719// static const float focalLength = 5.0f;
720 static const float focalLength = 3.299999952316284f;
721 settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
722
723 static const float filterDensity = 0;
724 settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
725
726 static const uint8_t opticalStabilizationMode =
727 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
728 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
729 &opticalStabilizationMode, 1);
730
731 // FOCUS_RANGE set only in frame
732
733 /** android.sensor */
734
735 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
736 settings.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
737 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
738 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
739 static const int64_t exposureTime = 10 * MSEC;
740 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
741
742 int64_t frameDuration = mSensor->getMinFrameDuration();
743 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
744
745 static const int32_t sensitivity = 100;
746 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
747
748 static const int64_t rollingShutterSkew = 0;
749 settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
750 // TIMESTAMP set only in frame
751
752 /** android.flash */
753
754 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
755 settings.update(ANDROID_FLASH_STATE, &flashstate, 1);
756
757 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
758 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
759
760 static const uint8_t flashPower = 10;
761 settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
762
763 static const int64_t firingTime = 0;
764 settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
765
766 /** Processing block modes */
767 uint8_t hotPixelMode = 0;
768 uint8_t demosaicMode = 0;
769 uint8_t noiseMode = 0;
770 uint8_t shadingMode = 0;
771 uint8_t colorMode = 0;
772 uint8_t tonemapMode = 0;
773 uint8_t edgeMode = 0;
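    // Video-snapshot/ZSL get HIGH_QUALITY noise reduction and, together with
    // still capture, HIGH_QUALITY for the remaining processing blocks; the
    // other templates default to FAST.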
774 switch (type) {
775
776 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
777 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
778 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
779 // fall-through
780 case CAMERA3_TEMPLATE_STILL_CAPTURE:
781 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
782 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
783 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
784 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
785 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
786 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
787 break;
788 case CAMERA3_TEMPLATE_PREVIEW:
789 // fall-through
790 case CAMERA3_TEMPLATE_VIDEO_RECORD:
791 // fall-through
792 case CAMERA3_TEMPLATE_MANUAL:
793 // fall-through
794 default:
795 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
796 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
797 noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
798 shadingMode = ANDROID_SHADING_MODE_FAST;
799 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
800 tonemapMode = ANDROID_TONEMAP_MODE_FAST;
801 edgeMode = ANDROID_EDGE_MODE_FAST;
802 break;
803 }
804 settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
805 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
806 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
807 settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
808 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
809 settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
810 settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
811
812 /** android.noise */
813 static const uint8_t noiseStrength = 5;
814 settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
815 static uint8_t availableNBModes[] = {
816 ANDROID_NOISE_REDUCTION_MODE_OFF,
817 ANDROID_NOISE_REDUCTION_MODE_FAST,
818 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
819 };
820 settings.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
821 availableNBModes, sizeof(availableNBModes)/sizeof(availableNBModes[0]));
822
823
824 /** android.color */
825 static const float colorTransform[9] = {
826 1.0f, 0.f, 0.f,
827 0.f, 1.f, 0.f,
828 0.f, 0.f, 1.f
829 };
830 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
831
832 /** android.tonemap */
833 static const float tonemapCurve[4] = {
834 0.f, 0.f,
835 1.f, 1.f
836 };
837 settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
838 settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
839 settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
840
841 /** android.edge */
842 static const uint8_t edgeStrength = 5;
843 settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
844
845 /** android.scaler */
846 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
847 settings.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
848
849 static const int32_t cropRegion[] = {
850 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
851 };
852 settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
853
854 /** android.jpeg */
855 static const uint8_t jpegQuality = 80;
856 settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
857
858 static const int32_t thumbnailSize[2] = {
859 160, 120
860 };
861 settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
862
863 static const uint8_t thumbnailQuality = 80;
864 settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
865
866 static const double gpsCoordinates[3] = {
867 0, 0, 0
868 };
869 settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); // default lat/lng/alt of 0
870
871 static const uint8_t gpsProcessingMethod[32] = "None";
872 settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
873
874 static const int64_t gpsTimestamp = 0;
875 settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
876
877 static const int32_t jpegOrientation = 0;
878 settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
879
880 /** android.stats */
881
882 static const uint8_t faceDetectMode =
883 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
884 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
885
886 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
887 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
888
889 static const uint8_t sharpnessMapMode =
890 ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
891 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
892
893 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
894 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
895 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
896 settings.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
897 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
898 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
899 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
900 // sharpnessMap only in frames
901
902 /** android.control */
903
904 uint8_t controlIntent = 0;
905 uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; //default value
906 uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
907 uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
908 switch (type) {
909 case CAMERA3_TEMPLATE_PREVIEW:
910 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
911 break;
912 case CAMERA3_TEMPLATE_STILL_CAPTURE:
913 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
914 break;
915 case CAMERA3_TEMPLATE_VIDEO_RECORD:
916 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
917 break;
918 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
919 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
920 break;
921 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
922 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
923 break;
924 case CAMERA3_TEMPLATE_MANUAL:
925 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
926 controlMode = ANDROID_CONTROL_MODE_OFF;
927 aeMode = ANDROID_CONTROL_AE_MODE_OFF;
928 awbMode = ANDROID_CONTROL_AWB_MODE_OFF;
929 break;
930 default:
931 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
932 break;
933 }
934 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
935 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
936
937 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
938 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
939
940 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
941 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
942
943 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
944
945 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
946 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
947
948 static const uint8_t aePrecaptureTrigger =
949 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
950 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
951
952 static const int32_t mAfTriggerId = 0;
953 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,&mAfTriggerId, 1);
954 static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
955 settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
956
957 static const int32_t controlRegions[5] = {
958 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
959 1000
960 };
961// settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
962
963 static const int32_t aeExpCompensation = 0;
964 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
965
966 static const int32_t aeTargetFpsRange[2] = {
967 30, 30
968 };
969 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
970
971 static const uint8_t aeAntibandingMode =
972 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
973 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
974
975 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
976
977 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
978 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
979
980// settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
981
982 uint8_t afMode = 0;
983 switch (type) {
984 case CAMERA3_TEMPLATE_PREVIEW:
985 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
986 break;
987 case CAMERA3_TEMPLATE_STILL_CAPTURE:
988 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
989 break;
990 case CAMERA3_TEMPLATE_VIDEO_RECORD:
991 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
992 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
993 break;
994 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
995 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
996 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
997 break;
998 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
999 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1000 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
1001 break;
1002 case CAMERA3_TEMPLATE_MANUAL:
1003 afMode = ANDROID_CONTROL_AF_MODE_OFF;
1004 break;
1005 default:
1006 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1007 break;
1008 }
1009 settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
1010
1011 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1012 settings.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
1013
1014// settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
1015
1016 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
1017 settings.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
1018 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1019 settings.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
1020 static const uint8_t vstabMode =
1021 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
1022 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
1023
1024 // aeState, awbState, afState only in frame
1025
1026 mDefaultTemplates[type] = settings.release();
1027
1028 return mDefaultTemplates[type];
1029}
1030
1031status_t EmulatedFakeCamera3::processCaptureRequest(
1032 camera3_capture_request *request) {
1033
1034 Mutex::Autolock l(mLock);
1035 status_t res;
1036
1037 /** Validation */
1038
1039 if (mStatus < STATUS_READY) {
1040 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
1041 mStatus);
1042 return INVALID_OPERATION;
1043 }
1044
1045 if (request == NULL) {
1046 ALOGE("%s: NULL request!", __FUNCTION__);
1047 return BAD_VALUE;
1048 }
1049
1050 uint32_t frameNumber = request->frame_number;
1051
1052 if (request->settings == NULL && mPrevSettings.isEmpty()) {
1053 ALOGE("%s: Request %d: NULL settings for first request after"
1054 "configureStreams()", __FUNCTION__, frameNumber);
1055 return BAD_VALUE;
1056 }
1057
1058 if (request->input_buffer != NULL &&
1059 request->input_buffer->stream != mInputStream) {
1060 DBG_LOGB("%s: Request %d: Input buffer not from input stream!",
1061 __FUNCTION__, frameNumber);
1062 DBG_LOGB("%s: Bad stream %p, expected: %p",
1063 __FUNCTION__, request->input_buffer->stream,
1064 mInputStream);
1065 DBG_LOGB("%s: Bad stream type %d, expected stream type %d",
1066 __FUNCTION__, request->input_buffer->stream->stream_type,
1067 mInputStream ? mInputStream->stream_type : -1);
1068
1069 return BAD_VALUE;
1070 }
1071
1072 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1073 ALOGE("%s: Request %d: No output buffers provided!",
1074 __FUNCTION__, frameNumber);
1075 return BAD_VALUE;
1076 }
1077
1078 // Validate all buffers, starting with input buffer if it's given
1079
1080 ssize_t idx;
1081 const camera3_stream_buffer_t *b;
1082 if (request->input_buffer != NULL) {
1083 idx = -1;
1084 b = request->input_buffer;
1085 } else {
1086 idx = 0;
1087 b = request->output_buffers;
1088 }
1089 do {
1090 PrivateStreamInfo *priv =
1091 static_cast<PrivateStreamInfo*>(b->stream->priv);
1092 if (priv == NULL) {
1093 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
1094 __FUNCTION__, frameNumber, idx);
1095 return BAD_VALUE;
1096 }
1097#if 0
1098 if (!priv->alive || !priv->registered) {
1099 ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream! alive=%d, registered=%d\n",
1100 __FUNCTION__, frameNumber, idx,
1101 priv->alive, priv->registered);
1102 //return BAD_VALUE;
1103 }
1104#endif
1105 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1106 ALOGE("%s: Request %d: Buffer %zd: Status not OK!",
1107 __FUNCTION__, frameNumber, idx);
1108 return BAD_VALUE;
1109 }
1110 if (b->release_fence != -1) {
1111 ALOGE("%s: Request %d: Buffer %zd: Has a release fence!",
1112 __FUNCTION__, frameNumber, idx);
1113 return BAD_VALUE;
1114 }
1115 if (b->buffer == NULL) {
1116 ALOGE("%s: Request %d: Buffer %zd: NULL buffer handle!",
1117 __FUNCTION__, frameNumber, idx);
1118 return BAD_VALUE;
1119 }
1120 idx++;
1121 b = &(request->output_buffers[idx]);
1122 } while (idx < (ssize_t)request->num_output_buffers);
1123
1124 // TODO: Validate settings parameters
1125
1126 /**
1127 * Start processing this request
1128 */
1129
1130 mStatus = STATUS_ACTIVE;
1131
1132 CameraMetadata settings;
1133 camera_metadata_entry e;
1134
1135 if (request->settings == NULL) {
1136 settings.acquire(mPrevSettings);
1137 } else {
1138 settings = request->settings;
1139
1140 uint8_t antiBanding = 0;
1141 uint8_t effectMode = 0;
1142 int exposureCmp = 0;
1143
1144 e = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE);
1145 if (e.count == 0) {
1146 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1147 return BAD_VALUE;
1148 }
1149 antiBanding = e.data.u8[0];
1150 mSensor->setAntiBanding(antiBanding);
1151
1152 e = settings.find(ANDROID_CONTROL_EFFECT_MODE);
1153 if (e.count == 0) {
1154 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1155 return BAD_VALUE;
1156 }
1157 effectMode = e.data.u8[0];
1158 mSensor->setEffect(effectMode);
1159
1160
1161 e = settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
1162 if (e.count == 0) {
1163 ALOGE("%s: No exposure entry!", __FUNCTION__);
1164 //return BAD_VALUE;
1165 } else {
1166 exposureCmp = e.data.i32[0];
1167 DBG_LOGB("set expsore compensaton %d\n", exposureCmp);
1168 mSensor->setExposure(exposureCmp);
1169 }
1170
1171 int32_t cropRegion[4];
1172 int32_t cropWidth;
1173 int32_t outputWidth = request->output_buffers[0].stream->width;
1174
1175 e = settings.find(ANDROID_SCALER_CROP_REGION);
1176 if (e.count == 0) {
1177 ALOGE("%s: No corp region entry!", __FUNCTION__);
1178 //return BAD_VALUE;
1179 } else {
1180 cropRegion[0] = e.data.i32[0];
1181 cropRegion[1] = e.data.i32[1];
1182 cropWidth = cropRegion[2] = e.data.i32[2];
1183 cropRegion[3] = e.data.i32[3];
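            // Choose the smallest zoom step whose magnification (relative to
            // mZoomMin) covers the requested crop, i.e. i/mZoomMin >=
            // outputWidth/cropWidth, rewritten without floating point as
            // i*cropWidth >= outputWidth*mZoomMin.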
1184 for (int i = mZoomMin; i <= mZoomMax; i += mZoomStep) {
1185 //if ( (float) i / mZoomMin >= (float) outputWidth / cropWidth) {
1186 if ( i * cropWidth >= outputWidth * mZoomMin ) {
1187 mSensor->setZoom(i);
1188 break;
1189 }
1190 }
1191 DBG_LOGB("cropRegion:%d, %d, %d, %d\n", cropRegion[0], cropRegion[1],cropRegion[2],cropRegion[3]);
1192 }
1193 }
1194
1195 uint8_t len[] = {1};
1196 settings.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
1197
1198 uint8_t maxlen[] = {0};
1199 settings.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
1200
1201 res = process3A(settings);
1202 if (res != OK) {
1203 CAMHAL_LOGDB("%s: process3A failed!", __FUNCTION__);
1204 //return res;
1205 }
1206
1207 // TODO: Handle reprocessing
1208
1209 /**
1210 * Get ready for sensor config
1211 */
1212
1213 nsecs_t exposureTime;
1214 nsecs_t frameDuration;
1215 uint32_t sensitivity;
1216 bool needJpeg = false;
1217 struct ExifInfo info;
1218 ssize_t jpegbuffersize;
1219 uint32_t jpegpixelfmt;
1220 bool mHaveThumbnail = false;
1221
1222 exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
1223 frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
1224 sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
1225
1226 Buffers *sensorBuffers = new Buffers();
1227 HalBufferVector *buffers = new HalBufferVector();
1228
1229 sensorBuffers->setCapacity(request->num_output_buffers);
1230 buffers->setCapacity(request->num_output_buffers);
1231
1232 // Process all the buffers we got for output, constructing internal buffer
1233 // structures for them, and lock them for writing.
1234 for (size_t i = 0; i < request->num_output_buffers; i++) {
1235 const camera3_stream_buffer &srcBuf = request->output_buffers[i];
1236 const private_handle_t *privBuffer =
1237 (const private_handle_t*)(*srcBuf.buffer);
1238 StreamBuffer destBuf;
1239 destBuf.streamId = kGenericStreamId;
1240 destBuf.width = srcBuf.stream->width;
1241 destBuf.height = srcBuf.stream->height;
1242 destBuf.format = privBuffer->format; // Use real private format
1243 destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc
1244 destBuf.buffer = srcBuf.buffer;
1245 destBuf.share_fd = privBuffer->share_fd;
1246
1247 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
1248 needJpeg = true;
1249 memset(&info,0,sizeof(struct ExifInfo));
1250 info.orientation = settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
1251 jpegpixelfmt = mSensor->getOutputFormat();
1252 if (!mSupportRotate) {
1253 info.mainwidth = srcBuf.stream->width;
1254 info.mainheight = srcBuf.stream->height;
1255 } else {
1256 if ((info.orientation == 90) || (info.orientation == 270)) {
1257 info.mainwidth = srcBuf.stream->height;
1258 info.mainheight = srcBuf.stream->width;
1259 } else {
1260 info.mainwidth = srcBuf.stream->width;
1261 info.mainheight = srcBuf.stream->height;
1262 }
1263 }
1264 if ((jpegpixelfmt == V4L2_PIX_FMT_MJPEG)||(jpegpixelfmt == V4L2_PIX_FMT_YUYV)) {
1265 mSensor->setOutputFormat(info.mainwidth,info.mainheight,jpegpixelfmt,1);
1266 } else {
1267 mSensor->setOutputFormat(info.mainwidth,info.mainheight,V4L2_PIX_FMT_RGB24,1);
1268 }
1269 }
1270
1271 // Wait on fence
1272 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
1273 res = bufferAcquireFence->wait(kFenceTimeoutMs);
1274 if (res == TIMED_OUT) {
1275 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
1276 __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
1277 }
1278 if (res == OK) {
1279 // Lock buffer for writing
1280 const Rect rect(destBuf.width, destBuf.height);
1281 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1282 if (privBuffer->format == HAL_PIXEL_FORMAT_YCbCr_420_888/*HAL_PIXEL_FORMAT_YCrCb_420_SP*/) {
1283 android_ycbcr ycbcr = android_ycbcr();
1284 res = GraphicBufferMapper::get().lockYCbCr(
1285 *(destBuf.buffer),
1286 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK
1287 , rect,
1288 &ycbcr);
1289 // This is only valid because we know that emulator's
1290 // YCbCr_420_888 is really contiguous NV21 under the hood
1291 destBuf.img = static_cast<uint8_t*>(ycbcr.y);
1292 } else {
1293 ALOGE("Unexpected private format for flexible YUV: 0x%x",
1294 privBuffer->format);
1295 res = INVALID_OPERATION;
1296 }
1297 } else {
1298 res = GraphicBufferMapper::get().lock(*(destBuf.buffer),
1299 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK
1300 , rect,
1301 (void**)&(destBuf.img));
1302 }
1303 if (res != OK) {
1304 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
1305 __FUNCTION__, frameNumber, i);
1306 }
1307 }
1308
1309 if (res != OK) {
1310 // Either waiting or locking failed. Unlock locked buffers and bail
1311 // out.
1312 for (size_t j = 0; j < i; j++) {
1313 GraphicBufferMapper::get().unlock(
1314 *(request->output_buffers[j].buffer));
1315 }
1316 ALOGE("line:%d, format for this usage: %d x %d, usage %x, format=%x, returned\n",
1317 __LINE__, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format);
1318 return NO_INIT;
1319 }
1320
1321 sensorBuffers->push_back(destBuf);
1322 buffers->push_back(srcBuf);
1323 }
1324
1325 if (needJpeg) {
1326 if (!mSupportRotate) {
1327 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1328 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1329 } else {
1330 if ((info.orientation == 90) || (info.orientation == 270)) {
1331 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1332 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1333 } else {
1334 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1335 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1336 }
1337 }
1338 if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
1339 info.latitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[0];
1340 info.longitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[1];
1341 info.altitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[2];
1342 info.has_latitude = true;
1343 info.has_longitude = true;
1344 info.has_altitude = true;
1345 } else {
1346 info.has_latitude = false;
1347 info.has_longitude = false;
1348 info.has_altitude = false;
1349 }
1350 if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
1351 info.gpsProcessingMethod = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
1352 info.has_gpsProcessingMethod = true;
1353 } else {
1354 info.has_gpsProcessingMethod = false;
1355 }
1356 if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
1357 info.gpsTimestamp = settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
1358 info.has_gpsTimestamp = true;
1359 } else {
1360 info.has_gpsTimestamp = false;
1361 }
1362 if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
1363 info.focallen = settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
1364 info.has_focallen = true;
1365 } else {
1366 info.has_focallen = false;
1367 }
1368 jpegbuffersize = getJpegBufferSize(info.mainwidth,info.mainheight);
1369
1370 mJpegCompressor->SetMaxJpegBufferSize(jpegbuffersize);
1371 mJpegCompressor->SetExifInfo(info);
1372 mSensor->setPictureRotate(info.orientation);
1373 if ((info.thumbwidth > 0) && (info.thumbheight > 0)) {
1374 mHaveThumbnail = true;
1375 }
1376 DBG_LOGB("%s::thumbnailSize_width=%d,thumbnailSize_height=%d,mainsize_width=%d,mainsize_height=%d,jpegOrientation=%d",__FUNCTION__,
1377 info.thumbwidth,info.thumbheight,info.mainwidth,info.mainheight,info.orientation);
1378 }
1379 /**
1380 * Wait for JPEG compressor to not be busy, if needed
1381 */
1382#if 0
1383 if (needJpeg) {
1384 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1385 if (!ready) {
1386 ALOGE("%s: Timeout waiting for JPEG compression to complete!",
1387 __FUNCTION__);
1388 return NO_INIT;
1389 }
1390 }
1391#else
1392 while (needJpeg) {
1393 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1394 if (ready) {
1395 break;
1396 }
1397 }
1398#endif
1399 /**
1400 * Wait until the in-flight queue has room
1401 */
1402 res = mReadoutThread->waitForReadout();
1403 if (res != OK) {
1404 ALOGE("%s: Timeout waiting for previous requests to complete!",
1405 __FUNCTION__);
1406 return NO_INIT;
1407 }
1408
1409 /**
1410 * Wait until sensor's ready. This waits for lengthy amounts of time with
1411 * mLock held, but the interface spec is that no other calls may by done to
1412 * the HAL by the framework while process_capture_request is happening.
1413 */
1414 int syncTimeoutCount = 0;
1415 while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
1416 if (mStatus == STATUS_ERROR) {
1417 return NO_INIT;
1418 }
1419 if (syncTimeoutCount == kMaxSyncTimeoutCount) {
1420 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
1421 __FUNCTION__, frameNumber,
1422 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
1423 return NO_INIT;
1424 }
1425 syncTimeoutCount++;
1426 }
1427
1428 /**
1429 * Configure sensor and queue up the request to the readout thread
1430 */
1431 mSensor->setExposureTime(exposureTime);
1432 mSensor->setFrameDuration(frameDuration);
1433 mSensor->setSensitivity(sensitivity);
1434 mSensor->setDestinationBuffers(sensorBuffers);
1435 mSensor->setFrameNumber(request->frame_number);
1436
1437 ReadoutThread::Request r;
1438 r.frameNumber = request->frame_number;
1439 r.settings = settings;
1440 r.sensorBuffers = sensorBuffers;
1441 r.buffers = buffers;
1442 r.havethumbnail = mHaveThumbnail;
1443
1444 mReadoutThread->queueCaptureRequest(r);
1445 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1446
1447 // Cache the settings for next time
1448 mPrevSettings.acquire(settings);
1449
1450 return OK;
1451}
1452
1453/** Debug methods */
1454
1455void EmulatedFakeCamera3::dump(int fd) {
1456
1457 String8 result;
1458 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
1459 result = String8::format("%s, valid resolution\n", __FILE__);
1460
1461 for (uint32_t f = 0; f < count; f+=2) {
1462 if (mAvailableJpegSize[f] == 0)
1463 break;
1464 result.appendFormat("width: %d , height =%d\n",
1465 mAvailableJpegSize[f], mAvailableJpegSize[f+1]);
1466 }
1467 result.appendFormat("\nmZoomMin: %d , mZoomMax =%d, mZoomStep=%d\n",
1468 mZoomMin, mZoomMax, mZoomStep);
1469
1470 if (mZoomStep <= 0) {
1471 result.appendFormat("!!!!!!!!!camera apk may have no picture out\n");
1472 }
1473
1474 write(fd, result.string(), result.size());
1475
1476 if (mSensor.get() != NULL) {
1477 mSensor->dump(fd);
1478 }
1479
1480}
1481// Flush all in-flight requests.
1482// TODO: immediately return the buffers held by every in-flight request,
1483// flagged with CAMERA3_BUFFER_STATUS_ERROR.
1484int EmulatedFakeCamera3::flush_all_requests() {
1485 DBG_LOGA("flush all request");
1486 return 0;
1487}
1488/** Tag query methods */
1489const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) {
1490 return NULL;
1491}
1492
1493const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) {
1494 return NULL;
1495}
1496
1497int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
1498 return 0;
1499}
1500
1501/**
1502 * Private methods
1503 */
1504
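// Look up a static-metadata entry, logging an error if a required tag is
// missing or its count falls outside [minCount, maxCount]; the (possibly
// empty) entry is returned either way.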
1505camera_metadata_ro_entry_t EmulatedFakeCamera3::staticInfo(const CameraMetadata *info, uint32_t tag,
1506 size_t minCount, size_t maxCount, bool required) const {
1507
1508 camera_metadata_ro_entry_t entry = info->find(tag);
1509
1510 if (CC_UNLIKELY( entry.count == 0 ) && required) {
1511 const char* tagSection = get_camera_metadata_section_name(tag);
1512 if (tagSection == NULL) tagSection = "<unknown>";
1513 const char* tagName = get_camera_metadata_tag_name(tag);
1514 if (tagName == NULL) tagName = "<unknown>";
1515
1516 ALOGE("Error finding static metadata entry '%s.%s' (%x)",
1517 tagSection, tagName, tag);
1518 } else if (CC_UNLIKELY(
1519 (minCount != 0 && entry.count < minCount) ||
1520 (maxCount != 0 && entry.count > maxCount) ) ) {
1521 const char* tagSection = get_camera_metadata_section_name(tag);
1522 if (tagSection == NULL) tagSection = "<unknown>";
1523 const char* tagName = get_camera_metadata_tag_name(tag);
1524 if (tagName == NULL) tagName = "<unknown>";
1525 ALOGE("Malformed static metadata entry '%s.%s' (%x):"
1526 "Expected between %zu and %zu values, but got %zu values",
1527 tagSection, tagName, tag, minCount, maxCount, entry.count);
1528 }
1529
1530 return entry;
1531}
1532
1533//this is only for debug
1534void EmulatedFakeCamera3::getStreamConfigurationp(CameraMetadata *info) {
1535 const int STREAM_CONFIGURATION_SIZE = 4;
1536 const int STREAM_FORMAT_OFFSET = 0;
1537 const int STREAM_WIDTH_OFFSET = 1;
1538 const int STREAM_HEIGHT_OFFSET = 2;
1539 const int STREAM_IS_INPUT_OFFSET = 3;
1540
1541 camera_metadata_ro_entry_t availableStreamConfigs =
1542 staticInfo(info, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
1543 CAMHAL_LOGDB(" stream, availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1544
1545 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1546 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
1547 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
1548 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
1549 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
1550 CAMHAL_LOGDB("f=%x, w*h=%dx%d, du=%d\n", format, width, height, isInput);
1551 }
1552
1553}
1554
1555//this is only for debug
1556void EmulatedFakeCamera3::getStreamConfigurationDurations(CameraMetadata *info) {
1557 const int STREAM_CONFIGURATION_SIZE = 4;
1558 const int STREAM_FORMAT_OFFSET = 0;
1559 const int STREAM_WIDTH_OFFSET = 1;
1560 const int STREAM_HEIGHT_OFFSET = 2;
1561 const int STREAM_IS_INPUT_OFFSET = 3;
1562
1563 camera_metadata_ro_entry_t availableStreamConfigs =
1564 staticInfo(info, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
1565 CAMHAL_LOGDB("availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1566
1567 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1568 int64_t format = availableStreamConfigs.data.i64[i + STREAM_FORMAT_OFFSET];
1569 int64_t width = availableStreamConfigs.data.i64[i + STREAM_WIDTH_OFFSET];
1570 int64_t height = availableStreamConfigs.data.i64[i + STREAM_HEIGHT_OFFSET];
1571 int64_t isInput = availableStreamConfigs.data.i64[i + STREAM_IS_INPUT_OFFSET];
1572 CAMHAL_LOGDB("f=%llx, w*h=%lldx%lld, du=%lld\n", format, width, height, isInput);
1573 }
1574}
1575
1576void EmulatedFakeCamera3::updateCameraMetaData(CameraMetadata *info) {
1577
1578}
1579
1580status_t EmulatedFakeCamera3::constructStaticInfo() {
1581
1582 CameraMetadata info;
1583 uint32_t picSizes[64 * 8];
1584 int64_t* duration = NULL;
1585 int count, duration_count, availablejpegsize;
1586 uint8_t maxCount = 10;
1587 char property[PROPERTY_VALUE_MAX];
1588 unsigned int supportrotate;
1589 availablejpegsize = ARRAY_SIZE(mAvailableJpegSize);
1590 memset(mAvailableJpegSize,0,(sizeof(uint32_t))*availablejpegsize);
1591 sp<Sensor> s = new Sensor();
1592 s->startUp(mCameraID);
1593 mSensorType = s->getSensorType();
1594
1595 if (mSensorType == SENSOR_USB) {
1597 property_get("rw.camera.usb.faceback", property, "false");
1598 if (strstr(property, "true"))
1599 mFacingBack = 1;
1600 else
1601 mFacingBack = 0;
1602 ALOGI("Setting usb camera cameraID:%d to back camera:%s\n",
1603 mCameraID, property);
1604 } else {
1605 if (s->mSensorFace == SENSOR_FACE_FRONT) {
1606 mFacingBack = 0;
1607 } else if (s->mSensorFace == SENSOR_FACE_BACK) {
1608 mFacingBack = 1;
1609 } else if (s->mSensorFace == SENSOR_FACE_NONE) {
1610 if (gEmulatedCameraFactory.getEmulatedCameraNum() == 1) {
1611 mFacingBack = 1;
1612 } else if ( mCameraID == 0) {
1613 mFacingBack = 1;
1614 } else {
1615 mFacingBack = 0;
1616 }
1617 }
1618
1619 ALOGI("Setting on board camera cameraID:%d to back camera:%d[0 false, 1 true]\n",
1620 mCameraID, mFacingBack);
1621 }
1622
1623 mSupportCap = s->IoctlStateProbe();
1624 if (mSupportCap & IOCTL_MASK_ROTATE) {
1625 supportrotate = true;
1626 } else {
1627 supportrotate = false;
1628 }
1629 // android.lens
1630
1631 // Fixed focus for now: a minimum focus distance of 0 means fixed-focus/infinity
1632 // TODO read the real minimum focus distance from the camera driver via ioctl
1633 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
1634 const float minFocusDistance = 0.0;
1635 info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1636 &minFocusDistance, 1);
1637
1638 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1639 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
1640 info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1641 &hyperFocalDistance, 1);
1642
1643 static const float focalLength = 3.30f; // mm
1644 info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1645 &focalLength, 1);
1646 static const float aperture = 2.8f;
1647 info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1648 &aperture, 1);
1649 static const float filterDensity = 0;
1650 info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1651 &filterDensity, 1);
1652 static const uint8_t availableOpticalStabilization =
1653 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1654 info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1655 &availableOpticalStabilization, 1);
1656
1657 static const int32_t lensShadingMapSize[] = {1, 1};
1658 info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1659 sizeof(lensShadingMapSize)/sizeof(int32_t));
1660
1661 uint8_t lensFacing = mFacingBack ?
1662 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1663 info.update(ANDROID_LENS_FACING, &lensFacing, 1);
1664
1665 float lensPosition[3];
1666 if (mFacingBack) {
1667 // Back-facing camera is center-top on device
1668 lensPosition[0] = 0;
1669 lensPosition[1] = 20;
1670 lensPosition[2] = -5;
1671 } else {
1672 // Front-facing camera is center-right on device
1673 lensPosition[0] = 20;
1674 lensPosition[1] = 20;
1675 lensPosition[2] = 0;
1676 }
1677 info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
1678 sizeof(float));
1679 static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
1680 info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1);
1681
1682 // android.sensor
1683
1684 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1685 info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
1686 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1687 info.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
1688 info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1689 Sensor::kExposureTimeRange, 2);
1690
1691 info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1692 &Sensor::kFrameDurationRange[1], 1);
1693
1694 info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1695 Sensor::kSensitivityRange,
1696 sizeof(Sensor::kSensitivityRange)
1697 /sizeof(int32_t));
1698
1699 info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1700 &Sensor::kColorFilterArrangement, 1);
1701
1702 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1703 info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1704 sensorPhysicalSize, 2);
1705
1706 info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1707 (int32_t*)Sensor::kResolution, 2);
1708
1710
1711 info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1712 (int32_t*)&Sensor::kMaxRawValue, 1);
1713
1714 static const int32_t blackLevelPattern[4] = {
1715 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1716 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
1717 };
1718 info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1719 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
1720
1721 static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
1722 info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
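// The reported orientation is derived from system properties: USB sensors use the
// per-facing base (hw.camera.orientation.back/front, default 0) plus
// hw.camera.usb.orientation_offset, reduced modulo 360; for example, a base of 90
// with an offset of 180 is reported as 270. On-board sensors use the per-facing
// property alone, defaulting to 270 (back) or 90 (front).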
1723 if (mSensorType == SENSOR_USB) {
1724 if (mFacingBack) {
1725 property_get("hw.camera.orientation.back", property, "0");
1726 } else {
1727 property_get("hw.camera.orientation.front", property, "0");
1728 }
1729 int32_t orientation = atoi(property);
1730 property_get("hw.camera.usb.orientation_offset", property, "0");
1731 orientation += atoi(property);
1732 orientation %= 360;
1733 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1734 } else {
1735 if (mFacingBack) {
1736 property_get("hw.camera.orientation.back", property, "270");
1737 const int32_t orientation = atoi(property);
1738 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1739 } else {
1740 property_get("hw.camera.orientation.front", property, "90");
1741 const int32_t orientation = atoi(property);
1742 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1743 }
1744 }
1745
1746 static const int64_t rollingShutterSkew = 0;
1747 info.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
1748
1749 //TODO: sensor color calibration fields
1750
1751 // android.flash
1752 static const uint8_t flashAvailable = 0;
1753 info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1754
1755 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
1756 info.update(ANDROID_FLASH_STATE, &flashstate, 1);
1757
1758 static const int64_t flashChargeDuration = 0;
1759 info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
1760
1761 /** android.noise */
1762 static const uint8_t availableNBModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
1763 info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &availableNBModes, 1);
1764
1765 // android.tonemap
1766
1767 static const int32_t tonemapCurvePoints = 128;
1768 info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1769
1770 // android.scaler
1771
1772 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
1773 info.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
1774
1775 info.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1776 kAvailableFormats,
1777 sizeof(kAvailableFormats)/sizeof(int32_t));
1778
1779 info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1780 (int64_t*)kAvailableRawMinDurations,
1781 sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
1782
1783 //for version 3.2 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
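// Each stream configuration entry packs four int32 values (format, width, height,
// input/output direction), so the count returned by getStreamConfigurations() below
// is a multiple of 4.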
1784 count = sizeof(picSizes)/sizeof(picSizes[0]);
1785 count = s->getStreamConfigurations(picSizes, kAvailableFormats, count);
1786
1787 info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1788 (int32_t*)picSizes, count);
1789
1790 if (count < availablejpegsize) {
1791 availablejpegsize = count;
1792 }
1793 getValidJpegSize(picSizes,mAvailableJpegSize,availablejpegsize);
1794
1795 maxJpegResolution = getMaxJpegResolution(picSizes,count);
1796 int32_t full_size[4];
1797 // Both facings currently report the maximum JPEG resolution as the active array size
1798 full_size[0] = 0;
1799 full_size[1] = 0;
1800 full_size[2] = maxJpegResolution.width;
1801 full_size[3] = maxJpegResolution.height;
1808 info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1809 (int32_t*)full_size,
1810 sizeof(full_size)/sizeof(full_size[0]));
1811 duration = new int64_t[count];
1812 if (duration == NULL) {
1813 DBG_LOGA("allocate memory for duration failed");
1814 return NO_MEMORY;
1815 } else {
1816 memset(duration,0,sizeof(int64_t)*count);
1817 }
1818 duration_count = s->getStreamConfigurationDurations(picSizes, duration , count);
1819
1820 info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1821 duration, duration_count);
1822 info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1823 duration, duration_count);
1824
1825 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1826 (int64_t*)kAvailableProcessedMinDurations,
1827 sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
1828
1829 info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
1830 (int64_t*)kAvailableJpegMinDurations,
1831 sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
1832
1833
1834 // android.jpeg
1835
1836 static const int32_t jpegThumbnailSizes[] = {
1837 0, 0,
1838 160, 120,
1839 320, 240
1840 };
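// Thumbnail sizes are (width, height) pairs; the mandatory (0, 0) entry means
// "no thumbnail" per the JPEG thumbnail metadata convention.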
1841 info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1842 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
1843
1844 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1845 info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1846
1847 // android.stats
1848
1849 static const uint8_t availableFaceDetectModes[] = {
1850 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1851 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1852 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
1853 };
1854
1855 info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1856 availableFaceDetectModes,
1857 sizeof(availableFaceDetectModes));
1858
1859 static const int32_t maxFaceCount = 8;
1860 info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1861 &maxFaceCount, 1);
1862
1863 static const int32_t histogramSize = 64;
1864 info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1865 &histogramSize, 1);
1866
1867 static const int32_t maxHistogramCount = 1000;
1868 info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1869 &maxHistogramCount, 1);
1870
1871 static const int32_t sharpnessMapSize[2] = {64, 64};
1872 info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1873 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
1874
1875 static const int32_t maxSharpnessMapValue = 1000;
1876 info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1877 &maxSharpnessMapValue, 1);
1878 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
1879 info.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
1880
1881 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
1882 info.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
1883 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
1884 info.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
1885 // android.control
1886
1887 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
1888 info.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
1889
1890 static const uint8_t availableSceneModes[] = {
1891 // ANDROID_CONTROL_SCENE_MODE_DISABLED,
1892 ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY
1893 };
1894 info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1895 availableSceneModes, sizeof(availableSceneModes));
1896
1897 static const uint8_t availableEffects[] = {
1898 ANDROID_CONTROL_EFFECT_MODE_OFF
1899 };
1900 info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1901 availableEffects, sizeof(availableEffects));
1902
1903 static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
1904 info.update(ANDROID_CONTROL_MAX_REGIONS,
1905 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
1906
1907 static const uint8_t availableAeModes[] = {
1908 ANDROID_CONTROL_AE_MODE_OFF,
1909 ANDROID_CONTROL_AE_MODE_ON
1910 };
1911 info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1912 availableAeModes, sizeof(availableAeModes));
1913
1914
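// Advertised AE target FPS ranges, as (min, max) pairs: [5,15], [15,15], [5,30], [30,30].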
1915 static const int32_t availableTargetFpsRanges[] = {
1916 5, 15, 15, 15, 5, 30, 30, 30,
1917 };
1918 info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1919 availableTargetFpsRanges,
1920 sizeof(availableTargetFpsRanges)/sizeof(int32_t));
1921
1922 uint8_t awbModes[maxCount];
1923 count = s->getAWB(awbModes, maxCount);
1924 if (count < 0) {
1925 static const uint8_t availableAwbModes[] = {
1926 ANDROID_CONTROL_AWB_MODE_OFF,
1927 ANDROID_CONTROL_AWB_MODE_AUTO,
1928 ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1929 ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1930 ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1931 ANDROID_CONTROL_AWB_MODE_SHADE
1932 };
1933 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1934 availableAwbModes, sizeof(availableAwbModes));
1935 } else {
1936 DBG_LOGB("getAWB %d ",count);
1937 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1938 awbModes, count);
1939 }
1940
1941 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1942 info.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
1943
1944 static const uint8_t availableAfModesFront[] = {
1945 ANDROID_CONTROL_AF_MODE_OFF
1946 };
1947
1948 if (mFacingBack) {
1949 uint8_t afMode[maxCount];
1950 count = s->getAutoFocus(afMode, maxCount);
1951 if (count < 0) {
1952 static const uint8_t availableAfModesBack[] = {
1953 ANDROID_CONTROL_AF_MODE_OFF,
1954 //ANDROID_CONTROL_AF_MODE_AUTO,
1955 //ANDROID_CONTROL_AF_MODE_MACRO,
1956 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
1957 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
1958 };
1959
1960 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1961 availableAfModesBack, sizeof(availableAfModesBack));
1962 } else {
1963 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1964 afMode, count);
1965 }
1966 } else {
1967 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1968 availableAfModesFront, sizeof(availableAfModesFront));
1969 }
1970
1971 uint8_t antiBanding[maxCount];
1972 count = s->getAntiBanding(antiBanding, maxCount);
1973 if (count < 0) {
1974 static const uint8_t availableAntibanding[] = {
1975 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
1976 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
1977 };
1978 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1979 availableAntibanding, sizeof(availableAntibanding));
1980 } else {
1981 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1982 antiBanding, count);
1983 }
1984
1985 camera_metadata_rational step;
1986 int maxExp, minExp, def, ret;
1987 ret = s->getExposure(&maxExp, &minExp, &def, &step);
1988 if (ret < 0) {
1989 static const int32_t aeExpCompensation = 0;
1990 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
1991
1992 static const camera_metadata_rational exposureCompensationStep = {
1993 1, 3
1994 };
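// The compensation step is a camera_metadata_rational; {1, 3} denotes 1/3 EV per step.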
1995 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1996 &exposureCompensationStep, 1);
1997
1998 int32_t exposureCompensationRange[] = {0, 0};
1999 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2000 exposureCompensationRange,
2001 sizeof(exposureCompensationRange)/sizeof(int32_t));
2002 } else {
2003 DBG_LOGB("exposure compensation support:(%d, %d)\n", minExp, maxExp);
2004 int32_t exposureCompensationRange[] = {minExp, maxExp};
2005 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2006 exposureCompensationRange,
2007 sizeof(exposureCompensationRange)/sizeof(int32_t));
2008 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2009 &step, 1);
2010 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &def, 1);
2011 }
2012
2013 ret = s->getZoom(&mZoomMin, &mZoomMax, &mZoomStep);
2014 if (ret < 0) {
2015 float maxZoom = 1.0;
2016 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2017 &maxZoom, 1);
2018 } else {
2019 float maxZoom = mZoomMax / mZoomMin;
2020 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2021 &maxZoom, 1);
2022 }
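// When the sensor reports a zoom range, the maximum digital zoom is the ratio of the
// range endpoints; e.g. (hypothetically) mZoomMin = 100 and mZoomMax = 400 would be
// advertised as 4.0x. Otherwise a fixed 1.0 (no digital zoom) is reported.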
2023
2024 static const uint8_t availableVstabModes[] = {
2025 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
2026 };
2027 info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2028 availableVstabModes, sizeof(availableVstabModes));
2029
2030 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
2031 info.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
2032 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2033 info.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
2034 // android.info
2035 const uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2036 //mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
2037 // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2038 info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2039 &supportedHardwareLevel,
2040 /*count*/1);
2041
2042 int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
2043 info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1);
2044
2045 uint8_t len[] = {1};
2046 info.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
2047
2048 uint8_t maxlen[] = {2};
2049 info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
2050 uint8_t cap[] = {
2051 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
2052 };
2053 info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2054 (uint8_t *)cap, sizeof(cap)/sizeof(cap[0]));
2055
2056
2057 int32_t partialResultCount = 1;
2058 info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,&partialResultCount,1);
2059 int32_t maxNumOutputStreams[3] = {0,2,1};
2060 info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,maxNumOutputStreams,3);
2061 uint8_t aberrationMode[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
2062 info.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
2063 aberrationMode, 1);
2064 info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
2065 aberrationMode, 1);
2066
2067 getAvailableChKeys(&info, supportedHardwareLevel);
2068
2069 if (mCameraInfo != NULL) {
2070 CAMHAL_LOGDA("mCameraInfo is not null, mem leak?");
2071 }
2072 mCameraInfo = info.release();
2073 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
2074
2075 if (duration != NULL) {
2076 delete [] duration;
2077 }
2078
2079 s->shutDown();
2080 s.clear();
2081 mPlugged = true;
2082
2083 return OK;
2084}
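/*
 * A minimal sketch of reading a field back from the static metadata built above
 * (hypothetical caller; mirrors the settings.find() pattern used elsewhere in this
 * file, and assumes mCameraInfo has already been populated):
 *
 *   CameraMetadata meta(clone_camera_metadata(mCameraInfo));
 *   camera_metadata_entry e = meta.find(ANDROID_SENSOR_ORIENTATION);
 *   if (e.count == 1) ALOGI("sensor orientation: %d", e.data.i32[0]);
 */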
2085
2086status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
2087 /**
2088 * Extract top-level 3A controls
2089 */
2090 status_t res;
2091
2092 bool facePriority = false;
2093
2094 camera_metadata_entry e;
2095
2096 e = settings.find(ANDROID_CONTROL_MODE);
2097 if (e.count == 0) {
2098 ALOGE("%s: No control mode entry!", __FUNCTION__);
2099 return BAD_VALUE;
2100 }
2101 uint8_t controlMode = e.data.u8[0];
2102
2103 e = settings.find(ANDROID_CONTROL_SCENE_MODE);
2104 if (e.count == 0) {
2105 ALOGE("%s: No scene mode entry!", __FUNCTION__);
2106 return BAD_VALUE;
2107 }
2108 uint8_t sceneMode = e.data.u8[0];
2109
2110 if (controlMode == ANDROID_CONTROL_MODE_OFF) {
2111 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2112 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2113 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2114 update3A(settings);
2115 return OK;
2116 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2117 switch(sceneMode) {
2118 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
2119 mFacePriority = true;
2120 break;
2121 default:
2122 ALOGE("%s: Emulator doesn't support scene mode %d",
2123 __FUNCTION__, sceneMode);
2124 return BAD_VALUE;
2125 }
2126 } else {
2127 mFacePriority = false;
2128 }
2129
2130 // controlMode == AUTO or sceneMode == FACE_PRIORITY
2131 // Process individual 3A controls
2132
2133 res = doFakeAE(settings);
2134 if (res != OK) return res;
2135
2136 res = doFakeAF(settings);
2137 if (res != OK) return res;
2138
2139 res = doFakeAWB(settings);
2140 if (res != OK) return res;
2141
2142 update3A(settings);
2143 return OK;
2144}
2145
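/*
 * Fake auto-exposure state machine. A precapture trigger (or an in-progress
 * PRECAPTURE state) converges the simulated exposure toward the target and then
 * reports CONVERGED or LOCKED; otherwise AE occasionally wanders to a new target
 * (SEARCHING) and settles back to CONVERGED, or stays LOCKED while AE lock is on.
 */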
2146status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
2147 camera_metadata_entry e;
2148
2149 e = settings.find(ANDROID_CONTROL_AE_MODE);
2150 if (e.count == 0) {
2151 ALOGE("%s: No AE mode entry!", __FUNCTION__);
2152 return BAD_VALUE;
2153 }
2154 uint8_t aeMode = e.data.u8[0];
2155
2156 switch (aeMode) {
2157 case ANDROID_CONTROL_AE_MODE_OFF:
2158 // AE is OFF
2159 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2160 return OK;
2161 case ANDROID_CONTROL_AE_MODE_ON:
2162 // OK for AUTO modes
2163 break;
2164 default:
2165 ALOGE("%s: Emulator doesn't support AE mode %d",
2166 __FUNCTION__, aeMode);
2167 return BAD_VALUE;
2168 }
2169
2170 e = settings.find(ANDROID_CONTROL_AE_LOCK);
2171 if (e.count == 0) {
2172 ALOGE("%s: No AE lock entry!", __FUNCTION__);
2173 return BAD_VALUE;
2174 }
2175 bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
2176
2177 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
2178 bool precaptureTrigger = false;
2179 if (e.count != 0) {
2180 precaptureTrigger =
2181 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
2182 }
2183
2184 if (precaptureTrigger) {
2185 ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
2186 } else if (e.count > 0) {
2187 ALOGV("%s: Pre capture trigger was present? %zu",
2188 __FUNCTION__,
2189 e.count);
2190 }
2191
2192 if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2193 // Run precapture sequence
2194 if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2195 mAeCounter = 0;
2196 }
2197
2198 if (mFacePriority) {
2199 mAeTargetExposureTime = kFacePriorityExposureTime;
2200 } else {
2201 mAeTargetExposureTime = kNormalExposureTime;
2202 }
2203
2204 if (mAeCounter > kPrecaptureMinFrames &&
2205 (mAeTargetExposureTime - mAeCurrentExposureTime) <
2206 mAeTargetExposureTime / 10) {
2207 // Done with precapture
2208 mAeCounter = 0;
2209 mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
2210 ANDROID_CONTROL_AE_STATE_CONVERGED;
2211 } else {
2212 // Converge some more
2213 mAeCurrentExposureTime +=
2214 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2215 kExposureTrackRate;
2216 mAeCounter++;
2217 mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
2218 }
2219
2220 } else if (!aeLocked) {
2221 // Run standard occasional AE scan
2222 switch (mAeState) {
2223 case ANDROID_CONTROL_AE_STATE_CONVERGED:
2224 case ANDROID_CONTROL_AE_STATE_INACTIVE:
2225 mAeCounter++;
2226 if (mAeCounter > kStableAeMaxFrames) {
2227 mAeTargetExposureTime =
2228 mFacePriority ? kFacePriorityExposureTime :
2229 kNormalExposureTime;
2230 float exposureStep = ((double)rand() / RAND_MAX) *
2231 (kExposureWanderMax - kExposureWanderMin) +
2232 kExposureWanderMin;
2233 mAeTargetExposureTime *= std::pow(2, exposureStep);
2234 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2235 }
2236 break;
2237 case ANDROID_CONTROL_AE_STATE_SEARCHING:
2238 mAeCurrentExposureTime +=
2239 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2240 kExposureTrackRate;
2241 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
2242 mAeTargetExposureTime / 10) {
2243 // Close enough
2244 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2245 mAeCounter = 0;
2246 }
2247 break;
2248 case ANDROID_CONTROL_AE_STATE_LOCKED:
2249 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2250 mAeCounter = 0;
2251 break;
2252 default:
2253 ALOGE("%s: Emulator in unexpected AE state %d",
2254 __FUNCTION__, mAeState);
2255 return INVALID_OPERATION;
2256 }
2257 } else {
2258 // AE is locked
2259 mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2260 }
2261
2262 return OK;
2263}
2264
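/*
 * Fake auto-focus handling: validates the AF mode (the front camera is forced to OFF),
 * forwards supported modes to the sensor, and simulates the AF state machine with at
 * most one transition per frame; scans complete instantly and "succeed" with a random
 * focused / not-focused outcome.
 */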
2265status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
2266 camera_metadata_entry e;
2267
2268 e = settings.find(ANDROID_CONTROL_AF_MODE);
2269 if (e.count == 0) {
2270 ALOGE("%s: No AF mode entry!", __FUNCTION__);
2271 return BAD_VALUE;
2272 }
2273 uint8_t afMode = e.data.u8[0];
2274
2275 e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
2276 typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
2277 af_trigger_t afTrigger;
2278 // If we have an afTrigger, afTriggerId should be set too
2279 if (e.count != 0) {
2280 afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
2281
2282 e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);
2283
2284 if (e.count == 0) {
2285 ALOGE("%s: When android.control.afTrigger is set "
2286 " in the request, afTriggerId needs to be set as well",
2287 __FUNCTION__);
2288 return BAD_VALUE;
2289 }
2290
2291 mAfTriggerId = e.data.i32[0];
2292
2293 ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
2294 ALOGV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
2295 ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
2296 } else {
2297 afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
2298 }
2299 if (!mFacingBack) {
2300 afMode = ANDROID_CONTROL_AF_MODE_OFF;
2301 }
2302
2303 switch (afMode) {
2304 case ANDROID_CONTROL_AF_MODE_OFF:
2305 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2306 return OK;
2307 case ANDROID_CONTROL_AF_MODE_AUTO:
2308 case ANDROID_CONTROL_AF_MODE_MACRO:
2309 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2310 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2311 if (!mFacingBack) {
2312 ALOGE("%s: Front camera doesn't support AF mode %d",
2313 __FUNCTION__, afMode);
2314 return BAD_VALUE;
2315 }
2316 mSensor->setAutoFocuas(afMode);
2317 // OK, handle transitions lower on
2318 break;
2319 default:
2320 ALOGE("%s: Emulator doesn't support AF mode %d",
2321 __FUNCTION__, afMode);
2322 return BAD_VALUE;
2323 }
2324#if 0
2325 e = settings.find(ANDROID_CONTROL_AF_REGIONS);
2326 if (e.count == 0) {
2327 ALOGE("%s:Get ANDROID_CONTROL_AF_REGIONS failed\n", __FUNCTION__);
2328 return BAD_VALUE;
2329 }
2330 int32_t x0 = e.data.i32[0];
2331 int32_t y0 = e.data.i32[1];
2332 int32_t x1 = e.data.i32[2];
2333 int32_t y1 = e.data.i32[3];
2334 mSensor->setFocuasArea(x0, y0, x1, y1);
2335 DBG_LOGB(" x0:%d, y0:%d,x1:%d,y1:%d,\n", x0, y0, x1, y1);
2336#endif
2337
2338
2339 bool afModeChanged = mAfMode != afMode;
2340 mAfMode = afMode;
2341
2342 /**
2343 * Simulate AF triggers. Transition at most 1 state per frame.
2344 * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
2345 */
2346
2347 bool afTriggerStart = false;
2348 bool afTriggerCancel = false;
2349 switch (afTrigger) {
2350 case ANDROID_CONTROL_AF_TRIGGER_IDLE:
2351 break;
2352 case ANDROID_CONTROL_AF_TRIGGER_START:
2353 afTriggerStart = true;
2354 break;
2355 case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
2356 afTriggerCancel = true;
2357 // Cancel trigger always transitions into INACTIVE
2358 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2359
2360 ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);
2361
2362 // Stay in 'inactive' until at least next frame
2363 return OK;
2364 default:
2365 ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
2366 return BAD_VALUE;
2367 }
2368
2369 // If we get down here, we're either in an autofocus mode
2370 // or in a continuous focus mode (and no other modes)
2371
2372 int oldAfState = mAfState;
2373 switch (mAfState) {
2374 case ANDROID_CONTROL_AF_STATE_INACTIVE:
2375 if (afTriggerStart) {
2376 switch (afMode) {
2377 case ANDROID_CONTROL_AF_MODE_AUTO:
2378 // fall-through
2379 case ANDROID_CONTROL_AF_MODE_MACRO:
2380 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2381 break;
2382 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2383 // fall-through
2384 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2385 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2386 break;
2387 }
2388 } else {
2389 // At least one frame stays in INACTIVE
2390 if (!afModeChanged) {
2391 switch (afMode) {
2392 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2393 // fall-through
2394 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2395 mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
2396 break;
2397 }
2398 }
2399 }
2400 break;
2401 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2402 /**
2403 * When the AF trigger is activated, the algorithm should finish
2404 * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
2405 * or AF_NOT_FOCUSED as appropriate
2406 */
2407 if (afTriggerStart) {
2408 // Randomly transition to focused or not focused
2409 if (rand() % 3) {
2410 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2411 } else {
2412 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2413 }
2414 }
2415 /**
2416 * When the AF trigger is not involved, the AF algorithm should
2417 * start in INACTIVE state, and then transition into PASSIVE_SCAN
2418 * and PASSIVE_FOCUSED states
2419 */
2420 else if (!afTriggerCancel) {
2421 // Randomly transition to passive focus
2422 if (rand() % 3 == 0) {
2423 mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
2424 }
2425 }
2426
2427 break;
2428 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2429 if (afTriggerStart) {
2430 // Randomly transition to focused or not focused
2431 if (rand() % 3) {
2432 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2433 } else {
2434 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2435 }
2436 }
2437 // TODO: initiate passive scan (PASSIVE_SCAN)
2438 break;
2439 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2440 // Simulate AF sweep completing instantaneously
2441
2442 // Randomly transition to focused or not focused
2443 if (rand() % 3) {
2444 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2445 } else {
2446 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2447 }
2448 break;
2449 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2450 if (afTriggerStart) {
2451 switch (afMode) {
2452 case ANDROID_CONTROL_AF_MODE_AUTO:
2453 // fall-through
2454 case ANDROID_CONTROL_AF_MODE_MACRO:
2455 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2456 break;
2457 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2458 // fall-through
2459 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2460 // continuous autofocus => trigger start has no effect
2461 break;
2462 }
2463 }
2464 break;
2465 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2466 if (afTriggerStart) {
2467 switch (afMode) {
2468 case ANDROID_CONTROL_AF_MODE_AUTO:
2469 // fall-through
2470 case ANDROID_CONTROL_AF_MODE_MACRO:
2471 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2472 break;
2473 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2474 // fall-through
2475 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2476 // continuous autofocus => trigger start has no effect
2477 break;
2478 }
2479 }
2480 break;
2481 default:
2482 ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
2483 }
2484
2485 {
2486 char afStateString[100] = {0,};
2487 camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
2488 oldAfState,
2489 afStateString,
2490 sizeof(afStateString));
2491
2492 char afNewStateString[100] = {0,};
2493 camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
2494 mAfState,
2495 afNewStateString,
2496 sizeof(afNewStateString));
2497 ALOGVV("%s: AF state transitioned from %s to %s",
2498 __FUNCTION__, afStateString, afNewStateString);
2499 }
2500
2501
2502 return OK;
2503}
2504
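/*
 * Fake auto-white-balance: OFF reports INACTIVE; every other supported mode is
 * forwarded to the sensor and immediately reported as CONVERGED.
 */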
2505status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2506 camera_metadata_entry e;
2507
2508 e = settings.find(ANDROID_CONTROL_AWB_MODE);
2509 if (e.count == 0) {
2510 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2511 return BAD_VALUE;
2512 }
2513 uint8_t awbMode = e.data.u8[0];
2514 //DBG_LOGB(" awbMode%d\n", awbMode);
2515
2516 // TODO: Add white balance simulation
2517
2518 switch (awbMode) {
2519 case ANDROID_CONTROL_AWB_MODE_OFF:
2520 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2521 return OK;
2522 case ANDROID_CONTROL_AWB_MODE_AUTO:
2523 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2524 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2525 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2526 case ANDROID_CONTROL_AWB_MODE_SHADE:
2527 mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; // report converged immediately so CTS 3A checks pass
2528 return mSensor->setAWB(awbMode);
2531 default:
2532 ALOGE("%s: Emulator doesn't support AWB mode %d",
2533 __FUNCTION__, awbMode);
2534 return BAD_VALUE;
2535 }
2536
2537 return OK;
2538}
2539
2540
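// Writes the current 3A state (and, while AE is active, the synthesized exposure
// time and sensitivity) back into the request settings so they show up in the
// capture result.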
2541void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2542 if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
2543 settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
2544 &mAeCurrentExposureTime, 1);
2545 settings.update(ANDROID_SENSOR_SENSITIVITY,
2546 &mAeCurrentSensitivity, 1);
2547 }
2548
2549 settings.update(ANDROID_CONTROL_AE_STATE,
2550 &mAeState, 1);
2551 settings.update(ANDROID_CONTROL_AF_STATE,
2552 &mAfState, 1);
2553 settings.update(ANDROID_CONTROL_AWB_STATE,
2554 &mAwbState, 1);
2555 /**
2556 * TODO: Trigger IDs need a think-through
2557 */
2558 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
2559 &mAfTriggerId, 1);
2560}
2561
2562void EmulatedFakeCamera3::signalReadoutIdle() {
2563 Mutex::Autolock l(mLock);
2564 // Need to check isIdle again because waiting on mLock may have allowed
2565 // something to be placed in the in-flight queue.
2566 if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2567 ALOGV("Now idle");
2568 mStatus = STATUS_READY;
2569 }
2570}
2571
2572void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2573 nsecs_t timestamp) {
2574 switch(e) {
2575 case Sensor::SensorListener::EXPOSURE_START: {
2576 ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
2577 __FUNCTION__, frameNumber, timestamp);
2578 // Trigger shutter notify to framework
2579 camera3_notify_msg_t msg;
2580 msg.type = CAMERA3_MSG_SHUTTER;
2581 msg.message.shutter.frame_number = frameNumber;
2582 msg.message.shutter.timestamp = timestamp;
2583 sendNotify(&msg);
2584 break;
2585 }
2586 default:
2587 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
2588 e, timestamp);
2589 break;
2590 }
2591}
2592
2593EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
2594 mParent(parent), mJpegWaiting(false), mThreadActive(false) {
2595}
2596
2597EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2598 for (List<Request>::iterator i = mInFlightQueue.begin();
2599 i != mInFlightQueue.end(); i++) {
2600 delete i->buffers;
2601 delete i->sensorBuffers;
2602 }
2603}
2604
2605void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
2606 Mutex::Autolock l(mLock);
2607
2608 mInFlightQueue.push_back(r);
2609 mInFlightSignal.signal();
2610}
2611
2612bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
2613 Mutex::Autolock l(mLock);
2614 return mInFlightQueue.empty() && !mThreadActive;
2615}
2616
2617status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2618 status_t res;
2619 Mutex::Autolock l(mLock);
2620 int loopCount = 0;
2621 while (mInFlightQueue.size() >= kMaxQueueSize) {
2622 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2623 if (res != OK && res != TIMED_OUT) {
2624 ALOGE("%s: Error waiting for in-flight queue to shrink",
2625 __FUNCTION__);
2626 return INVALID_OPERATION;
2627 }
2628 if (loopCount == kMaxWaitLoops) {
2629 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2630 __FUNCTION__);
2631 return TIMED_OUT;
2632 }
2633 loopCount++;
2634 }
2635 return OK;
2636}
2637
2638status_t EmulatedFakeCamera3::ReadoutThread::setJpegCompressorListener(EmulatedFakeCamera3 *parent) {
2639 status_t res;
2640 res = mParent->mJpegCompressor->setlistener(this);
2641 if (res != NO_ERROR) {
2642 ALOGE("%s: set JpegCompressor Listner failed",__FUNCTION__);
2643 }
2644 return res;
2645}
2646
2647status_t EmulatedFakeCamera3::ReadoutThread::startJpegCompressor(EmulatedFakeCamera3 *parent) {
2648 status_t res;
2649 res = mParent->mJpegCompressor->start();
2650 if (res != NO_ERROR) {
2651 ALOGE("%s: JpegCompressor start failed",__FUNCTION__);
2652 }
2653 return res;
2654}
2655
2656status_t EmulatedFakeCamera3::ReadoutThread::shutdownJpegCompressor(EmulatedFakeCamera3 *parent) {
2657 status_t res;
2658 res = mParent->mJpegCompressor->cancel();
2659 if (res != OK) {
2660 ALOGE("%s: JpegCompressor cancel failed",__FUNCTION__);
2661 }
2662 return res;
2663}
2664
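/*
 * One readout iteration: pop the next in-flight request (or time out and retry),
 * wait for the sensor to deliver the corresponding frame, hand any BLOB (JPEG)
 * buffers to the JPEG compressor for asynchronous completion, unlock and mark the
 * remaining buffers, and send a capture result back to the framework.
 */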
2665bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
2666 status_t res;
2667 ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
2668
2669 // First wait for a request from the in-flight queue
2670
2671 if (mCurrentRequest.settings.isEmpty()) {
2672 Mutex::Autolock l(mLock);
2673 if (mInFlightQueue.empty()) {
2674 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2675 if (res == TIMED_OUT) {
2676 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
2677 __FUNCTION__);
2678 return true;
2679 } else if (res != NO_ERROR) {
2680 ALOGE("%s: Error waiting for capture requests: %d",
2681 __FUNCTION__, res);
2682 return false;
2683 }
2684 }
2685 mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
2686 mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
2687 mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
2688 mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
2689 mCurrentRequest.havethumbnail = mInFlightQueue.begin()->havethumbnail;
2690 mInFlightQueue.erase(mInFlightQueue.begin());
2691 mInFlightSignal.signal();
2692 mThreadActive = true;
2693 ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
2694 mCurrentRequest.frameNumber);
2695 }
2696
2697 // Then wait for it to be delivered from the sensor
2698 ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
2699 __FUNCTION__);
2700
2701 nsecs_t captureTime;
2702 bool gotFrame =
2703 mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
2704 if (!gotFrame) {
2705 ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
2706 __FUNCTION__);
2707 return true;
2708 }
2709
2710 ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
2711 mCurrentRequest.frameNumber, captureTime);
2712
2713 // Check if we need to JPEG encode a buffer, and send it for async
2714 // compression if so. Otherwise prepare the buffer for return.
2715 bool needJpeg = false;
2716 HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
2717 while (buf != mCurrentRequest.buffers->end()) {
2718 bool goodBuffer = true;
2719 if ( buf->stream->format ==
2720 HAL_PIXEL_FORMAT_BLOB) {
2721 Mutex::Autolock jl(mJpegLock);
2722 needJpeg = true;
2723 CaptureRequest currentcapture;
2724 currentcapture.frameNumber = mCurrentRequest.frameNumber;
2725 currentcapture.sensorBuffers = mCurrentRequest.sensorBuffers;
2726 currentcapture.buf = buf;
2727 currentcapture.mNeedThumbnail = mCurrentRequest.havethumbnail;
2728 mParent->mJpegCompressor->queueRequest(currentcapture);
2729 // sensorBuffers is now owned by the JPEG compressor, which will delete it
2730 mCurrentRequest.sensorBuffers = NULL;
2731 buf = mCurrentRequest.buffers->erase(buf);
2732 continue;
2733 }
2734 GraphicBufferMapper::get().unlock(*(buf->buffer));
2735
2736 buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
2737 CAMERA3_BUFFER_STATUS_ERROR;
2738 buf->acquire_fence = -1;
2739 buf->release_fence = -1;
2740
2741 ++buf;
2742 } // end while
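// At this point every non-JPEG buffer has been unlocked and marked OK; any BLOB
// buffer was removed from the list and will be returned later via onJpegDone().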
2743
2744 // Construct result for all completed buffers and results
2745
2746 camera3_capture_result result;
2747
2748 mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
2749 &captureTime, 1);
2750
2751 memset(&result, 0, sizeof(result));
2752 result.frame_number = mCurrentRequest.frameNumber;
2753 result.result = mCurrentRequest.settings.getAndLock();
2754 result.num_output_buffers = mCurrentRequest.buffers->size();
2755 result.output_buffers = mCurrentRequest.buffers->array();
2756 result.partial_result = 1;
2757
2758 // Go idle if queue is empty, before sending result
2759 bool signalIdle = false;
2760 {
2761 Mutex::Autolock l(mLock);
2762 if (mInFlightQueue.empty()) {
2763 mThreadActive = false;
2764 signalIdle = true;
2765 }
2766 }
2767 if (signalIdle) mParent->signalReadoutIdle();
2768
2769 // Send it off to the framework
2770 ALOGVV("%s: ReadoutThread: Send result to framework",
2771 __FUNCTION__);
2772 mParent->sendCaptureResult(&result);
2773
2774 // Clean up
2775 mCurrentRequest.settings.unlock(result.result);
2776
2777 delete mCurrentRequest.buffers;
2778 mCurrentRequest.buffers = NULL;
2779 if (!needJpeg) {
2780 delete mCurrentRequest.sensorBuffers;
2781 mCurrentRequest.sensorBuffers = NULL;
2782 }
2783 mCurrentRequest.settings.clear();
2784
2785 return true;
2786}
2787
2788void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2789 const StreamBuffer &jpegBuffer, bool success , CaptureRequest &r) {
2790 Mutex::Autolock jl(mJpegLock);
2791 GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));
2792
2793 mJpegHalBuffer = *(r.buf);
2794 mJpegHalBuffer.status = success ?
2795 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2796 mJpegHalBuffer.acquire_fence = -1;
2797 mJpegHalBuffer.release_fence = -1;
2798 mJpegWaiting = false;
2799
2800 camera3_capture_result result;
memset(&result, 0, sizeof(result)); // zero unused fields (e.g. input_buffer), matching threadLoop()
2801 result.frame_number = r.frameNumber;
2802 result.result = NULL;
2803 result.num_output_buffers = 1;
2804 result.output_buffers = &mJpegHalBuffer;
2805 result.partial_result = 1;
2806
2807 if (!success) {
2808 ALOGE("%s: Compression failure, returning error state buffer to"
2809 " framework", __FUNCTION__);
2810 } else {
2811 DBG_LOGB("%s: Compression complete, returning buffer to framework",
2812 __FUNCTION__);
2813 }
2814
2815 mParent->sendCaptureResult(&result);
2816
2817}
2818
2819void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
2820 const StreamBuffer &inputBuffer) {
2821 // Should never get here, since the input buffer has to be returned
2822 // by end of processCaptureRequest
2823 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2824}
2825
2826
2827}; // namespace android
2828