summaryrefslogtreecommitdiff
path: root/v3/EmulatedFakeCamera3.cpp (plain)
blob: 6e2b39f86631186f86062227ffe3c58e24e28f48
1/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/*
18 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22#include <inttypes.h>
23
24#define LOG_NDEBUG 0
25//#define LOG_NNDEBUG 0
26#define LOG_TAG "EmulatedCamera_FakeCamera3"
27#include <utils/Log.h>
28
29#include "EmulatedFakeCamera3.h"
30#include "EmulatedCameraFactory.h"
31#include <ui/Fence.h>
32#include <ui/Rect.h>
33#include <ui/GraphicBufferMapper.h>
34#include <sys/types.h>
35
36#include <cutils/properties.h>
37#include "fake-pipeline2/Sensor.h"
38#include "fake-pipeline2/JpegCompressor.h"
39#include <cmath>
40#include <gralloc_priv.h>
41#include <binder/IPCThreadState.h>
42
43#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
44#define ALOGVV ALOGV
45#else
46#define ALOGVV(...) ((void)0)
47#endif
48
49namespace android {
50
51/**
52 * Constants for camera capabilities
53 */
54
// Time-unit multipliers in nanoseconds (the base unit of nsecs_t):
// USEC = 1 microsecond, MSEC = 1 millisecond, SEC = 1 second.
const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;
58
59
// Output pixel formats this HAL advertises to the framework. Commented-out
// entries are intentionally disabled; IMPLEMENTATION_DEFINED is remapped to
// the sensor's native NV21 layout in configureStreams().
const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
    //HAL_PIXEL_FORMAT_RAW_SENSOR,
    HAL_PIXEL_FORMAT_BLOB,
    //HAL_PIXEL_FORMAT_RGBA_8888,
    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
    // These are handled by YCbCr_420_888
    HAL_PIXEL_FORMAT_YV12,
    HAL_PIXEL_FORMAT_YCrCb_420_SP,
    //HAL_PIXEL_FORMAT_YCbCr_422_I,
    HAL_PIXEL_FORMAT_YCbCr_420_888
};
71
// Supported RAW capture size as a single (width, height) pair. Hard-coded
// to 640x480 rather than derived from the live sensor resolution.
const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
    640, 480
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum frame duration (ns) for RAW streams, from the sensor's
// supported frame-duration range.
const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// Processed (YUV) output sizes for the back camera, as (w, h) pairs.
// NOTE(review): declared with 6 entries but only 4 initializers — the
// trailing two slots are zero-filled (the 1280x720 pair is commented
// out); confirm consumers treat a zero width as a terminator.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[6] = {
    640, 480, 320, 240,// 1280, 720
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Processed (YUV) output sizes for the front camera, as (w, h) pairs.
const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
    640, 480, 320, 240
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// Minimum frame duration (ns) for processed streams.
const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};

// JPEG capture size for the back camera, as a (w, h) pair.
const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
    1280,720
    // Sensor::kResolution[0], Sensor::kResolution[1]
};

// JPEG capture size for the front camera, as a (w, h) pair.
const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
    640, 480
    // Sensor::kResolution[0], Sensor::kResolution[1]
};


// Minimum frame duration (ns) for JPEG capture.
const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
    (const uint64_t)Sensor::kFrameDurationRange[0]
};
109
/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int EmulatedFakeCamera3::kNormalSensitivity = 100;
const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
// Presumably the fraction of exposure error corrected per frame by the
// fake AE loop — confirm against the 3A update code.
const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
// Frame-count bounds for the simulated precapture / AE-stability sequences.
const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
// Range the simulated scene exposure is allowed to wander over.
const float EmulatedFakeCamera3::kExposureWanderMin = -2;
const float EmulatedFakeCamera3::kExposureWanderMax = 1;

/**
 * Camera device lifecycle methods
 */
// Floor for JPEG buffer allocations: 256KB of payload plus room for the
// camera3_jpeg_blob transport header appended at the buffer's end.
static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
129jpegsize EmulatedFakeCamera3::getMaxJpegResolution(uint32_t picSizes[],int count) {
130 uint32_t maxJpegWidth = 0, maxJpegHeight = 0;
131 jpegsize maxJpegResolution;
132 for (int i=0; i < count; i+= 4) {
133 uint32_t width = picSizes[i+1];
134 uint32_t height = picSizes[i+2];
135 if (picSizes[i+0] == HAL_PIXEL_FORMAT_BLOB &&
136 (width * height > maxJpegWidth * maxJpegHeight)) {
137 maxJpegWidth = width;
138 maxJpegHeight = height;
139 }
140 }
141 maxJpegResolution.width = maxJpegWidth;
142 maxJpegResolution.height = maxJpegHeight;
143 return maxJpegResolution;
144}
/*
 * Compute the buffer size to allocate for a JPEG capture at the given
 * resolution by linearly scaling the compressor's maximum buffer size by
 * the ratio of the requested area to the maximum JPEG area.
 * Returns BAD_VALUE if no maximum JPEG resolution has been established
 * (i.e. getMaxJpegResolution found no BLOB entry).
 */
ssize_t EmulatedFakeCamera3::getJpegBufferSize(int width, int height) {
    if (maxJpegResolution.width == 0) {
        return BAD_VALUE;
    }
    ssize_t maxJpegBufferSize = JpegCompressor::kMaxJpegSize;

#if PLATFORM_SDK_VERSION <= 22
    // Calculate final jpeg buffer size for the given resolution.
    float scaleFactor = ((float) (width * height)) /
            (maxJpegResolution.width * maxJpegResolution.height);
    ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize;
    // Bound the buffer size to [MIN_JPEG_BUFFER_SIZE, maxJpegBufferSize].
    if (jpegBufferSize > maxJpegBufferSize) {
        jpegBufferSize = maxJpegBufferSize;
    } else if (jpegBufferSize < kMinJpegBufferSize) {
        jpegBufferSize = kMinJpegBufferSize;
    }
#else
    // Newer SDKs: interpolate between the minimum and maximum buffer
    // sizes so even tiny resolutions get at least kMinJpegBufferSize.
    assert(kMinJpegBufferSize < maxJpegBufferSize);
    // Calculate final jpeg buffer size for the given resolution.
    float scaleFactor = ((float) (width * height)) /
            (maxJpegResolution.width * maxJpegResolution.height);
    ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
            kMinJpegBufferSize;
    // Clamp in case the requested area exceeds the maximum JPEG area.
    if (jpegBufferSize > maxJpegBufferSize)
        jpegBufferSize = maxJpegBufferSize;
#endif

    return jpegBufferSize;
}
175
/*
 * Construct the fake camera for the given ID. Heavyweight setup (static
 * metadata, sensor threads) is deferred to Initialize()/connectCamera();
 * here only POD members and the template cache get safe defaults.
 */
EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, struct hw_module_t* module) :
        EmulatedCamera3(cameraId, module) {
    ALOGI("Constructing emulated fake camera 3 cameraID:%d", mCameraID);

    // Request-template cache starts empty; entries are built lazily by
    // constructDefaultRequestSettings().
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
        mDefaultTemplates[i] = NULL;
    }

    /**
     * Front cameras = limited mode
     * Back cameras = full mode
     */
    //TODO limited or full mode, read this from camera driver
    //mFullMode = facingBack;
    mCameraStatus = CAMERA_INIT;
    mSupportCap = 0;     // ioctl capability mask, probed in connectCamera()
    mSupportRotate = 0;
    mFullMode = 0;
    mFlushTag = false;
    mPlugged = false;    // hot-plug state; set by plugCamera()/unplugCamera()

}
198
199EmulatedFakeCamera3::~EmulatedFakeCamera3() {
200 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
201 if (mDefaultTemplates[i] != NULL) {
202 free_camera_metadata(mDefaultTemplates[i]);
203 }
204 }
205
206 if (mCameraInfo != NULL) {
207 CAMHAL_LOGIA("free mCameraInfo");
208 free_camera_metadata(mCameraInfo);
209 mCameraInfo = NULL;
210 }
211}
212
/*
 * One-time device initialization: builds the static camera
 * characteristics, then defers to the base class. Only legal while the
 * device is still in its initial (STATUS_ERROR) state.
 * Returns OK on success, INVALID_OPERATION if already initialized, or
 * the error from constructStaticInfo().
 */
status_t EmulatedFakeCamera3::Initialize() {
    DBG_LOGB("mCameraID=%d,mStatus=%d,ddd\n", mCameraID, mStatus);
    status_t res;

#ifdef HAVE_VERSION_INFO
    // Build-provenance banner; compiled in only when version info was
    // generated at build time.
    CAMHAL_LOGIB("\n--------------------------------\n"
                  "author:aml.sh multi-media team\n"
                  "branch name: %s\n"
                  "git version: %s \n"
                  "last changed: %s\n"
                  "build-time: %s\n"
                  "build-name: %s\n"
                  "uncommitted-file-num:%d\n"
                  "ssh user@%s, cd %s\n"
                  "hostname %s\n"
                  "--------------------------------\n",
                  CAMHAL_BRANCH_NAME,
                  CAMHAL_GIT_VERSION,
                  CAMHAL_LAST_CHANGED,
                  CAMHAL_BUILD_TIME,
                  CAMHAL_BUILD_NAME,
                  CAMHAL_GIT_UNCOMMIT_FILE_NUM,
                  CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
                  );
#endif


    // The device starts in STATUS_ERROR; any other state means
    // Initialize() already ran.
    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    // Build the static metadata (camera characteristics) for this camera.
    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return EmulatedCamera3::Initialize();
}
254
/*
 * Open the camera device: start the sensor, probe its optional ioctl
 * capabilities, spin up the readout/JPEG-compressor threads, and reset
 * the fake 3A state machine. Fails unless the device is currently
 * closed AND reported as plugged in.
 * Returns OK on success, INVALID_OPERATION for a bad state, or the
 * first error from sensor/thread startup.
 */
status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    ALOGV("%s: E", __FUNCTION__);
    DBG_LOGB("%s, ddd", __FUNCTION__);
    Mutex::Autolock l(mLock);
    status_t res;
    DBG_LOGB("%s , mStatus = %d" , __FUNCTION__, mStatus);

    if ((mStatus != STATUS_CLOSED) || !mPlugged) {
        ALOGE("%s: Can't connect in state %d, mPlugged=%d",
                __FUNCTION__, mStatus, mPlugged);
        return INVALID_OPERATION;
    }

    mSensor = new Sensor();
    mSensor->setSensorListener(this);

    res = mSensor->startUp(mCameraID);
    DBG_LOGB("mSensor startUp, mCameraID=%d\n", mCameraID);
    if (res != NO_ERROR) return res;

    // Probe which optional controls the sensor driver supports
    // (e.g. hardware rotate).
    mSupportCap = mSensor->IoctlStateProbe();
    if (mSupportCap & IOCTL_MASK_ROTATE) {
        mSupportRotate = true;
    }

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor();

    // Wire the JPEG compressor to the readout thread before starting it.
    res = mReadoutThread->setJpegCompressorListener(this);
    if (res != NO_ERROR) {
        return res;
    }
    res = mReadoutThread->startJpegCompressor(this);
    if (res != NO_ERROR) {
        return res;
    }

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A

    // Start in full-auto with AE already "converged" so the first frames
    // are not reported as still converging.
    mControlMode = ANDROID_CONTROL_MODE_AUTO;
    mFacePriority = false;
    mAeMode = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;//ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAfTriggerId = 0;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}
311
312status_t EmulatedFakeCamera3::plugCamera() {
313 {
314 Mutex::Autolock l(mLock);
315
316 if (!mPlugged) {
317 CAMHAL_LOGIB("%s: Plugged back in", __FUNCTION__);
318 mPlugged = true;
319 }
320 }
321
322 return NO_ERROR;
323}
324
325status_t EmulatedFakeCamera3::unplugCamera() {
326 {
327 Mutex::Autolock l(mLock);
328
329 if (mPlugged) {
330 CAMHAL_LOGIB("%s: Unplugged camera", __FUNCTION__);
331 mPlugged = false;
332 }
333 }
334 return true;
335}
336
337camera_device_status_t EmulatedFakeCamera3::getHotplugStatus() {
338 Mutex::Autolock l(mLock);
339 return mPlugged ?
340 CAMERA_DEVICE_STATUS_PRESENT :
341 CAMERA_DEVICE_STATUS_NOT_PRESENT;
342}
343
344bool EmulatedFakeCamera3::getCameraStatus()
345{
346 CAMHAL_LOGVB("%s, mCameraStatus = %d",__FUNCTION__,mCameraStatus);
347 bool ret = false;
348 if (mStatus == STATUS_CLOSED) {
349 ret = true;
350 } else {
351 ret = false;
352 }
353 return ret;
354}
355
/*
 * Close the camera device: signal and shut down the sensor, stop the
 * readout/JPEG threads, free per-stream private state, then defer to the
 * base class. The lock is deliberately dropped around thread signaling
 * and join() so worker threads that need mLock can make progress.
 * Returns OK (also when already closed) or the first shutdown error.
 */
status_t EmulatedFakeCamera3::closeCamera() {
    DBG_LOGB("%s, %d\n", __FUNCTION__, __LINE__);
    status_t res;
    {
        Mutex::Autolock l(mLock);
        // Already closed: nothing to do.
        if (mStatus == STATUS_CLOSED) return OK;
    }

    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);
    // Wake the readout thread and ask the sensor loop to exit before
    // shutting the sensor down (done without the lock held).
    mReadoutThread->sendFlushSingnal();
    mSensor->sendExitSingalToSensor();
    res = mSensor->shutDown();
    if (res != NO_ERROR) {
        ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
        return res;
    }
    mSensor.clear();
    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);

    {
        Mutex::Autolock l(mLock);
        // Stop the JPEG compressor and request the readout thread exit
        // while holding the lock; the join happens below, unlocked.
        res = mReadoutThread->shutdownJpegCompressor(this);
        if (res != OK) {
            ALOGE("%s: Unable to shut down JpegCompressor: %d", __FUNCTION__, res);
            return res;
        }
        mReadoutThread->sendExitReadoutThreadSignal();
        mReadoutThread->requestExit();
    }
    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);

    // Join outside the lock to avoid deadlock with any readout-thread
    // work that itself acquires mLock.
    mReadoutThread->join();
    DBG_LOGA("Sucess exit ReadOutThread");
    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = NULL;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }
    CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);
    return EmulatedCamera3::closeCamera();
}
404
405status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
406 char property[PROPERTY_VALUE_MAX];
407 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
408 if (mSensorType == SENSOR_USB) {
409 if (mFacingBack) {
410 property_get("hw.camera.orientation.back", property, "0");
411 } else {
412 property_get("hw.camera.orientation.front", property, "0");
413 }
414 int32_t orientation = atoi(property);
415 property_get("hw.camera.usb.orientation_offset", property, "0");
416 orientation += atoi(property);
417 orientation %= 360;
418 info->orientation = orientation ;
419 } else {
420 if (mFacingBack) {
421 property_get("hw.camera.orientation.back", property, "270");
422 } else {
423 property_get("hw.camera.orientation.front", property, "90");
424 }
425 info->orientation = atoi(property);
426 }
427 return EmulatedCamera3::getCameraInfo(info);
428}
429
430/**
431 * Camera3 interface methods
432 */
433
434void EmulatedFakeCamera3::getValidJpegSize(uint32_t picSizes[], uint32_t availablejpegsize[], int count) {
435 int i,j,k;
436 bool valid = true;
437 for (i=0,j=0; i < count; i+= 4) {
438 for (k= 0; k<=j ;k+=2) {
439 if ((availablejpegsize[k]*availablejpegsize[k+1]) == (picSizes[i+1]*picSizes[i+2])) {
440
441 valid = false;
442 }
443 }
444 if (valid) {
445 availablejpegsize[j] = picSizes[i+1];
446 availablejpegsize[j+1] = picSizes[i+2];
447 j+=2;
448 }
449 valid = true;
450 }
451}
452
453status_t EmulatedFakeCamera3::checkValidJpegSize(uint32_t width, uint32_t height) {
454
455 int validsizecount = 0;
456 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
457 for (uint32_t f = 0; f < count; f+=2) {
458 if (mAvailableJpegSize[f] != 0) {
459 if ((mAvailableJpegSize[f] == width)&&(mAvailableJpegSize[f+1] == height)) {
460 validsizecount++;
461 }
462 } else {
463 break;
464 }
465 }
466 if (validsizecount == 0)
467 return BAD_VALUE;
468 return OK;
469}
470
471status_t EmulatedFakeCamera3::configureStreams(
472 camera3_stream_configuration *streamList) {
473 Mutex::Autolock l(mLock);
474 uint32_t width, height, pixelfmt;
475 bool isRestart = false;
476 mFlushTag = false;
477 DBG_LOGB("%s: %d streams", __FUNCTION__, streamList->num_streams);
478
479 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
480 ALOGE("%s: Cannot configure streams in state %d",
481 __FUNCTION__, mStatus);
482 return NO_INIT;
483 }
484
485 /**
486 * Sanity-check input list.
487 */
488 if (streamList == NULL) {
489 ALOGE("%s: NULL stream configuration", __FUNCTION__);
490 return BAD_VALUE;
491 }
492
493 if (streamList->streams == NULL) {
494 ALOGE("%s: NULL stream list", __FUNCTION__);
495 return BAD_VALUE;
496 }
497
498 if (streamList->num_streams < 1) {
499 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
500 streamList->num_streams);
501 return BAD_VALUE;
502 }
503
504 camera3_stream_t *inputStream = NULL;
505 for (size_t i = 0; i < streamList->num_streams; i++) {
506 camera3_stream_t *newStream = streamList->streams[i];
507
508 if (newStream == NULL) {
509 ALOGE("%s: Stream index %zu was NULL",
510 __FUNCTION__, i);
511 return BAD_VALUE;
512 }
513
514 if (newStream->max_buffers <= 0) {
515 isRestart = true;//mSensor->isNeedRestart(newStream->width, newStream->height, newStream->format);
516 DBG_LOGB("format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d, isRestart=%d\n",
517 newStream->format, newStream->width, newStream->height,
518 newStream->stream_type, newStream->max_buffers,
519 isRestart);
520 }
521
522 if ((newStream->width == 0) || (newStream->width == UINT32_MAX) ||
523 (newStream->height == 0) || (newStream->height == UINT32_MAX)) {
524 ALOGE("invalid width or height. \n");
525 return -EINVAL;
526 }
527
528 if (newStream->rotation == UINT32_MAX) {
529 ALOGE("invalid StreamRotation. \n");
530 return -EINVAL;
531 }
532
533 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
534 __FUNCTION__, newStream, i, newStream->stream_type,
535 newStream->usage,
536 newStream->format);
537
538 if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
539 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
540 if (inputStream != NULL) {
541
542 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
543 return BAD_VALUE;
544 }
545 inputStream = newStream;
546 }
547
548 bool validFormat = false;
549 for (size_t f = 0;
550 f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
551 f++) {
552 if (newStream->format == kAvailableFormats[f]) {
553 validFormat = true;
554 //HAL_PIXEL_FORMAT_YCrCb_420_SP,
555 if (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format)
556 newStream->format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
557
558 break;
559 }
560 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
561 }
562 if (!validFormat) {
563 ALOGE("%s: Unsupported stream format 0x%x requested",
564 __FUNCTION__, newStream->format);
565 return -EINVAL;
566 }
567
568 status_t ret = checkValidJpegSize(newStream->width, newStream->height);
569 if (ret != OK) {
570 ALOGE("Invalid Jpeg Size. \n");
571 return BAD_VALUE;
572 }
573
574 }
575 mInputStream = inputStream;
576 width = 0;
577 height = 0;
578 for (size_t i = 0; i < streamList->num_streams; i++) {
579 camera3_stream_t *newStream = streamList->streams[i];
580 DBG_LOGB("find propert width and height, format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d\n",
581 newStream->format, newStream->width, newStream->height, newStream->stream_type, newStream->max_buffers);
582 if ((HAL_PIXEL_FORMAT_BLOB != newStream->format) &&
583 (CAMERA3_STREAM_OUTPUT == newStream->stream_type)) {
584
585 if (width < newStream->width)
586 width = newStream->width;
587
588 if (height < newStream->height)
589 height = newStream->height;
590
591 pixelfmt = (uint32_t)newStream->format;
592 if (HAL_PIXEL_FORMAT_YCbCr_420_888 == pixelfmt)
593 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
594 }
595
596 }
597
598 //TODO modify this ugly code
599 if (isRestart) {
600 isRestart = mSensor->isNeedRestart(width, height, pixelfmt);
601 }
602
603 if (isRestart) {
604 mSensor->streamOff();
605 pixelfmt = mSensor->halFormatToSensorFormat(pixelfmt);
606 mSensor->setOutputFormat(width, height, pixelfmt, 0);
607 mSensor->streamOn();
608 DBG_LOGB("width=%d, height=%d, pixelfmt=%.4s\n",
609 width, height, (char*)&pixelfmt);
610 }
611
612 /**
613 * Initially mark all existing streams as not alive
614 */
615 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
616 PrivateStreamInfo *privStream =
617 static_cast<PrivateStreamInfo*>((*s)->priv);
618 privStream->alive = false;
619 }
620
621 /**
622 * Find new streams and mark still-alive ones
623 */
624 for (size_t i = 0; i < streamList->num_streams; i++) {
625 camera3_stream_t *newStream = streamList->streams[i];
626 if (newStream->priv == NULL) {
627 // New stream, construct info
628 PrivateStreamInfo *privStream = new PrivateStreamInfo();
629 privStream->alive = true;
630 privStream->registered = false;
631
632 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
633 newStream->max_buffers = kMaxBufferCount;
634 newStream->priv = privStream;
635 mStreams.push_back(newStream);
636 } else {
637 // Existing stream, mark as still alive.
638 PrivateStreamInfo *privStream =
639 static_cast<PrivateStreamInfo*>(newStream->priv);
640 CAMHAL_LOGDA("Existing stream ?");
641 privStream->alive = true;
642 }
643 // Always update usage and max buffers
644 /*for cts CameraDeviceTest -> testPrepare*/
645 newStream->max_buffers = kMaxBufferCount;
646 newStream->usage = mSensor->getStreamUsage(newStream->stream_type);
647 DBG_LOGB("%d, newStream=%p, stream_type=%d, usage=%x, priv=%p, w*h=%dx%d\n",
648 i, newStream, newStream->stream_type, newStream->usage, newStream->priv, newStream->width, newStream->height);
649 }
650
651 /**
652 * Reap the dead streams
653 */
654 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
655 PrivateStreamInfo *privStream =
656 static_cast<PrivateStreamInfo*>((*s)->priv);
657 if (!privStream->alive) {
658 DBG_LOGA("delete not alive streams");
659 (*s)->priv = NULL;
660 delete privStream;
661 s = mStreams.erase(s);
662 } else {
663 ++s;
664 }
665 }
666
667 /**
668 * Can't reuse settings across configure call
669 */
670 mPrevSettings.clear();
671
672 return OK;
673}
674
675status_t EmulatedFakeCamera3::registerStreamBuffers(
676 const camera3_stream_buffer_set *bufferSet) {
677 DBG_LOGB("%s: E", __FUNCTION__);
678 Mutex::Autolock l(mLock);
679
680 /**
681 * Sanity checks
682 */
683 DBG_LOGA("==========sanity checks\n");
684
685 // OK: register streams at any time during configure
686 // (but only once per stream)
687 if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) {
688 ALOGE("%s: Cannot register buffers in state %d",
689 __FUNCTION__, mStatus);
690 return NO_INIT;
691 }
692
693 if (bufferSet == NULL) {
694 ALOGE("%s: NULL buffer set!", __FUNCTION__);
695 return BAD_VALUE;
696 }
697
698 StreamIterator s = mStreams.begin();
699 for (; s != mStreams.end(); ++s) {
700 if (bufferSet->stream == *s) break;
701 }
702 if (s == mStreams.end()) {
703 ALOGE("%s: Trying to register buffers for a non-configured stream!",
704 __FUNCTION__);
705 return BAD_VALUE;
706 }
707
708 /**
709 * Register the buffers. This doesn't mean anything to the emulator besides
710 * marking them off as registered.
711 */
712
713 PrivateStreamInfo *privStream =
714 static_cast<PrivateStreamInfo*>((*s)->priv);
715
716#if 0
717 if (privStream->registered) {
718 ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__);
719 return BAD_VALUE;
720 }
721#endif
722
723 privStream->registered = true;
724
725 return OK;
726}
727
728const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
729 int type) {
730 DBG_LOGB("%s: E", __FUNCTION__);
731 Mutex::Autolock l(mLock);
732
733 if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
734 ALOGE("%s: Unknown request settings template: %d",
735 __FUNCTION__, type);
736 return NULL;
737 }
738
739 /**
740 * Cache is not just an optimization - pointer returned has to live at
741 * least as long as the camera device instance does.
742 */
743 if (mDefaultTemplates[type] != NULL) {
744 return mDefaultTemplates[type];
745 }
746
747 CameraMetadata settings;
748
749 /** android.request */
750 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
751 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
752
753 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
754 settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
755
756 static const int32_t id = 0;
757 settings.update(ANDROID_REQUEST_ID, &id, 1);
758
759 static const int32_t frameCount = 0;
760 settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
761
762 /** android.lens */
763
764 static const float focusDistance = 0;
765 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
766
767 static const float aperture = 2.8f;
768 settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
769
770// static const float focalLength = 5.0f;
771 static const float focalLength = 3.299999952316284f;
772 settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
773
774 static const float filterDensity = 0;
775 settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
776
777 static const uint8_t opticalStabilizationMode =
778 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
779 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
780 &opticalStabilizationMode, 1);
781
782 // FOCUS_RANGE set only in frame
783
784 /** android.sensor */
785
786 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
787 settings.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
788 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
789 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
790 static const int64_t exposureTime = 10 * MSEC;
791 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
792
793 int64_t frameDuration = mSensor->getMinFrameDuration();
794 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
795
796 static const int32_t sensitivity = 100;
797 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
798
799 static const int64_t rollingShutterSkew = 0;
800 settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
801 // TIMESTAMP set only in frame
802
803 /** android.flash */
804
805 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
806 settings.update(ANDROID_FLASH_STATE, &flashstate, 1);
807
808 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
809 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
810
811 static const uint8_t flashPower = 10;
812 settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
813
814 static const int64_t firingTime = 0;
815 settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
816
817 /** Processing block modes */
818 uint8_t hotPixelMode = 0;
819 uint8_t demosaicMode = 0;
820 uint8_t noiseMode = 0;
821 uint8_t shadingMode = 0;
822 uint8_t colorMode = 0;
823 uint8_t tonemapMode = 0;
824 uint8_t edgeMode = 0;
825 switch (type) {
826
827 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
828 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
829 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
830 // fall-through
831 case CAMERA3_TEMPLATE_STILL_CAPTURE:
832 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
833 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
834 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
835 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
836 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
837 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
838 break;
839 case CAMERA3_TEMPLATE_PREVIEW:
840 // fall-through
841 case CAMERA3_TEMPLATE_VIDEO_RECORD:
842 // fall-through
843 case CAMERA3_TEMPLATE_MANUAL:
844 // fall-through
845 default:
846 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
847 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
848 noiseMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
849 shadingMode = ANDROID_SHADING_MODE_FAST;
850 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
851 tonemapMode = ANDROID_TONEMAP_MODE_FAST;
852 edgeMode = ANDROID_EDGE_MODE_FAST;
853 break;
854 }
855 settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
856 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
857 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
858 settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
859 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
860 settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
861 settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
862
863 /** android.noise */
864 static const uint8_t noiseStrength = 5;
865 settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
866 static uint8_t availableNBModes[] = {
867 ANDROID_NOISE_REDUCTION_MODE_OFF,
868 ANDROID_NOISE_REDUCTION_MODE_FAST,
869 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
870 };
871 settings.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
872 availableNBModes, sizeof(availableNBModes)/sizeof(availableNBModes));
873
874
875 /** android.color */
876#if PLATFORM_SDK_VERSION >= 23
877 static const camera_metadata_rational colorTransform[9] = {
878 {1, 1}, {0, 1}, {0, 1},
879 {0, 1}, {1, 1}, {0, 1},
880 {0, 1}, {0, 1}, {1, 1}
881 };
882 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
883#else
884 static const float colorTransform[9] = {
885 1.0f, 0.f, 0.f,
886 0.f, 1.f, 0.f,
887 0.f, 0.f, 1.f
888 };
889 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
890#endif
891 /** android.tonemap */
892 static const float tonemapCurve[4] = {
893 0.f, 0.f,
894 1.f, 1.f
895 };
896 settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
897 settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
898 settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
899
900 /** android.edge */
901 static const uint8_t edgeStrength = 5;
902 settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
903
904 /** android.scaler */
905 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
906 settings.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
907
908 static const int32_t cropRegion[] = {
909 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
910 };
911 settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
912
913 /** android.jpeg */
914 static const uint8_t jpegQuality = 80;
915 settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
916
917 static const int32_t thumbnailSize[2] = {
918 320, 240
919 };
920 settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
921
922 static const uint8_t thumbnailQuality = 80;
923 settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
924
925 static const double gpsCoordinates[3] = {
926 0, 0, 0
927 };
928 settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); //default 2 value
929
930 static const uint8_t gpsProcessingMethod[32] = "None";
931 settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
932
933 static const int64_t gpsTimestamp = 0;
934 settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
935
936 static const int32_t jpegOrientation = 0;
937 settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
938
939 /** android.stats */
940
941 static const uint8_t faceDetectMode =
942 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
943 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
944
945 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
946 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
947
948 static const uint8_t sharpnessMapMode =
949 ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
950 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
951
952 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
953 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
954 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
955 settings.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
956 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
957 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
958 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
959 // sharpnessMap only in frames
960
961 /** android.control */
962
963 uint8_t controlIntent = 0;
964 uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; //default value
965 uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
966 uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
967 switch (type) {
968 case CAMERA3_TEMPLATE_PREVIEW:
969 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
970 break;
971 case CAMERA3_TEMPLATE_STILL_CAPTURE:
972 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
973 break;
974 case CAMERA3_TEMPLATE_VIDEO_RECORD:
975 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
976 break;
977 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
978 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
979 break;
980 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
981 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
982 break;
983 case CAMERA3_TEMPLATE_MANUAL:
984 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
985 controlMode = ANDROID_CONTROL_MODE_OFF;
986 aeMode = ANDROID_CONTROL_AE_MODE_OFF;
987 awbMode = ANDROID_CONTROL_AWB_MODE_OFF;
988 break;
989 default:
990 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
991 break;
992 }
993 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
994 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
995
996 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
997 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
998
999 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
1000 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
1001
1002 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
1003
1004 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
1005 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
1006
1007 static const uint8_t aePrecaptureTrigger =
1008 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
1009 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
1010
1011 static const int32_t mAfTriggerId = 0;
1012 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,&mAfTriggerId, 1);
1013 static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
1014 settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
1015
1016 static const int32_t controlRegions[5] = {
1017 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
1018 1000
1019 };
1020// settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
1021
1022 static const int32_t aeExpCompensation = 0;
1023 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
1024
1025 static const int32_t aeTargetFpsRange[2] = {
1026 30, 30
1027 };
1028 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
1029
1030 static const uint8_t aeAntibandingMode =
1031 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
1032 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
1033
1034 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
1035
1036 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
1037 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
1038
1039// settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
1040
1041 uint8_t afMode = 0;
1042 switch (type) {
1043 case CAMERA3_TEMPLATE_PREVIEW:
1044 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1045 break;
1046 case CAMERA3_TEMPLATE_STILL_CAPTURE:
1047 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1048 break;
1049 case CAMERA3_TEMPLATE_VIDEO_RECORD:
1050 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1051 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
1052 break;
1053 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
1054 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1055 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
1056 break;
1057 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
1058 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1059 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
1060 break;
1061 case CAMERA3_TEMPLATE_MANUAL:
1062 afMode = ANDROID_CONTROL_AF_MODE_OFF;
1063 break;
1064 default:
1065 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1066 break;
1067 }
1068 settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
1069
1070 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1071 settings.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
1072
1073// settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
1074
1075 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
1076 settings.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
1077 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1078 settings.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
1079 static const uint8_t vstabMode =
1080 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
1081 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
1082
1083 // aeState, awbState, afState only in frame
1084
1085 uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
1086 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
1087 &aberrationMode, 1);
1088
1089 mDefaultTemplates[type] = settings.release();
1090
1091 return mDefaultTemplates[type];
1092}
1093
1094status_t EmulatedFakeCamera3::processCaptureRequest(
1095 camera3_capture_request *request) {
1096 status_t res;
1097 nsecs_t exposureTime;
1098 //nsecs_t frameDuration;
1099 uint32_t sensitivity;
1100 uint32_t frameNumber;
1101 bool mHaveThumbnail = false;
1102 CameraMetadata settings;
1103 Buffers *sensorBuffers = NULL;
1104 HalBufferVector *buffers = NULL;
1105
1106 if (mFlushTag) {
1107 DBG_LOGA("already flush, but still send Capture Request .\n");
1108 }
1109
1110 {
1111 Mutex::Autolock l(mLock);
1112
1113 /** Validation */
1114
1115 if (mStatus < STATUS_READY) {
1116 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
1117 mStatus);
1118 return INVALID_OPERATION;
1119 }
1120
1121 if (request == NULL) {
1122 ALOGE("%s: NULL request!", __FUNCTION__);
1123 return BAD_VALUE;
1124 }
1125
1126 frameNumber = request->frame_number;
1127
1128 if (request->settings == NULL && mPrevSettings.isEmpty()) {
1129 ALOGE("%s: Request %d: NULL settings for first request after"
1130 "configureStreams()", __FUNCTION__, frameNumber);
1131 return BAD_VALUE;
1132 }
1133
1134 if (request->input_buffer != NULL &&
1135 request->input_buffer->stream != mInputStream) {
1136 DBG_LOGB("%s: Request %d: Input buffer not from input stream!",
1137 __FUNCTION__, frameNumber);
1138 DBG_LOGB("%s: Bad stream %p, expected: %p",
1139 __FUNCTION__, request->input_buffer->stream,
1140 mInputStream);
1141 DBG_LOGB("%s: Bad stream type %d, expected stream type %d",
1142 __FUNCTION__, request->input_buffer->stream->stream_type,
1143 mInputStream ? mInputStream->stream_type : -1);
1144
1145 return BAD_VALUE;
1146 }
1147
1148 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1149 ALOGE("%s: Request %d: No output buffers provided!",
1150 __FUNCTION__, frameNumber);
1151 return BAD_VALUE;
1152 }
1153
1154 // Validate all buffers, starting with input buffer if it's given
1155
1156 ssize_t idx;
1157 const camera3_stream_buffer_t *b;
1158 if (request->input_buffer != NULL) {
1159 idx = -1;
1160 b = request->input_buffer;
1161 } else {
1162 idx = 0;
1163 b = request->output_buffers;
1164 }
1165 do {
1166 PrivateStreamInfo *priv =
1167 static_cast<PrivateStreamInfo*>(b->stream->priv);
1168 if (priv == NULL) {
1169 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
1170 __FUNCTION__, frameNumber, idx);
1171 return BAD_VALUE;
1172 }
1173#if 0
1174 if (!priv->alive || !priv->registered) {
1175 ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream! alive=%d, registered=%d\n",
1176 __FUNCTION__, frameNumber, idx,
1177 priv->alive, priv->registered);
1178 //return BAD_VALUE;
1179 }
1180#endif
1181 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1182 ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
1183 __FUNCTION__, frameNumber, idx);
1184 return BAD_VALUE;
1185 }
1186 if (b->release_fence != -1) {
1187 ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
1188 __FUNCTION__, frameNumber, idx);
1189 return BAD_VALUE;
1190 }
1191 if (b->buffer == NULL) {
1192 ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
1193 __FUNCTION__, frameNumber, idx);
1194 return BAD_VALUE;
1195 }
1196 idx++;
1197 b = &(request->output_buffers[idx]);
1198 } while (idx < (ssize_t)request->num_output_buffers);
1199
1200 // TODO: Validate settings parameters
1201
1202 /**
1203 * Start processing this request
1204 */
1205 mStatus = STATUS_ACTIVE;
1206
1207 camera_metadata_entry e;
1208
1209 if (request->settings == NULL) {
1210 settings.acquire(mPrevSettings);
1211 } else {
1212 settings = request->settings;
1213
1214 uint8_t antiBanding = 0;
1215 uint8_t effectMode = 0;
1216 int exposureCmp = 0;
1217 int32_t previewFpsRange[2];
1218
1219 e = settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
1220 if (e.count == 0) {
1221 ALOGE("%s: get ANDROID_CONTROL_AE_TARGET_FPS_RANGE failed!", __FUNCTION__);
1222 return BAD_VALUE;
1223 } else {
1224 previewFpsRange[0] = e.data.i32[0];
1225 previewFpsRange[1] = e.data.i32[1];
1226 mFrameDuration = 1000000000 / previewFpsRange[1];
1227 ALOGI("set ANDROID_CONTROL_AE_TARGET_FPS_RANGE :%d,%d", previewFpsRange[0], previewFpsRange[1]);
1228 }
1229
1230 e = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE);
1231 if (e.count == 0) {
1232 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1233 return BAD_VALUE;
1234 }
1235 antiBanding = e.data.u8[0];
1236 mSensor->setAntiBanding(antiBanding);
1237
1238 e = settings.find(ANDROID_CONTROL_EFFECT_MODE);
1239 if (e.count == 0) {
1240 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1241 return BAD_VALUE;
1242 }
1243 effectMode = e.data.u8[0];
1244 mSensor->setEffect(effectMode);
1245
1246 e = settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
1247 if (e.count == 0) {
1248 ALOGE("%s: No exposure entry!", __FUNCTION__);
1249 //return BAD_VALUE;
1250 } else {
1251 exposureCmp = e.data.i32[0];
1252 DBG_LOGB("set expsore compensaton %d\n", exposureCmp);
1253 mSensor->setExposure(exposureCmp);
1254 }
1255
1256 int32_t cropRegion[4];
1257 int32_t cropWidth;
1258 int32_t outputWidth = request->output_buffers[0].stream->width;
1259
1260 e = settings.find(ANDROID_SCALER_CROP_REGION);
1261 if (e.count == 0) {
1262 ALOGE("%s: No corp region entry!", __FUNCTION__);
1263 //return BAD_VALUE;
1264 } else {
1265 cropRegion[0] = e.data.i32[0];
1266 cropRegion[1] = e.data.i32[1];
1267 cropWidth = cropRegion[2] = e.data.i32[2];
1268 cropRegion[3] = e.data.i32[3];
1269 for (int i = mZoomMin; i <= mZoomMax; i += mZoomStep) {
1270 //if ( (float) i / mZoomMin >= (float) outputWidth / cropWidth) {
1271 if ( i * cropWidth >= outputWidth * mZoomMin ) {
1272 mSensor->setZoom(i);
1273 break;
1274 }
1275 }
1276 DBG_LOGB("cropRegion:%d, %d, %d, %d\n", cropRegion[0], cropRegion[1],cropRegion[2],cropRegion[3]);
1277 }
1278 }
1279
1280 res = process3A(settings);
1281 if (res != OK) {
1282 ALOGVV("%s: process3A failed!", __FUNCTION__);
1283 //return res;
1284 }
1285
1286 // TODO: Handle reprocessing
1287
1288 /**
1289 * Get ready for sensor config
1290 */
1291
1292 bool needJpeg = false;
1293 ssize_t jpegbuffersize;
1294 uint32_t jpegpixelfmt;
1295
1296 exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
1297 //frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
1298 sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
1299
1300 sensorBuffers = new Buffers();
1301 buffers = new HalBufferVector();
1302
1303 sensorBuffers->setCapacity(request->num_output_buffers);
1304 buffers->setCapacity(request->num_output_buffers);
1305
1306 // Process all the buffers we got for output, constructing internal buffer
1307 // structures for them, and lock them for writing.
1308 for (size_t i = 0; i < request->num_output_buffers; i++) {
1309 const camera3_stream_buffer &srcBuf = request->output_buffers[i];
1310 const private_handle_t *privBuffer =
1311 (const private_handle_t*)(*srcBuf.buffer);
1312 StreamBuffer destBuf;
1313 destBuf.streamId = kGenericStreamId;
1314 destBuf.width = srcBuf.stream->width;
1315 destBuf.height = srcBuf.stream->height;
1316 destBuf.format = privBuffer->format; // Use real private format
1317 destBuf.stride = privBuffer->stride; //srcBuf.stream->width; // TODO: query from gralloc
1318 destBuf.buffer = srcBuf.buffer;
1319 destBuf.share_fd = privBuffer->share_fd;
1320
1321 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
1322 needJpeg = true;
1323 memset(&info,0,sizeof(struct ExifInfo));
1324 info.orientation = settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
1325 jpegpixelfmt = mSensor->getOutputFormat();
1326 if (!mSupportRotate) {
1327 info.mainwidth = srcBuf.stream->width;
1328 info.mainheight = srcBuf.stream->height;
1329 } else {
1330 if ((info.orientation == 90) || (info.orientation == 270)) {
1331 info.mainwidth = srcBuf.stream->height;
1332 info.mainheight = srcBuf.stream->width;
1333 } else {
1334 info.mainwidth = srcBuf.stream->width;
1335 info.mainheight = srcBuf.stream->height;
1336 }
1337 }
1338 if ((jpegpixelfmt == V4L2_PIX_FMT_MJPEG) || (jpegpixelfmt == V4L2_PIX_FMT_YUYV)) {
1339 mSensor->setOutputFormat(info.mainwidth,info.mainheight,jpegpixelfmt,1);
1340 } else {
1341 mSensor->setOutputFormat(info.mainwidth,info.mainheight,V4L2_PIX_FMT_RGB24,1);
1342 }
1343 }
1344
1345 // Wait on fence
1346 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
1347 res = bufferAcquireFence->wait(kFenceTimeoutMs);
1348 if (res == TIMED_OUT) {
1349 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
1350 __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
1351 }
1352 if (res == OK) {
1353 // Lock buffer for writing
1354 const Rect rect(destBuf.width, destBuf.height);
1355 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1356 if (privBuffer->format == HAL_PIXEL_FORMAT_YCbCr_420_888/*HAL_PIXEL_FORMAT_YCrCb_420_SP*/) {
1357 android_ycbcr ycbcr = android_ycbcr();
1358 res = GraphicBufferMapper::get().lockYCbCr(
1359 *(destBuf.buffer),
1360 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK,
1361 rect,
1362 &ycbcr);
1363 // This is only valid because we know that emulator's
1364 // YCbCr_420_888 is really contiguous NV21 under the hood
1365 destBuf.img = static_cast<uint8_t*>(ycbcr.y);
1366 } else {
1367 ALOGE("Unexpected private format for flexible YUV: 0x%x",
1368 privBuffer->format);
1369 res = INVALID_OPERATION;
1370 }
1371 } else {
1372 res = GraphicBufferMapper::get().lock(*(destBuf.buffer),
1373 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK,
1374 rect,
1375 (void**)&(destBuf.img));
1376 }
1377 if (res != OK) {
1378 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
1379 __FUNCTION__, frameNumber, i);
1380 }
1381 }
1382
1383 if (res != OK) {
1384 // Either waiting or locking failed. Unlock locked buffers and bail
1385 // out.
1386 for (size_t j = 0; j < i; j++) {
1387 GraphicBufferMapper::get().unlock(
1388 *(request->output_buffers[i].buffer));
1389 }
1390 ALOGE("line:%d, format for this usage: %d x %d, usage %x, format=%x, returned\n",
1391 __LINE__, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format);
1392 return NO_INIT;
1393 }
1394 sensorBuffers->push_back(destBuf);
1395 buffers->push_back(srcBuf);
1396 }
1397
1398 if (needJpeg) {
1399 if (!mSupportRotate) {
1400 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1401 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1402 } else {
1403 if ((info.orientation == 90) || (info.orientation == 270)) {
1404 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1405 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1406 } else {
1407 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1408 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1409 }
1410 }
1411 if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
1412 info.latitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[0];
1413 info.longitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[1];
1414 info.altitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[2];
1415 info.has_latitude = true;
1416 info.has_longitude = true;
1417 info.has_altitude = true;
1418 } else {
1419 info.has_latitude = false;
1420 info.has_longitude = false;
1421 info.has_altitude = false;
1422 }
1423 if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
1424 uint8_t * gpsString = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
1425 memcpy(info.gpsProcessingMethod, gpsString , sizeof(info.gpsProcessingMethod)-1);
1426 info.has_gpsProcessingMethod = true;
1427 } else {
1428 info.has_gpsProcessingMethod = false;
1429 }
1430 if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
1431 info.gpsTimestamp = settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
1432 info.has_gpsTimestamp = true;
1433 } else {
1434 info.has_gpsTimestamp = false;
1435 }
1436 if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
1437 info.focallen = settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
1438 info.has_focallen = true;
1439 } else {
1440 info.has_focallen = false;
1441 }
1442 jpegbuffersize = getJpegBufferSize(info.mainwidth,info.mainheight);
1443
1444 mJpegCompressor->SetMaxJpegBufferSize(jpegbuffersize);
1445 mJpegCompressor->SetExifInfo(info);
1446 mSensor->setPictureRotate(info.orientation);
1447 if ((info.thumbwidth > 0) && (info.thumbheight > 0)) {
1448 mHaveThumbnail = true;
1449 }
1450 DBG_LOGB("%s::thumbnailSize_width=%d,thumbnailSize_height=%d,mainsize_width=%d,mainsize_height=%d,jpegOrientation=%d",__FUNCTION__,
1451 info.thumbwidth,info.thumbheight,info.mainwidth,info.mainheight,info.orientation);
1452 }
1453 /**
1454 * Wait for JPEG compressor to not be busy, if needed
1455 */
1456#if 0
1457 if (needJpeg) {
1458 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1459 if (!ready) {
1460 ALOGE("%s: Timeout waiting for JPEG compression to complete!",
1461 __FUNCTION__);
1462 return NO_INIT;
1463 }
1464 }
1465#else
1466 while (needJpeg) {
1467 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1468 if (ready) {
1469 break;
1470 }
1471 }
1472#endif
1473 }
1474 /**
1475 * Wait until the in-flight queue has room
1476 */
1477 res = mReadoutThread->waitForReadout();
1478 if (res != OK) {
1479 ALOGE("%s: Timeout waiting for previous requests to complete!",
1480 __FUNCTION__);
1481 return NO_INIT;
1482 }
1483
1484 /**
1485 * Wait until sensor's ready. This waits for lengthy amounts of time with
1486 * mLock held, but the interface spec is that no other calls may by done to
1487 * the HAL by the framework while process_capture_request is happening.
1488 */
1489 {
1490 Mutex::Autolock l(mLock);
1491 int syncTimeoutCount = 0;
1492 while (!mSensor->waitForVSync(kSyncWaitTimeout)) {
1493 if (mStatus == STATUS_ERROR) {
1494 return NO_INIT;
1495 }
1496 if (syncTimeoutCount == kMaxSyncTimeoutCount) {
1497 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
1498 __FUNCTION__, frameNumber,
1499 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
1500 return NO_INIT;
1501 }
1502 syncTimeoutCount++;
1503 }
1504
1505 /**
1506 * Configure sensor and queue up the request to the readout thread
1507 */
1508 mSensor->setExposureTime(exposureTime);
1509 //mSensor->setFrameDuration(frameDuration);
1510 mSensor->setFrameDuration(mFrameDuration);
1511 mSensor->setSensitivity(sensitivity);
1512 mSensor->setDestinationBuffers(sensorBuffers);
1513 mSensor->setFrameNumber(request->frame_number);
1514
1515 ReadoutThread::Request r;
1516 r.frameNumber = request->frame_number;
1517 r.settings = settings;
1518 r.sensorBuffers = sensorBuffers;
1519 r.buffers = buffers;
1520 r.havethumbnail = mHaveThumbnail;
1521
1522 mReadoutThread->queueCaptureRequest(r);
1523 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1524
1525 // Cache the settings for next time
1526 mPrevSettings.acquire(settings);
1527 }
1528 CAMHAL_LOGVB("%s , X" , __FUNCTION__);
1529 return OK;
1530}
1531
1532/** Debug methods */
1533
1534void EmulatedFakeCamera3::dump(int fd) {
1535
1536 String8 result;
1537 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
1538 result = String8::format("%s, valid resolution\n", __FILE__);
1539
1540 for (uint32_t f = 0; f < count; f+=2) {
1541 if (mAvailableJpegSize[f] == 0)
1542 break;
1543 result.appendFormat("width: %d , height =%d\n",
1544 mAvailableJpegSize[f], mAvailableJpegSize[f+1]);
1545 }
1546 result.appendFormat("\nmZoomMin: %d , mZoomMax =%d, mZoomStep=%d\n",
1547 mZoomMin, mZoomMax, mZoomStep);
1548
1549 if (mZoomStep <= 0) {
1550 result.appendFormat("!!!!!!!!!camera apk may have no picture out\n");
1551 }
1552
1553 write(fd, result.string(), result.size());
1554
1555 if (mSensor.get() != NULL) {
1556 mSensor->dump(fd);
1557 }
1558
1559}
//flush all request
//TODO returned buffers every request held immediately with
//CAMERA3_BUFFER_STATUS_ERROR flag.
int EmulatedFakeCamera3::flush_all_requests() {
    DBG_LOGA("flush all request");
    // Mark the HAL as flushing before touching the pipeline stages, so that
    // a concurrent processCaptureRequest can observe it (see mFlushTag check
    // there).
    mFlushTag = true;
    // Drain the readout thread's queued requests, then clear the flush flags
    // on both the readout thread and the sensor so later requests run
    // normally.  The order of these calls is significant.
    mReadoutThread->flushAllRequest(true);
    mReadoutThread->setFlushFlag(false);
    mSensor->setFlushFlag(false);
    return 0;
}
1571/** Tag query methods */
// No vendor-defined metadata sections exist for the fake camera.
const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) {
    return NULL;
}
1575
// No vendor-defined metadata tags exist for the fake camera.
const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) {
    return NULL;
}
1579
// No vendor-defined metadata tags exist, so every tag "type" query
// returns 0.
int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
    return 0;
}
1583
1584/**
1585 * Private methods
1586 */
1587
1588camera_metadata_ro_entry_t EmulatedFakeCamera3::staticInfo(const CameraMetadata *info, uint32_t tag,
1589 size_t minCount, size_t maxCount, bool required) const {
1590
1591 camera_metadata_ro_entry_t entry = info->find(tag);
1592
1593 if (CC_UNLIKELY( entry.count == 0 ) && required) {
1594 const char* tagSection = get_camera_metadata_section_name(tag);
1595 if (tagSection == NULL) tagSection = "<unknown>";
1596 const char* tagName = get_camera_metadata_tag_name(tag);
1597 if (tagName == NULL) tagName = "<unknown>";
1598
1599 ALOGE("Error finding static metadata entry '%s.%s' (%x)",
1600 tagSection, tagName, tag);
1601 } else if (CC_UNLIKELY(
1602 (minCount != 0 && entry.count < minCount) ||
1603 (maxCount != 0 && entry.count > maxCount) ) ) {
1604 const char* tagSection = get_camera_metadata_section_name(tag);
1605 if (tagSection == NULL) tagSection = "<unknown>";
1606 const char* tagName = get_camera_metadata_tag_name(tag);
1607 if (tagName == NULL) tagName = "<unknown>";
1608 ALOGE("Malformed static metadata entry '%s.%s' (%x):"
1609 "Expected between %zu and %zu values, but got %zu values",
1610 tagSection, tagName, tag, minCount, maxCount, entry.count);
1611 }
1612
1613 return entry;
1614}
1615
1616//this is only for debug
1617void EmulatedFakeCamera3::getStreamConfigurationp(CameraMetadata *info) {
1618 const int STREAM_CONFIGURATION_SIZE = 4;
1619 const int STREAM_FORMAT_OFFSET = 0;
1620 const int STREAM_WIDTH_OFFSET = 1;
1621 const int STREAM_HEIGHT_OFFSET = 2;
1622 const int STREAM_IS_INPUT_OFFSET = 3;
1623
1624 camera_metadata_ro_entry_t availableStreamConfigs =
1625 staticInfo(info, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
1626 CAMHAL_LOGDB(" stream, availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1627
1628 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1629 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
1630 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
1631 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
1632 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
1633 CAMHAL_LOGDB("f=%x, w*h=%dx%d, du=%d\n", format, width, height, isInput);
1634 }
1635
1636}
1637
1638//this is only for debug
1639void EmulatedFakeCamera3::getStreamConfigurationDurations(CameraMetadata *info) {
1640 const int STREAM_CONFIGURATION_SIZE = 4;
1641 const int STREAM_FORMAT_OFFSET = 0;
1642 const int STREAM_WIDTH_OFFSET = 1;
1643 const int STREAM_HEIGHT_OFFSET = 2;
1644 const int STREAM_IS_INPUT_OFFSET = 3;
1645
1646 camera_metadata_ro_entry_t availableStreamConfigs =
1647 staticInfo(info, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
1648 CAMHAL_LOGDB("availableStreamConfigs.count=%d\n", availableStreamConfigs.count);
1649
1650 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1651 int64_t format = availableStreamConfigs.data.i64[i + STREAM_FORMAT_OFFSET];
1652 int64_t width = availableStreamConfigs.data.i64[i + STREAM_WIDTH_OFFSET];
1653 int64_t height = availableStreamConfigs.data.i64[i + STREAM_HEIGHT_OFFSET];
1654 int64_t isInput = availableStreamConfigs.data.i64[i + STREAM_IS_INPUT_OFFSET];
1655 CAMHAL_LOGDB("f=%llx, w*h=%lldx%lld, du=%lld\n", format, width, height, isInput);
1656 }
1657}
1658
// Hook for platform-specific adjustments to the static metadata;
// intentionally a no-op in this implementation.
void EmulatedFakeCamera3::updateCameraMetaData(CameraMetadata *info) {

}
1662
1663status_t EmulatedFakeCamera3::constructStaticInfo() {
1664
1665 status_t ret = OK;
1666 CameraMetadata info;
1667 uint32_t picSizes[64 * 8];
1668 int64_t* duration = NULL;
1669 int count, duration_count, availablejpegsize;
1670 uint8_t maxCount = 10;
1671 char property[PROPERTY_VALUE_MAX];
1672 unsigned int supportrotate;
1673 availablejpegsize = ARRAY_SIZE(mAvailableJpegSize);
1674 memset(mAvailableJpegSize,0,(sizeof(uint32_t))*availablejpegsize);
1675 sp<Sensor> s = new Sensor();
1676 ret = s->startUp(mCameraID);
1677 if (ret != OK) {
1678 DBG_LOGA("sensor start up failed");
1679 return ret;
1680 }
1681
1682 mSensorType = s->getSensorType();
1683
1684 if ( mSensorType == SENSOR_USB) {
1685 char property[PROPERTY_VALUE_MAX];
1686 property_get("rw.camera.usb.faceback", property, "false");
1687 if (strstr(property, "true"))
1688 mFacingBack = 1;
1689 else
1690 mFacingBack = 0;
1691 ALOGI("Setting usb camera cameraID:%d to back camera:%s\n",
1692 mCameraID, property);
1693 } else {
1694 if (s->mSensorFace == SENSOR_FACE_FRONT) {
1695 mFacingBack = 0;
1696 } else if (s->mSensorFace == SENSOR_FACE_BACK) {
1697 mFacingBack = 1;
1698 } else if (s->mSensorFace == SENSOR_FACE_NONE) {
1699 if (gEmulatedCameraFactory.getEmulatedCameraNum() == 1) {
1700 mFacingBack = 1;
1701 } else if ( mCameraID == 0) {
1702 mFacingBack = 1;
1703 } else {
1704 mFacingBack = 0;
1705 }
1706 }
1707
1708 ALOGI("Setting on board camera cameraID:%d to back camera:%d[0 false, 1 true]\n",
1709 mCameraID, mFacingBack);
1710 }
1711
1712 mSupportCap = s->IoctlStateProbe();
1713 if (mSupportCap & IOCTL_MASK_ROTATE) {
1714 supportrotate = true;
1715 } else {
1716 supportrotate = false;
1717 }
1718 // android.lens
1719
1720 // 5 cm min focus distance for back camera, infinity (fixed focus) for front
1721 // TODO read this ioctl from camera driver
1722 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
1723 const float minFocusDistance = 0.0;
1724 info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1725 &minFocusDistance, 1);
1726
1727 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1728 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
1729 info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1730 &minFocusDistance, 1);
1731
1732 static const float focalLength = 3.30f; // mm
1733 info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1734 &focalLength, 1);
1735 static const float aperture = 2.8f;
1736 info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1737 &aperture, 1);
1738 static const float filterDensity = 0;
1739 info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1740 &filterDensity, 1);
1741 static const uint8_t availableOpticalStabilization =
1742 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1743 info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1744 &availableOpticalStabilization, 1);
1745
1746 static const int32_t lensShadingMapSize[] = {1, 1};
1747 info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1748 sizeof(lensShadingMapSize)/sizeof(int32_t));
1749
1750 /*lens facing related camera feature*/
1751 /*camera feature setting in /device/amlogic/xxx/xxx.mk files*/
1752 uint8_t lensFacing = mFacingBack ?
1753 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1754 info.update(ANDROID_LENS_FACING, &lensFacing, 1);
1755
1756 float lensPosition[3];
1757 if (mFacingBack) {
1758 // Back-facing camera is center-top on device
1759 lensPosition[0] = 0;
1760 lensPosition[1] = 20;
1761 lensPosition[2] = -5;
1762 } else {
1763 // Front-facing camera is center-right on device
1764 lensPosition[0] = 20;
1765 lensPosition[1] = 20;
1766 lensPosition[2] = 0;
1767 }
1768#if PLATFORM_SDK_VERSION <= 22
1769 info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
1770 sizeof(float));
1771#endif
1772 static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
1773 info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1);
1774
1775 // android.sensor
1776
1777 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1778 info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
1779 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1780 info.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
1781 info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1782 Sensor::kExposureTimeRange, 2);
1783
1784 info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1785 &Sensor::kFrameDurationRange[1], 1);
1786
1787 info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1788 Sensor::kSensitivityRange,
1789 sizeof(Sensor::kSensitivityRange)
1790 /sizeof(int32_t));
1791
1792 info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1793 &Sensor::kColorFilterArrangement, 1);
1794
1795 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1796 info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1797 sensorPhysicalSize, 2);
1798
1799 info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1800 (int32_t*)&Sensor::kMaxRawValue, 1);
1801
1802 static const int32_t blackLevelPattern[4] = {
1803 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1804 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
1805 };
1806 info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1807 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
1808
1809 static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
1810 info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
1811 if (mSensorType == SENSOR_USB) {
1812 if (mFacingBack) {
1813 property_get("hw.camera.orientation.back", property, "0");
1814 } else {
1815 property_get("hw.camera.orientation.front", property, "0");
1816 }
1817 int32_t orientation = atoi(property);
1818 property_get("hw.camera.usb.orientation_offset", property, "0");
1819 orientation += atoi(property);
1820 orientation %= 360;
1821 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1822 } else {
1823 if (mFacingBack) {
1824 property_get("hw.camera.orientation.back", property, "270");
1825 const int32_t orientation = atoi(property);
1826 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1827 } else {
1828 property_get("hw.camera.orientation.front", property, "90");
1829 const int32_t orientation = atoi(property);
1830 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1831 }
1832 }
1833
1834 static const int64_t rollingShutterSkew = 0;
1835 info.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
1836
1837 //TODO: sensor color calibration fields
1838
1839 // android.flash
1840 static const uint8_t flashAvailable = 0;
1841 info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1842
1843 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
1844 info.update(ANDROID_FLASH_STATE, &flashstate, 1);
1845
1846 static const int64_t flashChargeDuration = 0;
1847 info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
1848
1849 /** android.noise */
1850 static const uint8_t availableNBModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
1851 info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &availableNBModes, 1);
1852
1853 // android.tonemap
1854 static const uint8_t availabletonemapModes[] = {
1855 ANDROID_TONEMAP_MODE_FAST,
1856 ANDROID_TONEMAP_MODE_HIGH_QUALITY
1857 };
1858 info.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, availabletonemapModes,
1859 sizeof(availabletonemapModes)/sizeof(availabletonemapModes[0]));
1860
1861 static const int32_t tonemapCurvePoints = 128;
1862 info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1863
1864 // android.scaler
1865
1866 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
1867 info.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
1868
1869 info.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1870 kAvailableFormats,
1871 sizeof(kAvailableFormats)/sizeof(int32_t));
1872
1873 info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1874 (int64_t*)kAvailableRawMinDurations,
1875 sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
1876
1877 //for version 3.2 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
1878 count = sizeof(picSizes)/sizeof(picSizes[0]);
1879 count = s->getStreamConfigurations(picSizes, kAvailableFormats, count);
1880
1881 info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1882 (int32_t*)picSizes, count);
1883
1884 if (count < availablejpegsize) {
1885 availablejpegsize = count;
1886 }
1887 getValidJpegSize(picSizes,mAvailableJpegSize,availablejpegsize);
1888
1889 maxJpegResolution = getMaxJpegResolution(picSizes,count);
1890 int32_t full_size[4];
1891 if (mFacingBack) {
1892 full_size[0] = 0;
1893 full_size[1] = 0;
1894 full_size[2] = maxJpegResolution.width;
1895 full_size[3] = maxJpegResolution.height;
1896 } else {
1897 full_size[0] = 0;
1898 full_size[1] = 0;
1899 full_size[2] = maxJpegResolution.width;
1900 full_size[3] = maxJpegResolution.height;
1901 }
1902 /*activeArray.width <= pixelArraySize.Width && activeArray.height<= pixelArraySize.Height*/
1903 info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1904 (int32_t*)full_size,
1905 sizeof(full_size)/sizeof(full_size[0]));
1906 info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1907 (int32_t*)(&full_size[2]), 2);
1908
1909 duration = new int64_t[count];
1910 if (duration == NULL) {
1911 DBG_LOGA("allocate memory for duration failed");
1912 return NO_MEMORY;
1913 } else {
1914 memset(duration,0,sizeof(int64_t)*count);
1915 }
1916 duration_count = s->getStreamConfigurationDurations(picSizes, duration, count, true);
1917 info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1918 duration, duration_count);
1919
1920 memset(duration,0,sizeof(int64_t)*count);
1921 duration_count = s->getStreamConfigurationDurations(picSizes, duration, count, false);
1922 info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1923 duration, duration_count);
1924
1925 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1926 (int64_t*)kAvailableProcessedMinDurations,
1927 sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
1928
1929 info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
1930 (int64_t*)kAvailableJpegMinDurations,
1931 sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
1932
1933
1934 // android.jpeg
1935
1936 static const int32_t jpegThumbnailSizes[] = {
1937 0, 0,
1938 128, 72,
1939 160, 120,
1940 320, 240
1941 };
1942 info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1943 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
1944
1945 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1946 info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1947
1948 // android.stats
1949
1950 static const uint8_t availableFaceDetectModes[] = {
1951 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1952 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1953 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
1954 };
1955
1956 info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1957 availableFaceDetectModes,
1958 sizeof(availableFaceDetectModes));
1959
1960 static const int32_t maxFaceCount = 8;
1961 info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1962 &maxFaceCount, 1);
1963
1964 static const int32_t histogramSize = 64;
1965 info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1966 &histogramSize, 1);
1967
1968 static const int32_t maxHistogramCount = 1000;
1969 info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1970 &maxHistogramCount, 1);
1971
1972 static const int32_t sharpnessMapSize[2] = {64, 64};
1973 info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1974 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
1975
1976 static const int32_t maxSharpnessMapValue = 1000;
1977 info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1978 &maxSharpnessMapValue, 1);
1979 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
1980 info.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
1981
1982 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
1983 info.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
1984 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
1985 info.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
1986 // android.control
1987
1988 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
1989 info.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
1990
1991 static const uint8_t availableSceneModes[] = {
1992 // ANDROID_CONTROL_SCENE_MODE_DISABLED,
1993 ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY
1994 };
1995 info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1996 availableSceneModes, sizeof(availableSceneModes));
1997
1998 static const uint8_t availableEffects[] = {
1999 ANDROID_CONTROL_EFFECT_MODE_OFF
2000 };
2001 info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2002 availableEffects, sizeof(availableEffects));
2003
2004 static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
2005 info.update(ANDROID_CONTROL_MAX_REGIONS,
2006 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
2007
2008 static const uint8_t availableAeModes[] = {
2009 ANDROID_CONTROL_AE_MODE_OFF,
2010 ANDROID_CONTROL_AE_MODE_ON
2011 };
2012 info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2013 availableAeModes, sizeof(availableAeModes));
2014
2015
2016 static const int32_t availableTargetFpsRanges[] = {
2017 5, 15, 15, 15, 5, 20, 20, 20, 5, 25, 25, 25, 5, 30, 30, 30,
2018 };
2019 info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2020 availableTargetFpsRanges,
2021 sizeof(availableTargetFpsRanges)/sizeof(int32_t));
2022
2023 uint8_t awbModes[maxCount];
2024 count = s->getAWB(awbModes, maxCount);
2025 if (count < 0) {
2026 static const uint8_t availableAwbModes[] = {
2027 ANDROID_CONTROL_AWB_MODE_OFF,
2028 ANDROID_CONTROL_AWB_MODE_AUTO,
2029 ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
2030 ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
2031 ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
2032 ANDROID_CONTROL_AWB_MODE_SHADE
2033 };
2034 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2035 availableAwbModes, sizeof(availableAwbModes));
2036 } else {
2037 DBG_LOGB("getAWB %d ",count);
2038 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2039 awbModes, count);
2040 }
2041
2042 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
2043 info.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
2044
2045 static const uint8_t availableAfModesFront[] = {
2046 ANDROID_CONTROL_AF_MODE_OFF
2047 };
2048
2049 if (mFacingBack) {
2050 uint8_t afMode[maxCount];
2051 count = s->getAutoFocus(afMode, maxCount);
2052 if (count < 0) {
2053 static const uint8_t availableAfModesBack[] = {
2054 ANDROID_CONTROL_AF_MODE_OFF,
2055 //ANDROID_CONTROL_AF_MODE_AUTO,
2056 //ANDROID_CONTROL_AF_MODE_MACRO,
2057 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
2058 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
2059 };
2060
2061 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2062 availableAfModesBack, sizeof(availableAfModesBack));
2063 } else {
2064 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2065 afMode, count);
2066 }
2067 } else {
2068 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2069 availableAfModesFront, sizeof(availableAfModesFront));
2070 }
2071
2072 uint8_t antiBanding[maxCount];
2073 count = s->getAntiBanding(antiBanding, maxCount);
2074 if (count < 0) {
2075 static const uint8_t availableAntibanding[] = {
2076 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
2077 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
2078 };
2079 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2080 availableAntibanding, sizeof(availableAntibanding));
2081 } else {
2082 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2083 antiBanding, count);
2084 }
2085
2086 camera_metadata_rational step;
2087 int maxExp, minExp, def;
2088 ret = s->getExposure(&maxExp, &minExp, &def, &step);
2089 if (ret < 0) {
2090 static const int32_t aeExpCompensation = 0;
2091 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
2092
2093 static const camera_metadata_rational exposureCompensationStep = {
2094 1, 3
2095 };
2096 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2097 &exposureCompensationStep, 1);
2098
2099 int32_t exposureCompensationRange[] = {-6, 6};
2100 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2101 exposureCompensationRange,
2102 sizeof(exposureCompensationRange)/sizeof(int32_t));
2103 } else {
2104 DBG_LOGB("exposure compensation support:(%d, %d)\n", minExp, maxExp);
2105 int32_t exposureCompensationRange[] = {minExp, maxExp};
2106 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2107 exposureCompensationRange,
2108 sizeof(exposureCompensationRange)/sizeof(int32_t));
2109 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2110 &step, 1);
2111 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &def, 1);
2112 }
2113
2114 ret = s->getZoom(&mZoomMin, &mZoomMax, &mZoomStep);
2115 if (ret < 0) {
2116 float maxZoom = 1.0;
2117 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2118 &maxZoom, 1);
2119 } else {
2120 if (mZoomMin != 0) {
2121 float maxZoom = mZoomMax / mZoomMin;
2122 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2123 &maxZoom, 1);
2124 } else {
2125 float maxZoom = 1.0;
2126 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2127 &maxZoom, 1);
2128 }
2129 }
2130
2131 static const uint8_t availableVstabModes[] = {
2132 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
2133 };
2134 info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2135 availableVstabModes, sizeof(availableVstabModes));
2136
2137 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
2138 info.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
2139 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2140 info.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
2141 // android.info
2142 const uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2143 //mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
2144 // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2145 info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2146 &supportedHardwareLevel,
2147 /*count*/1);
2148
2149 int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
2150 info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1);
2151
2152 uint8_t len[] = {1};
2153 info.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
2154
2155 /*for cts BurstCaptureTest ->testYuvBurst */
2156 uint8_t maxlen[] = {kMaxBufferCount};
2157 info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
2158 uint8_t cap[] = {
2159 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
2160 };
2161 info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2162 (uint8_t *)cap, sizeof(cap)/sizeof(cap[0]));
2163
2164
2165 int32_t partialResultCount = 1;
2166 info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,&partialResultCount,1);
2167 int32_t maxNumOutputStreams[3] = {0,2,1};
2168 info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,maxNumOutputStreams,3);
2169 uint8_t aberrationMode[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
2170 info.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
2171 aberrationMode, 1);
2172 info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
2173 aberrationMode, 1);
2174
2175 getAvailableChKeys(&info, supportedHardwareLevel);
2176
2177 if (mCameraInfo != NULL) {
2178 CAMHAL_LOGDA("mCameraInfo is not null, mem leak?");
2179 }
2180 mCameraInfo = info.release();
2181 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
2182
2183 if (duration != NULL) {
2184 delete [] duration;
2185 }
2186
2187 s->shutDown();
2188 s.clear();
2189 mPlugged = true;
2190
2191 return OK;
2192}
2193
2194status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
2195 /**
2196 * Extract top-level 3A controls
2197 */
2198 status_t res;
2199
2200 bool facePriority = false;
2201
2202 camera_metadata_entry e;
2203
2204 e = settings.find(ANDROID_CONTROL_MODE);
2205 if (e.count == 0) {
2206 ALOGE("%s: No control mode entry!", __FUNCTION__);
2207 return BAD_VALUE;
2208 }
2209 uint8_t controlMode = e.data.u8[0];
2210
2211 e = settings.find(ANDROID_CONTROL_SCENE_MODE);
2212 if (e.count == 0) {
2213 ALOGE("%s: No scene mode entry!", __FUNCTION__);
2214 return BAD_VALUE;
2215 }
2216 uint8_t sceneMode = e.data.u8[0];
2217
2218 if (controlMode == ANDROID_CONTROL_MODE_OFF) {
2219 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2220 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2221 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2222 update3A(settings);
2223 return OK;
2224 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2225 switch(sceneMode) {
2226 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
2227 mFacePriority = true;
2228 break;
2229 default:
2230 ALOGE("%s: Emulator doesn't support scene mode %d",
2231 __FUNCTION__, sceneMode);
2232 return BAD_VALUE;
2233 }
2234 } else {
2235 mFacePriority = false;
2236 }
2237
2238 // controlMode == AUTO or sceneMode = FACE_PRIORITY
2239 // Process individual 3A controls
2240
2241 res = doFakeAE(settings);
2242 if (res != OK) return res;
2243
2244 res = doFakeAF(settings);
2245 if (res != OK) return res;
2246
2247 res = doFakeAWB(settings);
2248 if (res != OK) return res;
2249
2250 update3A(settings);
2251 return OK;
2252}
2253
2254status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
2255 camera_metadata_entry e;
2256
2257 e = settings.find(ANDROID_CONTROL_AE_MODE);
2258 if (e.count == 0) {
2259 ALOGE("%s: No AE mode entry!", __FUNCTION__);
2260 return BAD_VALUE;
2261 }
2262 uint8_t aeMode = e.data.u8[0];
2263
2264 switch (aeMode) {
2265 case ANDROID_CONTROL_AE_MODE_OFF:
2266 // AE is OFF
2267 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2268 return OK;
2269 case ANDROID_CONTROL_AE_MODE_ON:
2270 // OK for AUTO modes
2271 break;
2272 default:
2273 ALOGVV("%s: Emulator doesn't support AE mode %d",
2274 __FUNCTION__, aeMode);
2275 return BAD_VALUE;
2276 }
2277
2278 e = settings.find(ANDROID_CONTROL_AE_LOCK);
2279 if (e.count == 0) {
2280 ALOGE("%s: No AE lock entry!", __FUNCTION__);
2281 return BAD_VALUE;
2282 }
2283 bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
2284
2285 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
2286 bool precaptureTrigger = false;
2287 if (e.count != 0) {
2288 precaptureTrigger =
2289 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
2290 }
2291
2292 if (precaptureTrigger) {
2293 ALOGVV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
2294 } else if (e.count > 0) {
2295 ALOGVV("%s: Pre capture trigger was present? %zu",
2296 __FUNCTION__,
2297 e.count);
2298 }
2299
2300 if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2301 // Run precapture sequence
2302 if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2303 mAeCounter = 0;
2304 }
2305
2306 if (mFacePriority) {
2307 mAeTargetExposureTime = kFacePriorityExposureTime;
2308 } else {
2309 mAeTargetExposureTime = kNormalExposureTime;
2310 }
2311
2312 if (mAeCounter > kPrecaptureMinFrames &&
2313 (mAeTargetExposureTime - mAeCurrentExposureTime) <
2314 mAeTargetExposureTime / 10) {
2315 // Done with precapture
2316 mAeCounter = 0;
2317 mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
2318 ANDROID_CONTROL_AE_STATE_CONVERGED;
2319 } else {
2320 // Converge some more
2321 mAeCurrentExposureTime +=
2322 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2323 kExposureTrackRate;
2324 mAeCounter++;
2325 mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
2326 }
2327
2328 } else if (!aeLocked) {
2329 // Run standard occasional AE scan
2330 switch (mAeState) {
2331 case ANDROID_CONTROL_AE_STATE_CONVERGED:
2332 case ANDROID_CONTROL_AE_STATE_INACTIVE:
2333 mAeCounter++;
2334 if (mAeCounter > kStableAeMaxFrames) {
2335 mAeTargetExposureTime =
2336 mFacePriority ? kFacePriorityExposureTime :
2337 kNormalExposureTime;
2338 float exposureStep = ((double)rand() / RAND_MAX) *
2339 (kExposureWanderMax - kExposureWanderMin) +
2340 kExposureWanderMin;
2341 mAeTargetExposureTime *= std::pow(2, exposureStep);
2342 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2343 }
2344 break;
2345 case ANDROID_CONTROL_AE_STATE_SEARCHING:
2346 mAeCurrentExposureTime +=
2347 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2348 kExposureTrackRate;
2349 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
2350 mAeTargetExposureTime / 10) {
2351 // Close enough
2352 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2353 mAeCounter = 0;
2354 }
2355 break;
2356 case ANDROID_CONTROL_AE_STATE_LOCKED:
2357 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2358 mAeCounter = 0;
2359 break;
2360 default:
2361 ALOGE("%s: Emulator in unexpected AE state %d",
2362 __FUNCTION__, mAeState);
2363 return INVALID_OPERATION;
2364 }
2365 } else {
2366 // AE is locked
2367 mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2368 }
2369
2370 return OK;
2371}
2372
status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
    /**
     * Run one step of the simulated autofocus state machine.
     *
     * Reads the AF mode and AF trigger controls from the request, forwards
     * the mode to the sensor for back-facing cameras, then advances mAfState
     * by at most one transition per frame.  Focus "results" are randomized:
     * scans succeed with probability 2/3 (rand() % 3).
     *
     * @param settings capture request metadata (AF mode/trigger/trigger ID).
     * @return OK on success, BAD_VALUE for missing or unsupported entries.
     */
    camera_metadata_entry e;

    e = settings.find(ANDROID_CONTROL_AF_MODE);
    if (e.count == 0) {
        ALOGE("%s: No AF mode entry!", __FUNCTION__);
        return BAD_VALUE;
    }
    uint8_t afMode = e.data.u8[0];

    e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
    typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
    af_trigger_t afTrigger;
    // If we have an afTrigger, afTriggerId should be set too
    if (e.count != 0) {
        afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);

        e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);

        if (e.count == 0) {
            ALOGE("%s: When android.control.afTrigger is set "
                    " in the request, afTriggerId needs to be set as well",
                    __FUNCTION__);
            return BAD_VALUE;
        }

        // Remembered so update3A() can echo the ID back in the result.
        mAfTriggerId = e.data.i32[0];

        ALOGVV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
        ALOGVV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
        ALOGVV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
    } else {
        afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    }
    // Front camera advertises AF_MODE_OFF only; force the mode accordingly.
    if (!mFacingBack) {
        afMode = ANDROID_CONTROL_AF_MODE_OFF;
    }

    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_OFF:
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
            return OK;
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_MACRO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
            if (!mFacingBack) {
                ALOGE("%s: Front camera doesn't support AF mode %d",
                        __FUNCTION__, afMode);
                return BAD_VALUE;
            }
            // Program the (fake) sensor focus mode.  NOTE: "Focuas" is a
            // long-standing typo in the Sensor API name, kept for interface
            // compatibility.
            mSensor->setAutoFocuas(afMode);
            // OK, handle transitions lower on
            break;
        default:
            ALOGE("%s: Emulator doesn't support AF mode %d",
                    __FUNCTION__, afMode);
            return BAD_VALUE;
    }
#if 0
    // Disabled: AF region forwarding to the sensor.
    e = settings.find(ANDROID_CONTROL_AF_REGIONS);
    if (e.count == 0) {
        ALOGE("%s:Get ANDROID_CONTROL_AF_REGIONS failed\n", __FUNCTION__);
        return BAD_VALUE;
    }
    int32_t x0 = e.data.i32[0];
    int32_t y0 = e.data.i32[1];
    int32_t x1 = e.data.i32[2];
    int32_t y1 = e.data.i32[3];
    mSensor->setFocuasArea(x0, y0, x1, y1);
    DBG_LOGB(" x0:%d, y0:%d,x1:%d,y1:%d,\n", x0, y0, x1, y1);
#endif


    bool afModeChanged = mAfMode != afMode;
    mAfMode = afMode;

    /**
     * Simulate AF triggers. Transition at most 1 state per frame.
     * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
     */

    bool afTriggerStart = false;
    bool afTriggerCancel = false;
    switch (afTrigger) {
        case ANDROID_CONTROL_AF_TRIGGER_IDLE:
            break;
        case ANDROID_CONTROL_AF_TRIGGER_START:
            afTriggerStart = true;
            break;
        case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
            afTriggerCancel = true;
            // Cancel trigger always transitions into INACTIVE
            mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;

            ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);

            // Stay in 'inactive' until at least next frame
            return OK;
        default:
            ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
            return BAD_VALUE;
    }

    // If we get down here, we're either in an autofocus mode
    // or in a continuous focus mode (and no other modes)

    int oldAfState = mAfState;
    switch (mAfState) {
        case ANDROID_CONTROL_AF_STATE_INACTIVE:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                        break;
                }
            } else {
                // At least one frame stays in INACTIVE
                if (!afModeChanged) {
                    switch (afMode) {
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                            // fall-through
                        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                            mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
                            break;
                    }
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
            /**
             * When the AF trigger is activated, the algorithm should finish
             * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
             * or AF_NOT_FOCUSED as appropriate
             */
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            /**
             * When the AF trigger is not involved, the AF algorithm should
             * start in INACTIVE state, and then transition into PASSIVE_SCAN
             * and PASSIVE_FOCUSED states
             */
            else if (!afTriggerCancel) {
                // Randomly transition to passive focus
                if (rand() % 3 == 0) {
                    mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
                }
            }

            break;
        case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
            if (afTriggerStart) {
                // Randomly transition to focused or not focused
                if (rand() % 3) {
                    mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
                } else {
                    mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
                }
            }
            // TODO: initiate passive scan (PASSIVE_SCAN)
            break;
        case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
            // Simulate AF sweep completing instantaneously

            // Randomly transition to focused or not focused
            if (rand() % 3) {
                mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
            } else {
                mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
            }
            break;
        case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            if (afTriggerStart) {
                switch (afMode) {
                    case ANDROID_CONTROL_AF_MODE_AUTO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_MACRO:
                        mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
                        break;
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
                        // fall-through
                    case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
                        // continuous autofocus => trigger start has no effect
                        break;
                }
            }
            break;
        default:
            ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
    }

    // Verbose-log the transition (old -> new) using the metadata enum names.
    {
        char afStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                oldAfState,
                afStateString,
                sizeof(afStateString));

        char afNewStateString[100] = {0,};
        camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
                mAfState,
                afNewStateString,
                sizeof(afNewStateString));
        ALOGVV("%s: AF state transitioned from %s to %s",
                __FUNCTION__, afStateString, afNewStateString);
    }


    return OK;
}
2612
2613status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2614 camera_metadata_entry e;
2615
2616 e = settings.find(ANDROID_CONTROL_AWB_MODE);
2617 if (e.count == 0) {
2618 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2619 return BAD_VALUE;
2620 }
2621 uint8_t awbMode = e.data.u8[0];
2622 //DBG_LOGB(" awbMode%d\n", awbMode);
2623
2624 // TODO: Add white balance simulation
2625
2626 switch (awbMode) {
2627 case ANDROID_CONTROL_AWB_MODE_OFF:
2628 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2629 return OK;
2630 case ANDROID_CONTROL_AWB_MODE_AUTO:
2631 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2632 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2633 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2634 case ANDROID_CONTROL_AWB_MODE_SHADE:
2635 mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; //add for cts
2636 if (mSensorType == SENSOR_USB)
2637 return OK;
2638 else
2639 return mSensor->setAWB(awbMode);
2640 break;
2641 default:
2642 ALOGE("%s: Emulator doesn't support AWB mode %d",
2643 __FUNCTION__, awbMode);
2644 return BAD_VALUE;
2645 }
2646
2647 return OK;
2648}
2649
2650
2651void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2652 if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
2653 settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
2654 &mAeCurrentExposureTime, 1);
2655 settings.update(ANDROID_SENSOR_SENSITIVITY,
2656 &mAeCurrentSensitivity, 1);
2657 }
2658
2659 settings.update(ANDROID_CONTROL_AE_STATE,
2660 &mAeState, 1);
2661 settings.update(ANDROID_CONTROL_AF_STATE,
2662 &mAfState, 1);
2663 settings.update(ANDROID_CONTROL_AWB_STATE,
2664 &mAwbState, 1);
2665 /**
2666 * TODO: Trigger IDs need a think-through
2667 */
2668 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
2669 &mAfTriggerId, 1);
2670}
2671
2672void EmulatedFakeCamera3::signalReadoutIdle() {
2673 Mutex::Autolock l(mLock);
2674 CAMHAL_LOGVB("%s , E" , __FUNCTION__);
2675 // Need to chek isIdle again because waiting on mLock may have allowed
2676 // something to be placed in the in-flight queue.
2677 if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2678 ALOGV("Now idle");
2679 mStatus = STATUS_READY;
2680 }
2681 CAMHAL_LOGVB("%s , X , mStatus = %d " , __FUNCTION__, mStatus);
2682}
2683
2684void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2685 nsecs_t timestamp) {
2686 switch(e) {
2687 case Sensor::SensorListener::EXPOSURE_START: {
2688 ALOGVV("%s: Frame %d: Sensor started exposure at %lld",
2689 __FUNCTION__, frameNumber, timestamp);
2690 // Trigger shutter notify to framework
2691 camera3_notify_msg_t msg;
2692 msg.type = CAMERA3_MSG_SHUTTER;
2693 msg.message.shutter.frame_number = frameNumber;
2694 msg.message.shutter.timestamp = timestamp;
2695 sendNotify(&msg);
2696 break;
2697 }
2698 case Sensor::SensorListener::ERROR_CAMERA_DEVICE: {
2699 camera3_notify_msg_t msg;
2700 msg.type = CAMERA3_MSG_ERROR;
2701 msg.message.error.frame_number = frameNumber;
2702 msg.message.error.error_stream = NULL;
2703 msg.message.error.error_code = 1;
2704 sendNotify(&msg);
2705 break;
2706 }
2707 default:
2708 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
2709 e, timestamp);
2710 break;
2711 }
2712}
2713
2714EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
2715 mParent(parent), mJpegWaiting(false) {
2716 mExitReadoutThread = false;
2717 mFlushFlag = false;
2718}
2719
2720EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2721 for (List<Request>::iterator i = mInFlightQueue.begin();
2722 i != mInFlightQueue.end(); i++) {
2723 delete i->buffers;
2724 delete i->sensorBuffers;
2725 }
2726}
2727
2728status_t EmulatedFakeCamera3::ReadoutThread::flushAllRequest(bool flag) {
2729 status_t res;
2730 mFlushFlag = flag;
2731 Mutex::Autolock l(mLock);
2732 CAMHAL_LOGDB("count = %d" , mInFlightQueue.size());
2733 if (mInFlightQueue.size() > 0) {
2734 mParent->mSensor->setFlushFlag(true);
2735 res = mFlush.waitRelative(mLock, kSyncWaitTimeout * 15);
2736 if (res != OK && res != TIMED_OUT) {
2737 ALOGE("%s: Error waiting for mFlush singnal : %d",
2738 __FUNCTION__, res);
2739 return INVALID_OPERATION;
2740 }
2741 DBG_LOGA("finish flush all request");
2742 }
2743 return 0;
2744}
2745
2746void EmulatedFakeCamera3::ReadoutThread::sendFlushSingnal(void) {
2747 Mutex::Autolock l(mLock);
2748 mFlush.signal();
2749}
2750
// Set the flush-in-progress flag for the readout loop.
// NOTE(review): plain store with no lock held, while other paths access
// mFlushFlag under mLock — confirm callers tolerate this benign race.
void EmulatedFakeCamera3::ReadoutThread::setFlushFlag(bool flag) {
    mFlushFlag = flag;
}
2754
2755void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
2756 Mutex::Autolock l(mLock);
2757
2758 mInFlightQueue.push_back(r);
2759 mInFlightSignal.signal();
2760}
2761
2762bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
2763 Mutex::Autolock l(mLock);
2764 return mInFlightQueue.empty() && !mThreadActive;
2765}
2766
2767status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2768 status_t res;
2769 Mutex::Autolock l(mLock);
2770 CAMHAL_LOGVB("%s , E" , __FUNCTION__);
2771 int loopCount = 0;
2772 while (mInFlightQueue.size() >= kMaxQueueSize) {
2773 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2774 if (res != OK && res != TIMED_OUT) {
2775 ALOGE("%s: Error waiting for in-flight queue to shrink",
2776 __FUNCTION__);
2777 return INVALID_OPERATION;
2778 }
2779 if (loopCount == kMaxWaitLoops) {
2780 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2781 __FUNCTION__);
2782 return TIMED_OUT;
2783 }
2784 loopCount++;
2785 }
2786 return OK;
2787}
2788
2789status_t EmulatedFakeCamera3::ReadoutThread::setJpegCompressorListener(EmulatedFakeCamera3 *parent) {
2790 status_t res;
2791 res = mParent->mJpegCompressor->setlistener(this);
2792 if (res != NO_ERROR) {
2793 ALOGE("%s: set JpegCompressor Listner failed",__FUNCTION__);
2794 }
2795 return res;
2796}
2797
2798status_t EmulatedFakeCamera3::ReadoutThread::startJpegCompressor(EmulatedFakeCamera3 *parent) {
2799 status_t res;
2800 res = mParent->mJpegCompressor->start();
2801 if (res != NO_ERROR) {
2802 ALOGE("%s: JpegCompressor start failed",__FUNCTION__);
2803 }
2804 return res;
2805}
2806
2807status_t EmulatedFakeCamera3::ReadoutThread::shutdownJpegCompressor(EmulatedFakeCamera3 *parent) {
2808 status_t res;
2809 res = mParent->mJpegCompressor->cancel();
2810 if (res != OK) {
2811 ALOGE("%s: JpegCompressor cancel failed",__FUNCTION__);
2812 }
2813 return res;
2814}
2815
2816void EmulatedFakeCamera3::ReadoutThread::sendExitReadoutThreadSignal(void) {
2817 mExitReadoutThread = true;
2818 mInFlightSignal.signal();
2819}
2820
2821bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
2822 status_t res;
2823 ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
2824
2825 // First wait for a request from the in-flight queue
2826 if (mExitReadoutThread) {
2827 return false;
2828 }
2829
2830 {
2831 Mutex::Autolock l(mLock);
2832 if ((mInFlightQueue.size() == 0) && (mFlushFlag) &&
2833 (mCurrentRequest.settings.isEmpty())) {
2834 mFlush.signal();
2835 }
2836 }
2837
2838 if (mCurrentRequest.settings.isEmpty()) {
2839 Mutex::Autolock l(mLock);
2840 if (mInFlightQueue.empty()) {
2841 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2842 if (res == TIMED_OUT) {
2843 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
2844 __FUNCTION__);
2845 return true;
2846 } else if (res != NO_ERROR) {
2847 ALOGE("%s: Error waiting for capture requests: %d",
2848 __FUNCTION__, res);
2849 return false;
2850 }
2851 }
2852
2853 if (mExitReadoutThread) {
2854 return false;
2855 }
2856
2857 mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
2858 mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
2859 mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
2860 mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
2861 mCurrentRequest.havethumbnail = mInFlightQueue.begin()->havethumbnail;
2862 mInFlightQueue.erase(mInFlightQueue.begin());
2863 mInFlightSignal.signal();
2864 mThreadActive = true;
2865 ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
2866 mCurrentRequest.frameNumber);
2867 }
2868
2869 // Then wait for it to be delivered from the sensor
2870 ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
2871 __FUNCTION__);
2872
2873 nsecs_t captureTime;
2874 status_t gotFrame =
2875 mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
2876 if (gotFrame == 0) {
2877 ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
2878 __FUNCTION__);
2879 return true;
2880 }
2881
2882 if (gotFrame == -1) {
2883 DBG_LOGA("Sensor thread had exited , here should exit ReadoutThread Loop");
2884 return false;
2885 }
2886
2887 bool workflag =
2888 mParent->mSensor->get_sensor_status();
2889 if (!workflag)
2890 return true;
2891
2892 ALOGVV("Sensor done with readout for frame %d, captured at %lld ",
2893 mCurrentRequest.frameNumber, captureTime);
2894
2895 // Check if we need to JPEG encode a buffer, and send it for async
2896 // compression if so. Otherwise prepare the buffer for return.
2897 bool needJpeg = false;
2898 HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
2899 while (buf != mCurrentRequest.buffers->end()) {
2900 bool goodBuffer = true;
2901 if ( buf->stream->format ==
2902 HAL_PIXEL_FORMAT_BLOB) {
2903 Mutex::Autolock jl(mJpegLock);
2904 needJpeg = true;
2905 CaptureRequest currentcapture;
2906 currentcapture.frameNumber = mCurrentRequest.frameNumber;
2907 currentcapture.sensorBuffers = mCurrentRequest.sensorBuffers;
2908 currentcapture.buf = buf;
2909 currentcapture.mNeedThumbnail = mCurrentRequest.havethumbnail;
2910 mParent->mJpegCompressor->queueRequest(currentcapture);
2911 //this sensorBuffers delete in the jpegcompress;
2912 mCurrentRequest.sensorBuffers = NULL;
2913 buf = mCurrentRequest.buffers->erase(buf);
2914 continue;
2915 }
2916 GraphicBufferMapper::get().unlock(*(buf->buffer));
2917
2918 buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
2919 CAMERA3_BUFFER_STATUS_ERROR;
2920 buf->acquire_fence = -1;
2921 buf->release_fence = -1;
2922
2923 ++buf;
2924 } // end while
2925
2926 // Construct result for all completed buffers and results
2927
2928 camera3_capture_result result;
2929
2930 mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
2931 &captureTime, 1);
2932
2933 const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
2934 mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
2935 &pipelineDepth, 1);
2936
2937 memset(&result, 0, sizeof(result));
2938 result.frame_number = mCurrentRequest.frameNumber;
2939 result.result = mCurrentRequest.settings.getAndLock();
2940 result.num_output_buffers = mCurrentRequest.buffers->size();
2941 result.output_buffers = mCurrentRequest.buffers->array();
2942 result.partial_result = 1;
2943
2944 // Go idle if queue is empty, before sending result
2945
2946 bool signalIdle = false;
2947 {
2948 Mutex::Autolock l(mLock);
2949 if (mInFlightQueue.empty()) {
2950 mThreadActive = false;
2951 signalIdle = true;
2952 }
2953 }
2954
2955 if (signalIdle) mParent->signalReadoutIdle();
2956
2957 // Send it off to the framework
2958 ALOGVV("%s: ReadoutThread: Send result to framework",
2959 __FUNCTION__);
2960 mParent->sendCaptureResult(&result);
2961
2962 // Clean up
2963 mCurrentRequest.settings.unlock(result.result);
2964
2965 delete mCurrentRequest.buffers;
2966 mCurrentRequest.buffers = NULL;
2967 if (!needJpeg) {
2968 delete mCurrentRequest.sensorBuffers;
2969 mCurrentRequest.sensorBuffers = NULL;
2970 }
2971 mCurrentRequest.settings.clear();
2972 CAMHAL_LOGVB("%s , X " , __FUNCTION__);
2973 return true;
2974}
2975
2976void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2977 const StreamBuffer &jpegBuffer, bool success , CaptureRequest &r) {
2978 Mutex::Autolock jl(mJpegLock);
2979 GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));
2980
2981 mJpegHalBuffer = *(r.buf);
2982 mJpegHalBuffer.status = success ?
2983 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2984 mJpegHalBuffer.acquire_fence = -1;
2985 mJpegHalBuffer.release_fence = -1;
2986 mJpegWaiting = false;
2987
2988 camera3_capture_result result;
2989 result.frame_number = r.frameNumber;
2990 result.result = NULL;
2991 result.num_output_buffers = 1;
2992 result.output_buffers = &mJpegHalBuffer;
2993 result.partial_result = 1;
2994
2995 if (!success) {
2996 ALOGE("%s: Compression failure, returning error state buffer to"
2997 " framework", __FUNCTION__);
2998 } else {
2999 DBG_LOGB("%s: Compression complete, returning buffer to framework",
3000 __FUNCTION__);
3001 }
3002
3003 mParent->sendCaptureResult(&result);
3004
3005}
3006
3007void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
3008 const StreamBuffer &inputBuffer) {
3009 // Should never get here, since the input buffer has to be returned
3010 // by end of processCaptureRequest
3011 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
3012}
3013
3014
3015}; // namespace android
3016