path: root/v3/EmulatedFakeCamera3.cpp (plain)
blob: 04574cba756687d9c25eb31138a80e33147710a4
1/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/*
18 * Contains implementation of a class EmulatedFakeCamera3 that encapsulates
19 * functionality of an advanced fake camera.
20 */
21
22#include <inttypes.h>
23
24#define LOG_NDEBUG 0
25//#define LOG_NNDEBUG 0
26#define LOG_TAG "EmulatedCamera_FakeCamera3"
27#include <utils/Log.h>
28
29#include "EmulatedFakeCamera3.h"
30#include "EmulatedCameraFactory.h"
31#include <ui/Fence.h>
32#include <ui/Rect.h>
33#include <ui/GraphicBufferMapper.h>
34#include <sys/types.h>
35
36#include <cutils/properties.h>
37#include "fake-pipeline2/Sensor.h"
38#include "fake-pipeline2/JpegCompressor.h"
39#include <cmath>
40#include <gralloc_priv.h>
41#include <binder/IPCThreadState.h>
42
43#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
44#define ALOGVV ALOGV
45#else
46#define ALOGVV(...) ((void)0)
47#endif
48
49namespace android {
50
51/**
52 * Constants for camera capabilities
53 */
54
55const int64_t USEC = 1000LL;
56const int64_t MSEC = USEC * 1000LL;
57const int64_t SEC = MSEC * 1000LL;
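// Note: these are nanosecond multipliers (nsecs_t is in ns), so e.g.
// kNormalExposureTime = 10 * MSEC below works out to 10 ms.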
58
59
60const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
61 //HAL_PIXEL_FORMAT_RAW_SENSOR,
62 HAL_PIXEL_FORMAT_BLOB,
63 //HAL_PIXEL_FORMAT_RGBA_8888,
64 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
65 // These are handled by YCbCr_420_888
66 HAL_PIXEL_FORMAT_YV12,
67 HAL_PIXEL_FORMAT_YCrCb_420_SP,
68 //HAL_PIXEL_FORMAT_YCbCr_422_I,
69 HAL_PIXEL_FORMAT_YCbCr_420_888
70};
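// Note: streams requested as HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED are
// remapped to HAL_PIXEL_FORMAT_YCrCb_420_SP in configureStreams() below.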
71
72const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
73 640, 480
74 // Sensor::kResolution[0], Sensor::kResolution[1]
75};
76
77const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
78 (const uint64_t)Sensor::kFrameDurationRange[0]
79};
80
81const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[6] = {
82 640, 480, 320, 240,// 1280, 720
83 // Sensor::kResolution[0], Sensor::kResolution[1]
84};
85
86const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
87 640, 480, 320, 240
88 // Sensor::kResolution[0], Sensor::kResolution[1]
89};
90
91const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
92 (const uint64_t)Sensor::kFrameDurationRange[0]
93};
94
95const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
96 1280,720
97 // Sensor::kResolution[0], Sensor::kResolution[1]
98};
99
100const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
101 640, 480
102 // Sensor::kResolution[0], Sensor::kResolution[1]
103};
104
105
106const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
107 (const uint64_t)Sensor::kFrameDurationRange[0]
108};
109
110/**
111 * 3A constants
112 */
113
114// Default exposure and gain targets for different scenarios
115const nsecs_t EmulatedFakeCamera3::kNormalExposureTime = 10 * MSEC;
116const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
117const int EmulatedFakeCamera3::kNormalSensitivity = 100;
118const int EmulatedFakeCamera3::kFacePrioritySensitivity = 400;
119const float EmulatedFakeCamera3::kExposureTrackRate = 0.1;
120const int EmulatedFakeCamera3::kPrecaptureMinFrames = 10;
121const int EmulatedFakeCamera3::kStableAeMaxFrames = 100;
122const float EmulatedFakeCamera3::kExposureWanderMin = -2;
123const float EmulatedFakeCamera3::kExposureWanderMax = 1;
124
125/**
126 * Camera device lifecycle methods
127 */
128static const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(camera3_jpeg_blob);
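// Note on layout: getMaxJpegResolution() (and getValidJpegSize() further
// below) treats picSizes[] as packed quadruples of
// { format, width, height, <fourth field not read here> }.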
129jpegsize EmulatedFakeCamera3::getMaxJpegResolution(uint32_t picSizes[],int count) {
130 uint32_t maxJpegWidth = 0, maxJpegHeight = 0;
131 jpegsize maxJpegResolution;
132 for (int i=0; i < count; i+= 4) {
133 uint32_t width = picSizes[i+1];
134 uint32_t height = picSizes[i+2];
135 if (picSizes[i+0] == HAL_PIXEL_FORMAT_BLOB &&
136 (width * height > maxJpegWidth * maxJpegHeight)) {
137 maxJpegWidth = width;
138 maxJpegHeight = height;
139 }
140 }
141 maxJpegResolution.width = maxJpegWidth;
142 maxJpegResolution.height = maxJpegHeight;
143 return maxJpegResolution;
144}
145ssize_t EmulatedFakeCamera3::getJpegBufferSize(int width, int height) {
146 if (maxJpegResolution.width == 0) {
147 return BAD_VALUE;
148 }
149 ssize_t maxJpegBufferSize = JpegCompressor::kMaxJpegSize;
150
151#if PLATFORM_SDK_VERSION <= 22
152 // Calculate final jpeg buffer size for the given resolution.
153 float scaleFactor = ((float) (width * height)) /
154 (maxJpegResolution.width * maxJpegResolution.height);
155 ssize_t jpegBufferSize = scaleFactor * maxJpegBufferSize;
156 // Bound the buffer size to [MIN_JPEG_BUFFER_SIZE, maxJpegBufferSize].
157 if (jpegBufferSize > maxJpegBufferSize) {
158 jpegBufferSize = maxJpegBufferSize;
159 } else if (jpegBufferSize < kMinJpegBufferSize) {
160 jpegBufferSize = kMinJpegBufferSize;
161 }
162#else
163 assert(kMinJpegBufferSize < maxJpegBufferSize);
164 // Calculate final jpeg buffer size for the given resolution.
165 float scaleFactor = ((float) (width * height)) /
166 (maxJpegResolution.width * maxJpegResolution.height);
167 ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
168 kMinJpegBufferSize;
169 if (jpegBufferSize > maxJpegBufferSize)
170 jpegBufferSize = maxJpegBufferSize;
171#endif
172
173 return jpegBufferSize;
174}
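// Rough illustration of the PLATFORM_SDK_VERSION > 22 branch above, using
// hypothetical numbers (the real JpegCompressor::kMaxJpegSize may differ):
// with kMinJpegBufferSize ~= 256 KiB (plus the small blob header), an assumed
// kMaxJpegSize of 8 MiB and a max JPEG resolution of 1920x1080, a 1280x720
// request gives scaleFactor = (1280*720)/(1920*1080) ~= 0.44, so
// jpegBufferSize ~= 0.44 * (8 MiB - 256 KiB) + 256 KiB ~= 3.7 MiB,
// capped at kMaxJpegSize.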
175
176EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, struct hw_module_t* module) :
177 EmulatedCamera3(cameraId, module) {
178 ALOGI("Constructing emulated fake camera 3 cameraID:%d", mCameraID);
179
180 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
181 mDefaultTemplates[i] = NULL;
182 }
183
184 /**
185 * Front cameras = limited mode
186 * Back cameras = full mode
187 */
188 //TODO limited or full mode, read this from camera driver
189 //mFullMode = facingBack;
190 mCameraStatus = CAMERA_INIT;
191 mSupportCap = 0;
192 mSupportRotate = 0;
193 mFullMode = 0;
194 mFlushTag = false;
195 mPlugged = false;
196
197 gLoadXml.parseXMLFile();
198}
199
200EmulatedFakeCamera3::~EmulatedFakeCamera3() {
201 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
202 if (mDefaultTemplates[i] != NULL) {
203 free_camera_metadata(mDefaultTemplates[i]);
204 }
205 }
206
207 if (mCameraInfo != NULL) {
208 CAMHAL_LOGIA("free mCameraInfo");
209 free_camera_metadata(mCameraInfo);
210 mCameraInfo = NULL;
211 }
212}
213
214status_t EmulatedFakeCamera3::Initialize() {
215 DBG_LOGB("mCameraID=%d,mStatus=%d,ddd\n", mCameraID, mStatus);
216 status_t res;
217
218#ifdef HAVE_VERSION_INFO
219 CAMHAL_LOGIB("\n--------------------------------\n"
220 "author:aml.sh multi-media team\n"
221 "branch name: %s\n"
222 "git version: %s \n"
223 "last changed: %s\n"
224 "build-time: %s\n"
225 "build-name: %s\n"
226 "uncommitted-file-num:%d\n"
227 "ssh user@%s, cd %s\n"
228 "hostname %s\n"
229 "--------------------------------\n",
230 CAMHAL_BRANCH_NAME,
231 CAMHAL_GIT_VERSION,
232 CAMHAL_LAST_CHANGED,
233 CAMHAL_BUILD_TIME,
234 CAMHAL_BUILD_NAME,
235 CAMHAL_GIT_UNCOMMIT_FILE_NUM,
236 CAMHAL_IP, CAMHAL_PATH, CAMHAL_HOSTNAME
237 );
238#endif
239
240
241 if (mStatus != STATUS_ERROR) {
242 ALOGE("%s: Already initialized!", __FUNCTION__);
243 return INVALID_OPERATION;
244 }
245
246 res = constructStaticInfo();
247 if (res != OK) {
248 ALOGE("%s: Unable to allocate static info: %s (%d)",
249 __FUNCTION__, strerror(-res), res);
250 return res;
251 }
252
253 return EmulatedCamera3::Initialize();
254}
255
256status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
257 ALOGV("%s: E", __FUNCTION__);
258 DBG_LOGB("%s, ddd", __FUNCTION__);
259 Mutex::Autolock l(mLock);
260 status_t res;
261 DBG_LOGB("%s , mStatus = %d" , __FUNCTION__, mStatus);
262
263 if ((mStatus != STATUS_CLOSED) || !mPlugged) {
264 ALOGE("%s: Can't connect in state %d, mPlugged=%d",
265 __FUNCTION__, mStatus, mPlugged);
266 return INVALID_OPERATION;
267 }
268
269 mSensor = new Sensor();
270 mSensor->setSensorListener(this);
271
272 res = mSensor->startUp(mCameraID);
273 DBG_LOGB("mSensor startUp, mCameraID=%d\n", mCameraID);
274 if (res != NO_ERROR) return res;
275
276 mSupportCap = mSensor->IoctlStateProbe();
277 if (mSupportCap & IOCTL_MASK_ROTATE) {
278 mSupportRotate = true;
279 }
280
281 mReadoutThread = new ReadoutThread(this);
282 mJpegCompressor = new JpegCompressor();
283
284 res = mReadoutThread->setJpegCompressorListener(this);
285 if (res != NO_ERROR) {
286 return res;
287 }
288 res = mReadoutThread->startJpegCompressor(this);
289 if (res != NO_ERROR) {
290 return res;
291 }
292
293 res = mReadoutThread->run("EmuCam3::readoutThread");
294 if (res != NO_ERROR) return res;
295
296 // Initialize fake 3A
297
298 mControlMode = ANDROID_CONTROL_MODE_AUTO;
299 mFacePriority = false;
300 mAeMode = ANDROID_CONTROL_AE_MODE_ON;
301 mAfMode = ANDROID_CONTROL_AF_MODE_AUTO;
302 mAwbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
303 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;//ANDROID_CONTROL_AE_STATE_INACTIVE;
304 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
305 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
306 mAfTriggerId = 0;
307 mAeCurrentExposureTime = kNormalExposureTime;
308 mAeCurrentSensitivity = kNormalSensitivity;
309
310 return EmulatedCamera3::connectCamera(device);
311}
312
313status_t EmulatedFakeCamera3::plugCamera() {
314 {
315 Mutex::Autolock l(mLock);
316
317 if (!mPlugged) {
318 CAMHAL_LOGIB("%s: Plugged back in", __FUNCTION__);
319 mPlugged = true;
320 }
321 }
322
323 return NO_ERROR;
324}
325
326status_t EmulatedFakeCamera3::unplugCamera() {
327 {
328 Mutex::Autolock l(mLock);
329
330 if (mPlugged) {
331 CAMHAL_LOGIB("%s: Unplugged camera", __FUNCTION__);
332 mPlugged = false;
333 }
334 }
 335 return NO_ERROR;
336}
337
338camera_device_status_t EmulatedFakeCamera3::getHotplugStatus() {
339 Mutex::Autolock l(mLock);
340 return mPlugged ?
341 CAMERA_DEVICE_STATUS_PRESENT :
342 CAMERA_DEVICE_STATUS_NOT_PRESENT;
343}
344
345bool EmulatedFakeCamera3::getCameraStatus()
346{
347 CAMHAL_LOGVB("%s, mCameraStatus = %d",__FUNCTION__,mCameraStatus);
348 bool ret = false;
349 if (mStatus == STATUS_CLOSED) {
350 ret = true;
351 } else {
352 ret = false;
353 }
354 return ret;
355}
356
357status_t EmulatedFakeCamera3::closeCamera() {
358 DBG_LOGB("%s, %d\n", __FUNCTION__, __LINE__);
359 status_t res;
360 {
361 Mutex::Autolock l(mLock);
362 if (mStatus == STATUS_CLOSED) return OK;
363 }
364
365 CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);
366 mReadoutThread->sendFlushSingnal();
367 mSensor->sendExitSingalToSensor();
368 res = mSensor->shutDown();
369 if (res != NO_ERROR) {
370 ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
371 return res;
372 }
373 mSensor.clear();
374 CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);
375
376 {
377 Mutex::Autolock l(mLock);
378 res = mReadoutThread->shutdownJpegCompressor(this);
379 if (res != OK) {
380 ALOGE("%s: Unable to shut down JpegCompressor: %d", __FUNCTION__, res);
381 return res;
382 }
383 mReadoutThread->sendExitReadoutThreadSignal();
384 mReadoutThread->requestExit();
385 }
386 CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);
387
388 mReadoutThread->join();
 389 DBG_LOGA("Successfully exited ReadoutThread");
390 {
391 Mutex::Autolock l(mLock);
392 // Clear out private stream information
393 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
394 PrivateStreamInfo *privStream =
395 static_cast<PrivateStreamInfo*>((*s)->priv);
396 delete privStream;
397 (*s)->priv = NULL;
398 }
399 mStreams.clear();
400 mReadoutThread.clear();
401 }
402 CAMHAL_LOGDB("%s, %d\n", __FUNCTION__, __LINE__);
403 return EmulatedCamera3::closeCamera();
404}
405
406status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
407 char property[PROPERTY_VALUE_MAX];
408 char* tempApkName = gLoadXml.getApkPackageName(IPCThreadState::self()->getCallingPid());
409 List_Or * temp=new List_Or();
410 info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
411 if (mSensorType == SENSOR_USB) {
412 if (mFacingBack) {
413 property_get("hw.camera.orientation.back", property, "0");
414 } else {
415 property_get("hw.camera.orientation.front", property, "0");
416 }
417 int32_t orientation = atoi(property);
418
419 if (gLoadXml.findApkCp(tempApkName, temp)) {
420 orientation = atoi(temp->pro);
421 }
422 if (temp != NULL) {
423 delete temp;
424 temp = NULL;
425 }
426
427 property_get("hw.camera.usb.orientation_offset", property, "0");
428 orientation += atoi(property);
429 orientation %= 360;
 430 info->orientation = orientation;
 431 } else {
 // temp is only consumed in the USB path above; release it here too so
 // the non-USB path does not leak it.
 delete temp;
 temp = NULL;
432 if (mFacingBack) {
433 property_get("hw.camera.orientation.back", property, "270");
434 } else {
435 property_get("hw.camera.orientation.front", property, "90");
436 }
437 info->orientation = atoi(property);
438 }
439 return EmulatedCamera3::getCameraInfo(info);
440}
441
442/**
443 * Camera3 interface methods
444 */
445
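// Builds availablejpegsize[] as a deduplicated list of (width, height) pairs
// taken from the packed picSizes[] quadruples; an entry is skipped when its
// width*height equals that of a pair already recorded.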
446void EmulatedFakeCamera3::getValidJpegSize(uint32_t picSizes[], uint32_t availablejpegsize[], int count) {
447 int i,j,k;
448 bool valid = true;
449 for (i=0,j=0; i < count; i+= 4) {
450 for (k= 0; k<=j ;k+=2) {
451 if ((availablejpegsize[k]*availablejpegsize[k+1]) == (picSizes[i+1]*picSizes[i+2])) {
452
453 valid = false;
454 }
455 }
456 if (valid) {
457 availablejpegsize[j] = picSizes[i+1];
458 availablejpegsize[j+1] = picSizes[i+2];
459 j+=2;
460 }
461 valid = true;
462 }
463}
464
465status_t EmulatedFakeCamera3::checkValidJpegSize(uint32_t width, uint32_t height) {
466
467 int validsizecount = 0;
468 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
469 for (uint32_t f = 0; f < count; f+=2) {
470 if (mAvailableJpegSize[f] != 0) {
471 if ((mAvailableJpegSize[f] == width)&&(mAvailableJpegSize[f+1] == height)) {
472 validsizecount++;
473 }
474 } else {
475 break;
476 }
477 }
478 if (validsizecount == 0)
479 return BAD_VALUE;
480 return OK;
481}
482
483status_t EmulatedFakeCamera3::configureStreams(
484 camera3_stream_configuration *streamList) {
485 Mutex::Autolock l(mLock);
486 uint32_t width, height, pixelfmt;
487 bool isRestart = false;
488 mFlushTag = false;
489 DBG_LOGB("%s: %d streams", __FUNCTION__, streamList->num_streams);
490
491 if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
492 ALOGE("%s: Cannot configure streams in state %d",
493 __FUNCTION__, mStatus);
494 return NO_INIT;
495 }
496
497 /**
498 * Sanity-check input list.
499 */
500 if (streamList == NULL) {
501 ALOGE("%s: NULL stream configuration", __FUNCTION__);
502 return BAD_VALUE;
503 }
504
505 if (streamList->streams == NULL) {
506 ALOGE("%s: NULL stream list", __FUNCTION__);
507 return BAD_VALUE;
508 }
509
510 if (streamList->num_streams < 1) {
511 ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
512 streamList->num_streams);
513 return BAD_VALUE;
514 }
515
516 camera3_stream_t *inputStream = NULL;
517 for (size_t i = 0; i < streamList->num_streams; i++) {
518 camera3_stream_t *newStream = streamList->streams[i];
519
520 if (newStream == NULL) {
521 ALOGE("%s: Stream index %zu was NULL",
522 __FUNCTION__, i);
523 return BAD_VALUE;
524 }
525
526 if (newStream->max_buffers <= 0) {
527 isRestart = true;//mSensor->isNeedRestart(newStream->width, newStream->height, newStream->format);
528 DBG_LOGB("format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d, isRestart=%d\n",
529 newStream->format, newStream->width, newStream->height,
530 newStream->stream_type, newStream->max_buffers,
531 isRestart);
532 }
533 ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
534 __FUNCTION__, newStream, i, newStream->stream_type,
535 newStream->usage,
536 newStream->format);
537
538 if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
539 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
540 if (inputStream != NULL) {
541
542 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
543 return BAD_VALUE;
544 }
545 inputStream = newStream;
546 }
547
548 bool validFormat = false;
549 for (size_t f = 0;
550 f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
551 f++) {
552 if (newStream->format == kAvailableFormats[f]) {
553 validFormat = true;
554 //HAL_PIXEL_FORMAT_YCrCb_420_SP,
555 if (HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format)
556 newStream->format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
557
558 break;
559 }
560 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
561 }
562 if (!validFormat) {
563 ALOGE("%s: Unsupported stream format 0x%x requested",
564 __FUNCTION__, newStream->format);
565 return BAD_VALUE;
566 }
567
568 status_t ret = checkValidJpegSize(newStream->width, newStream->height);
569 if (ret != OK) {
570 return BAD_VALUE;
571 }
572
573 }
574 mInputStream = inputStream;
575 width = 0;
576 height = 0;
577 for (size_t i = 0; i < streamList->num_streams; i++) {
578 camera3_stream_t *newStream = streamList->streams[i];
 579 DBG_LOGB("find proper width and height, format=%x, w*h=%dx%d, stream_type=%d, max_buffers=%d\n",
580 newStream->format, newStream->width, newStream->height, newStream->stream_type, newStream->max_buffers);
581 if ((HAL_PIXEL_FORMAT_BLOB != newStream->format) &&
582 (CAMERA3_STREAM_OUTPUT == newStream->stream_type)) {
583
584 if (width < newStream->width)
585 width = newStream->width;
586
587 if (height < newStream->height)
588 height = newStream->height;
589
590 pixelfmt = (uint32_t)newStream->format;
591 if (HAL_PIXEL_FORMAT_YCbCr_420_888 == pixelfmt)
592 pixelfmt = HAL_PIXEL_FORMAT_YCrCb_420_SP;
593 }
594
595 }
596
597 //TODO modify this ugly code
598 if (isRestart) {
599 isRestart = mSensor->isNeedRestart(width, height, pixelfmt);
600 }
601
602 if (isRestart) {
603 mSensor->streamOff();
604 pixelfmt = mSensor->halFormatToSensorFormat(pixelfmt);
605 mSensor->setOutputFormat(width, height, pixelfmt, 0);
606 mSensor->streamOn();
607 DBG_LOGB("width=%d, height=%d, pixelfmt=%.4s\n",
608 width, height, (char*)&pixelfmt);
609 }
610
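 // The stream bookkeeping below is a simple mark-and-sweep: every tracked
 // stream is first marked not alive, streams present in the new
 // configuration are then re-marked alive (or added), and finally any
 // stream left unmarked is reaped.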
611 /**
612 * Initially mark all existing streams as not alive
613 */
614 for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
615 PrivateStreamInfo *privStream =
616 static_cast<PrivateStreamInfo*>((*s)->priv);
617 privStream->alive = false;
618 }
619
620 /**
621 * Find new streams and mark still-alive ones
622 */
623 for (size_t i = 0; i < streamList->num_streams; i++) {
624 camera3_stream_t *newStream = streamList->streams[i];
625 if (newStream->priv == NULL) {
626 // New stream, construct info
627 PrivateStreamInfo *privStream = new PrivateStreamInfo();
628 privStream->alive = true;
629 privStream->registered = false;
630
631 DBG_LOGB("stream_type=%d\n", newStream->stream_type);
632 newStream->max_buffers = kMaxBufferCount;
633 newStream->priv = privStream;
634 mStreams.push_back(newStream);
635 } else {
636 // Existing stream, mark as still alive.
637 PrivateStreamInfo *privStream =
638 static_cast<PrivateStreamInfo*>(newStream->priv);
639 CAMHAL_LOGDA("Existing stream ?");
640 privStream->alive = true;
641 }
642 // Always update usage and max buffers
643 /*for cts CameraDeviceTest -> testPrepare*/
644 newStream->max_buffers = kMaxBufferCount;
645 newStream->usage = mSensor->getStreamUsage(newStream->stream_type);
 646 DBG_LOGB("%zu, newStream=%p, stream_type=%d, usage=%x, priv=%p, w*h=%dx%d\n",
647 i, newStream, newStream->stream_type, newStream->usage, newStream->priv, newStream->width, newStream->height);
648 }
649
650 /**
651 * Reap the dead streams
652 */
653 for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
654 PrivateStreamInfo *privStream =
655 static_cast<PrivateStreamInfo*>((*s)->priv);
656 if (!privStream->alive) {
657 DBG_LOGA("delete not alive streams");
658 (*s)->priv = NULL;
659 delete privStream;
660 s = mStreams.erase(s);
661 } else {
662 ++s;
663 }
664 }
665
666 /**
667 * Can't reuse settings across configure call
668 */
669 mPrevSettings.clear();
670
671 return OK;
672}
673
674status_t EmulatedFakeCamera3::registerStreamBuffers(
675 const camera3_stream_buffer_set *bufferSet) {
676 DBG_LOGB("%s: E", __FUNCTION__);
677 Mutex::Autolock l(mLock);
678
679 /**
680 * Sanity checks
681 */
682 DBG_LOGA("==========sanity checks\n");
683
684 // OK: register streams at any time during configure
685 // (but only once per stream)
686 if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) {
687 ALOGE("%s: Cannot register buffers in state %d",
688 __FUNCTION__, mStatus);
689 return NO_INIT;
690 }
691
692 if (bufferSet == NULL) {
693 ALOGE("%s: NULL buffer set!", __FUNCTION__);
694 return BAD_VALUE;
695 }
696
697 StreamIterator s = mStreams.begin();
698 for (; s != mStreams.end(); ++s) {
699 if (bufferSet->stream == *s) break;
700 }
701 if (s == mStreams.end()) {
702 ALOGE("%s: Trying to register buffers for a non-configured stream!",
703 __FUNCTION__);
704 return BAD_VALUE;
705 }
706
707 /**
708 * Register the buffers. This doesn't mean anything to the emulator besides
709 * marking them off as registered.
710 */
711
712 PrivateStreamInfo *privStream =
713 static_cast<PrivateStreamInfo*>((*s)->priv);
714
715#if 0
716 if (privStream->registered) {
717 ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__);
718 return BAD_VALUE;
719 }
720#endif
721
722 privStream->registered = true;
723
724 return OK;
725}
726
727const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
728 int type) {
729 DBG_LOGB("%s: E", __FUNCTION__);
730 Mutex::Autolock l(mLock);
731
732 if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
733 ALOGE("%s: Unknown request settings template: %d",
734 __FUNCTION__, type);
735 return NULL;
736 }
737
738 /**
739 * Cache is not just an optimization - pointer returned has to live at
740 * least as long as the camera device instance does.
741 */
742 if (mDefaultTemplates[type] != NULL) {
743 return mDefaultTemplates[type];
744 }
745
746 CameraMetadata settings;
747
748 /** android.request */
749 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
750 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
751
752 static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
753 settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
754
755 static const int32_t id = 0;
756 settings.update(ANDROID_REQUEST_ID, &id, 1);
757
758 static const int32_t frameCount = 0;
759 settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
760
761 /** android.lens */
762
763 static const float focusDistance = 0;
764 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
765
766 static const float aperture = 2.8f;
767 settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
768
769// static const float focalLength = 5.0f;
770 static const float focalLength = 3.299999952316284f;
771 settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
772
773 static const float filterDensity = 0;
774 settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
775
776 static const uint8_t opticalStabilizationMode =
777 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
778 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
779 &opticalStabilizationMode, 1);
780
781 // FOCUS_RANGE set only in frame
782
783 /** android.sensor */
784
785 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
786 settings.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
787 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
788 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
789 static const int64_t exposureTime = 10 * MSEC;
790 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
791
792 int64_t frameDuration = mSensor->getMinFrameDuration();
793 settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
794
795 static const int32_t sensitivity = 100;
796 settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
797
798 static const int64_t rollingShutterSkew = 0;
799 settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
800 // TIMESTAMP set only in frame
801
802 /** android.flash */
803
804 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
805 settings.update(ANDROID_FLASH_STATE, &flashstate, 1);
806
807 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
808 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
809
810 static const uint8_t flashPower = 10;
811 settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
812
813 static const int64_t firingTime = 0;
814 settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
815
816 /** Processing block modes */
817 uint8_t hotPixelMode = 0;
818 uint8_t demosaicMode = 0;
819 uint8_t noiseMode = 0;
820 uint8_t shadingMode = 0;
821 uint8_t colorMode = 0;
822 uint8_t tonemapMode = 0;
823 uint8_t edgeMode = 0;
824 switch (type) {
825
826 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
827 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
828 noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
829 // fall-through
830 case CAMERA3_TEMPLATE_STILL_CAPTURE:
831 hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
832 demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
833 shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
834 colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
835 tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
836 edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
837 break;
838 case CAMERA3_TEMPLATE_PREVIEW:
839 // fall-through
840 case CAMERA3_TEMPLATE_VIDEO_RECORD:
841 // fall-through
842 case CAMERA3_TEMPLATE_MANUAL:
843 // fall-through
844 default:
845 hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
846 demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
847 noiseMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
848 shadingMode = ANDROID_SHADING_MODE_FAST;
849 colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
850 tonemapMode = ANDROID_TONEMAP_MODE_FAST;
851 edgeMode = ANDROID_EDGE_MODE_FAST;
852 break;
853 }
854 settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
855 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
856 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
857 settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
858 settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
859 settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
860 settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
861
862 /** android.noise */
863 static const uint8_t noiseStrength = 5;
864 settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
865 static uint8_t availableNBModes[] = {
866 ANDROID_NOISE_REDUCTION_MODE_OFF,
867 ANDROID_NOISE_REDUCTION_MODE_FAST,
868 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
869 };
870 settings.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
 871 availableNBModes, sizeof(availableNBModes)/sizeof(availableNBModes[0]));
872
873
874 /** android.color */
875#if PLATFORM_SDK_VERSION >= 23
876 static const camera_metadata_rational colorTransform[9] = {
877 {1, 1}, {0, 1}, {0, 1},
878 {0, 1}, {1, 1}, {0, 1},
879 {0, 1}, {0, 1}, {1, 1}
880 };
881 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
882#else
883 static const float colorTransform[9] = {
884 1.0f, 0.f, 0.f,
885 0.f, 1.f, 0.f,
886 0.f, 0.f, 1.f
887 };
888 settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
889#endif
890 /** android.tonemap */
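 // {0, 0, 1, 1} is a linear identity curve expressed as (input, output)
 // control points; the same curve is reused for the red, green and blue
 // channels below.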
891 static const float tonemapCurve[4] = {
892 0.f, 0.f,
893 1.f, 1.f
894 };
895 settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
896 settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
897 settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
898
899 /** android.edge */
900 static const uint8_t edgeStrength = 5;
901 settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
902
903 /** android.scaler */
904 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
905 settings.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
906
907 static const int32_t cropRegion[] = {
908 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
909 };
910 settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
911
912 /** android.jpeg */
913 static const uint8_t jpegQuality = 80;
914 settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
915
916 static const int32_t thumbnailSize[2] = {
917 320, 240
918 };
919 settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
920
921 static const uint8_t thumbnailQuality = 80;
922 settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
923
924 static const double gpsCoordinates[3] = {
925 0, 0, 0
926 };
 927 settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); // default values
928
929 static const uint8_t gpsProcessingMethod[32] = "None";
930 settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
931
932 static const int64_t gpsTimestamp = 0;
933 settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
934
935 static const int32_t jpegOrientation = 0;
936 settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
937
938 /** android.stats */
939
940 static const uint8_t faceDetectMode =
941 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
942 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
943
944 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
945 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
946
947 static const uint8_t sharpnessMapMode =
948 ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
949 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
950
951 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
952 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
953 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
954 settings.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
955 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
956 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
957 // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
958 // sharpnessMap only in frames
959
960 /** android.control */
961
962 uint8_t controlIntent = 0;
963 uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; //default value
964 uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
965 uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
966 switch (type) {
967 case CAMERA3_TEMPLATE_PREVIEW:
968 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
969 break;
970 case CAMERA3_TEMPLATE_STILL_CAPTURE:
971 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
972 break;
973 case CAMERA3_TEMPLATE_VIDEO_RECORD:
974 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
975 break;
976 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
977 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
978 break;
979 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
980 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
981 break;
982 case CAMERA3_TEMPLATE_MANUAL:
983 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
984 controlMode = ANDROID_CONTROL_MODE_OFF;
985 aeMode = ANDROID_CONTROL_AE_MODE_OFF;
986 awbMode = ANDROID_CONTROL_AWB_MODE_OFF;
987 break;
988 default:
989 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
990 break;
991 }
992 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
993 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
994
995 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
996 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
997
998 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
999 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
1000
1001 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
1002
1003 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
1004 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
1005
1006 static const uint8_t aePrecaptureTrigger =
1007 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
1008 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);
1009
1010 static const int32_t mAfTriggerId = 0;
1011 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,&mAfTriggerId, 1);
1012 static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
1013 settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
1014
1015 static const int32_t controlRegions[5] = {
1016 0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
1017 1000
1018 };
1019// settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
1020
1021 static const int32_t aeExpCompensation = 0;
1022 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
1023
1024 static const int32_t aeTargetFpsRange[2] = {
1025 30, 30
1026 };
1027 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
1028
1029 static const uint8_t aeAntibandingMode =
1030 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
1031 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
1032
1033 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
1034
1035 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
1036 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
1037
1038// settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
1039
1040 uint8_t afMode = 0;
1041 switch (type) {
1042 case CAMERA3_TEMPLATE_PREVIEW:
1043 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1044 break;
1045 case CAMERA3_TEMPLATE_STILL_CAPTURE:
1046 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1047 break;
1048 case CAMERA3_TEMPLATE_VIDEO_RECORD:
1049 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1050 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
1051 break;
1052 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
1053 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1054 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
1055 break;
1056 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
1057 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1058 //afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
1059 break;
1060 case CAMERA3_TEMPLATE_MANUAL:
1061 afMode = ANDROID_CONTROL_AF_MODE_OFF;
1062 break;
1063 default:
1064 afMode = ANDROID_CONTROL_AF_MODE_AUTO;
1065 break;
1066 }
1067 settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
1068
1069 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
1070 settings.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
1071
1072// settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
1073
1074 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
1075 settings.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
1076 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1077 settings.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
1078 static const uint8_t vstabMode =
1079 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
1080 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
1081
1082 // aeState, awbState, afState only in frame
1083
1084 uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
1085 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
1086 &aberrationMode, 1);
1087
1088 mDefaultTemplates[type] = settings.release();
1089
1090 return mDefaultTemplates[type];
1091}
1092
1093status_t EmulatedFakeCamera3::processCaptureRequest(
1094 camera3_capture_request *request) {
1095 status_t res;
1096 nsecs_t exposureTime;
1097 //nsecs_t frameDuration;
1098 uint32_t sensitivity;
1099 uint32_t frameNumber;
1100 bool mHaveThumbnail = false;
1101 CameraMetadata settings;
1102 Buffers *sensorBuffers = NULL;
1103 HalBufferVector *buffers = NULL;
1104
1105 if (mFlushTag) {
 1106 DBG_LOGA("flush already requested, but a capture request was still received.\n");
1107 }
1108
1109 {
1110 Mutex::Autolock l(mLock);
1111
1112 /** Validation */
1113
1114 if (mStatus < STATUS_READY) {
1115 ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
1116 mStatus);
1117 return INVALID_OPERATION;
1118 }
1119
1120 if (request == NULL) {
1121 ALOGE("%s: NULL request!", __FUNCTION__);
1122 return BAD_VALUE;
1123 }
1124
1125 frameNumber = request->frame_number;
1126
1127 if (request->settings == NULL && mPrevSettings.isEmpty()) {
 1128 ALOGE("%s: Request %d: NULL settings for first request after "
 1129 "configureStreams()", __FUNCTION__, frameNumber);
1130 return BAD_VALUE;
1131 }
1132
1133 if (request->input_buffer != NULL &&
1134 request->input_buffer->stream != mInputStream) {
1135 DBG_LOGB("%s: Request %d: Input buffer not from input stream!",
1136 __FUNCTION__, frameNumber);
1137 DBG_LOGB("%s: Bad stream %p, expected: %p",
1138 __FUNCTION__, request->input_buffer->stream,
1139 mInputStream);
1140 DBG_LOGB("%s: Bad stream type %d, expected stream type %d",
1141 __FUNCTION__, request->input_buffer->stream->stream_type,
1142 mInputStream ? mInputStream->stream_type : -1);
1143
1144 return BAD_VALUE;
1145 }
1146
1147 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
1148 ALOGE("%s: Request %d: No output buffers provided!",
1149 __FUNCTION__, frameNumber);
1150 return BAD_VALUE;
1151 }
1152
1153 // Validate all buffers, starting with input buffer if it's given
1154
1155 ssize_t idx;
1156 const camera3_stream_buffer_t *b;
1157 if (request->input_buffer != NULL) {
1158 idx = -1;
1159 b = request->input_buffer;
1160 } else {
1161 idx = 0;
1162 b = request->output_buffers;
1163 }
1164 do {
1165 PrivateStreamInfo *priv =
1166 static_cast<PrivateStreamInfo*>(b->stream->priv);
1167 if (priv == NULL) {
1168 ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
1169 __FUNCTION__, frameNumber, idx);
1170 return BAD_VALUE;
1171 }
1172#if 0
1173 if (!priv->alive || !priv->registered) {
1174 ALOGE("%s: Request %d: Buffer %zu: Unregistered or dead stream! alive=%d, registered=%d\n",
1175 __FUNCTION__, frameNumber, idx,
1176 priv->alive, priv->registered);
1177 //return BAD_VALUE;
1178 }
1179#endif
1180 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
1181 ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
1182 __FUNCTION__, frameNumber, idx);
1183 return BAD_VALUE;
1184 }
1185 if (b->release_fence != -1) {
1186 ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
1187 __FUNCTION__, frameNumber, idx);
1188 return BAD_VALUE;
1189 }
1190 if (b->buffer == NULL) {
1191 ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
1192 __FUNCTION__, frameNumber, idx);
1193 return BAD_VALUE;
1194 }
1195 idx++;
1196 b = &(request->output_buffers[idx]);
1197 } while (idx < (ssize_t)request->num_output_buffers);
1198
1199 // TODO: Validate settings parameters
1200
1201 /**
1202 * Start processing this request
1203 */
1204 mStatus = STATUS_ACTIVE;
1205
1206 camera_metadata_entry e;
1207
1208 if (request->settings == NULL) {
1209 settings.acquire(mPrevSettings);
1210 } else {
1211 settings = request->settings;
1212
1213 uint8_t antiBanding = 0;
1214 uint8_t effectMode = 0;
1215 int exposureCmp = 0;
1216 int32_t previewFpsRange[2];
1217
1218 e = settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
1219 if (e.count == 0) {
1220 ALOGE("%s: get ANDROID_CONTROL_AE_TARGET_FPS_RANGE failed!", __FUNCTION__);
1221 return BAD_VALUE;
1222 } else {
1223 previewFpsRange[0] = e.data.i32[0];
1224 previewFpsRange[1] = e.data.i32[1];
1225 mFrameDuration = 1000000000 / previewFpsRange[1];
1226 ALOGI("set ANDROID_CONTROL_AE_TARGET_FPS_RANGE :%d,%d", previewFpsRange[0], previewFpsRange[1]);
1227 }
1228
1229 e = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE);
1230 if (e.count == 0) {
1231 ALOGE("%s: No antibanding entry!", __FUNCTION__);
1232 return BAD_VALUE;
1233 }
1234 antiBanding = e.data.u8[0];
1235 mSensor->setAntiBanding(antiBanding);
1236
1237 e = settings.find(ANDROID_CONTROL_EFFECT_MODE);
1238 if (e.count == 0) {
 1239 ALOGE("%s: No effect mode entry!", __FUNCTION__);
1240 return BAD_VALUE;
1241 }
1242 effectMode = e.data.u8[0];
1243 mSensor->setEffect(effectMode);
1244
1245 e = settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
1246 if (e.count == 0) {
1247 ALOGE("%s: No exposure entry!", __FUNCTION__);
1248 //return BAD_VALUE;
1249 } else {
1250 exposureCmp = e.data.i32[0];
 1251 DBG_LOGB("set exposure compensation %d\n", exposureCmp);
1252 mSensor->setExposure(exposureCmp);
1253 }
1254
1255 int32_t cropRegion[4];
1256 int32_t cropWidth;
1257 int32_t outputWidth = request->output_buffers[0].stream->width;
1258
1259 e = settings.find(ANDROID_SCALER_CROP_REGION);
1260 if (e.count == 0) {
 1261 ALOGE("%s: No crop region entry!", __FUNCTION__);
1262 //return BAD_VALUE;
1263 } else {
1264 cropRegion[0] = e.data.i32[0];
1265 cropRegion[1] = e.data.i32[1];
1266 cropWidth = cropRegion[2] = e.data.i32[2];
1267 cropRegion[3] = e.data.i32[3];
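 // Pick the smallest supported zoom level i (stepping by mZoomStep)
 // whose ratio to mZoomMin covers the output/crop width ratio; this is
 // the integer form of the commented-out float comparison below.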
1268 for (int i = mZoomMin; i <= mZoomMax; i += mZoomStep) {
1269 //if ( (float) i / mZoomMin >= (float) outputWidth / cropWidth) {
1270 if ( i * cropWidth >= outputWidth * mZoomMin ) {
1271 mSensor->setZoom(i);
1272 break;
1273 }
1274 }
1275 DBG_LOGB("cropRegion:%d, %d, %d, %d\n", cropRegion[0], cropRegion[1],cropRegion[2],cropRegion[3]);
1276 }
1277 }
1278
1279 res = process3A(settings);
1280 if (res != OK) {
1281 ALOGVV("%s: process3A failed!", __FUNCTION__);
1282 //return res;
1283 }
1284
1285 // TODO: Handle reprocessing
1286
1287 /**
1288 * Get ready for sensor config
1289 */
1290
1291 bool needJpeg = false;
1292 ssize_t jpegbuffersize;
1293 uint32_t jpegpixelfmt;
1294
1295 exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
1296 //frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
1297 sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
1298
1299 sensorBuffers = new Buffers();
1300 buffers = new HalBufferVector();
1301
1302 sensorBuffers->setCapacity(request->num_output_buffers);
1303 buffers->setCapacity(request->num_output_buffers);
1304
1305 // Process all the buffers we got for output, constructing internal buffer
1306 // structures for them, and lock them for writing.
1307 for (size_t i = 0; i < request->num_output_buffers; i++) {
1308 const camera3_stream_buffer &srcBuf = request->output_buffers[i];
1309 const private_handle_t *privBuffer =
1310 (const private_handle_t*)(*srcBuf.buffer);
1311 StreamBuffer destBuf;
1312 destBuf.streamId = kGenericStreamId;
1313 destBuf.width = srcBuf.stream->width;
1314 destBuf.height = srcBuf.stream->height;
1315 destBuf.format = privBuffer->format; // Use real private format
1316 destBuf.stride = srcBuf.stream->width; // TODO: query from gralloc
1317 destBuf.buffer = srcBuf.buffer;
1318 destBuf.share_fd = privBuffer->share_fd;
1319
1320 if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
1321 needJpeg = true;
1322 memset(&info,0,sizeof(struct ExifInfo));
1323 info.orientation = settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
1324 jpegpixelfmt = mSensor->getOutputFormat();
1325 if (!mSupportRotate) {
1326 info.mainwidth = srcBuf.stream->width;
1327 info.mainheight = srcBuf.stream->height;
1328 } else {
1329 if ((info.orientation == 90) || (info.orientation == 270)) {
1330 info.mainwidth = srcBuf.stream->height;
1331 info.mainheight = srcBuf.stream->width;
1332 } else {
1333 info.mainwidth = srcBuf.stream->width;
1334 info.mainheight = srcBuf.stream->height;
1335 }
1336 }
1337 if ((jpegpixelfmt == V4L2_PIX_FMT_MJPEG) || (jpegpixelfmt == V4L2_PIX_FMT_YUYV)) {
1338 mSensor->setOutputFormat(info.mainwidth,info.mainheight,jpegpixelfmt,1);
1339 } else {
1340 mSensor->setOutputFormat(info.mainwidth,info.mainheight,V4L2_PIX_FMT_RGB24,1);
1341 }
1342 }
1343
1344 // Wait on fence
1345 sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
1346 res = bufferAcquireFence->wait(kFenceTimeoutMs);
1347 if (res == TIMED_OUT) {
1348 ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
1349 __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
1350 }
1351 if (res == OK) {
1352 // Lock buffer for writing
1353 const Rect rect(destBuf.width, destBuf.height);
1354 if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
1355 if (privBuffer->format == HAL_PIXEL_FORMAT_YCbCr_420_888/*HAL_PIXEL_FORMAT_YCrCb_420_SP*/) {
1356 android_ycbcr ycbcr = android_ycbcr();
1357 res = GraphicBufferMapper::get().lockYCbCr(
1358 *(destBuf.buffer),
1359 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK,
1360 rect,
1361 &ycbcr);
1362 // This is only valid because we know that emulator's
1363 // YCbCr_420_888 is really contiguous NV21 under the hood
1364 destBuf.img = static_cast<uint8_t*>(ycbcr.y);
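 // Only the luma pointer is kept; the cb/cr pointers returned by
 // lockYCbCr() are deliberately unused, per the contiguous-NV21
 // assumption noted above.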
1365 } else {
1366 ALOGE("Unexpected private format for flexible YUV: 0x%x",
1367 privBuffer->format);
1368 res = INVALID_OPERATION;
1369 }
1370 } else {
1371 res = GraphicBufferMapper::get().lock(*(destBuf.buffer),
1372 GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK,
1373 rect,
1374 (void**)&(destBuf.img));
1375 }
1376 if (res != OK) {
1377 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
1378 __FUNCTION__, frameNumber, i);
1379 }
1380 }
1381
1382 if (res != OK) {
1383 // Either waiting or locking failed. Unlock locked buffers and bail
1384 // out.
1385 for (size_t j = 0; j < i; j++) {
1386 GraphicBufferMapper::get().unlock(
 1387 *(request->output_buffers[j].buffer));
1388 }
 1389 ALOGE("line:%d, wait/lock failed for buffer: %d x %d, usage %x, format=%x, returning\n",
 1390 __LINE__, destBuf.width, destBuf.height, privBuffer->usage, privBuffer->format);
1391 return NO_INIT;
1392 }
1393 sensorBuffers->push_back(destBuf);
1394 buffers->push_back(srcBuf);
1395 }
1396
1397 if (needJpeg) {
1398 if (!mSupportRotate) {
1399 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1400 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1401 } else {
1402 if ((info.orientation == 90) || (info.orientation == 270)) {
1403 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1404 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1405 } else {
1406 info.thumbwidth = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
1407 info.thumbheight = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
1408 }
1409 }
1410 if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
1411 info.latitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[0];
1412 info.longitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[1];
1413 info.altitude = settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[2];
1414 info.has_latitude = true;
1415 info.has_longitude = true;
1416 info.has_altitude = true;
1417 } else {
1418 info.has_latitude = false;
1419 info.has_longitude = false;
1420 info.has_altitude = false;
1421 }
1422 if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
1423 uint8_t * gpsString = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
1424 memcpy(info.gpsProcessingMethod, gpsString , sizeof(info.gpsProcessingMethod)-1);
1425 info.has_gpsProcessingMethod = true;
1426 } else {
1427 info.has_gpsProcessingMethod = false;
1428 }
1429 if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
1430 info.gpsTimestamp = settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
1431 info.has_gpsTimestamp = true;
1432 } else {
1433 info.has_gpsTimestamp = false;
1434 }
1435 if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
1436 info.focallen = settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
1437 info.has_focallen = true;
1438 } else {
1439 info.has_focallen = false;
1440 }
1441 jpegbuffersize = getJpegBufferSize(info.mainwidth,info.mainheight);
1442
1443 mJpegCompressor->SetMaxJpegBufferSize(jpegbuffersize);
1444 mJpegCompressor->SetExifInfo(info);
1445 mSensor->setPictureRotate(info.orientation);
1446 if ((info.thumbwidth > 0) && (info.thumbheight > 0)) {
1447 mHaveThumbnail = true;
1448 }
1449 DBG_LOGB("%s::thumbnailSize_width=%d,thumbnailSize_height=%d,mainsize_width=%d,mainsize_height=%d,jpegOrientation=%d",__FUNCTION__,
1450 info.thumbwidth,info.thumbheight,info.mainwidth,info.mainheight,info.orientation);
1451 }
1452 /**
1453 * Wait for JPEG compressor to not be busy, if needed
1454 */
1455#if 0
1456 if (needJpeg) {
1457 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1458 if (!ready) {
1459 ALOGE("%s: Timeout waiting for JPEG compression to complete!",
1460 __FUNCTION__);
1461 return NO_INIT;
1462 }
1463 }
1464#else
1465 while (needJpeg) {
1466 bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
1467 if (ready) {
1468 break;
1469 }
1470 }
1471#endif
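 // Unlike the disabled single-timeout path above, this loop keeps polling
 // waitForDone() (with mLock held) until the JPEG compressor is idle.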
1472 }
1473 /**
1474 * Wait until the in-flight queue has room
1475 */
1476 res = mReadoutThread->waitForReadout();
1477 if (res != OK) {
1478 ALOGE("%s: Timeout waiting for previous requests to complete!",
1479 __FUNCTION__);
1480 return NO_INIT;
1481 }
1482
1483 /**
1484 * Wait until sensor's ready. This waits for lengthy amounts of time with
 1485 * mLock held, but the interface spec is that no other calls may be done to
1486 * the HAL by the framework while process_capture_request is happening.
1487 */
1488 {
1489 Mutex::Autolock l(mLock);
1490 int syncTimeoutCount = 0;
1491 while (!mSensor->waitForVSync(kSyncWaitTimeout)) {
1492 if (mStatus == STATUS_ERROR) {
1493 return NO_INIT;
1494 }
1495 if (syncTimeoutCount == kMaxSyncTimeoutCount) {
1496 ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
1497 __FUNCTION__, frameNumber,
1498 kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
1499 return NO_INIT;
1500 }
1501 syncTimeoutCount++;
1502 }
1503
1504 /**
1505 * Configure sensor and queue up the request to the readout thread
1506 */
1507 mSensor->setExposureTime(exposureTime);
1508 //mSensor->setFrameDuration(frameDuration);
1509 mSensor->setFrameDuration(mFrameDuration);
1510 mSensor->setSensitivity(sensitivity);
1511 mSensor->setDestinationBuffers(sensorBuffers);
1512 mSensor->setFrameNumber(request->frame_number);
1513
1514 ReadoutThread::Request r;
1515 r.frameNumber = request->frame_number;
1516 r.settings = settings;
1517 r.sensorBuffers = sensorBuffers;
1518 r.buffers = buffers;
1519 r.havethumbnail = mHaveThumbnail;
1520
1521 mReadoutThread->queueCaptureRequest(r);
1522 ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1523
1524 // Cache the settings for next time
1525 mPrevSettings.acquire(settings);
1526 }
1527 CAMHAL_LOGVB("%s , X" , __FUNCTION__);
1528 return OK;
1529}
1530
1531/** Debug methods */
1532
1533void EmulatedFakeCamera3::dump(int fd) {
1534
1535 String8 result;
1536 uint32_t count = sizeof(mAvailableJpegSize)/sizeof(mAvailableJpegSize[0]);
1537 result = String8::format("%s, valid resolution\n", __FILE__);
1538
1539 for (uint32_t f = 0; f < count; f+=2) {
1540 if (mAvailableJpegSize[f] == 0)
1541 break;
 1542 result.appendFormat("width: %d, height: %d\n",
1543 mAvailableJpegSize[f], mAvailableJpegSize[f+1]);
1544 }
1545 result.appendFormat("\nmZoomMin: %d , mZoomMax =%d, mZoomStep=%d\n",
1546 mZoomMin, mZoomMax, mZoomStep);
1547
1548 if (mZoomStep <= 0) {
 1549 result.appendFormat("!!!!!!!!! camera app may produce no picture output\n");
1550 }
1551
1552 write(fd, result.string(), result.size());
1553
1554 if (mSensor.get() != NULL) {
1555 mSensor->dump(fd);
1556 }
1557
1558}
 1559//flush all requests
 1560//TODO: immediately return the buffers held by every in-flight request,
 1561//flagged with CAMERA3_BUFFER_STATUS_ERROR.
1562int EmulatedFakeCamera3::flush_all_requests() {
1563 DBG_LOGA("flush all request");
1564 mFlushTag = true;
1565 mReadoutThread->flushAllRequest(true);
1566 mReadoutThread->setFlushFlag(false);
1567 mSensor->setFlushFlag(false);
1568 return 0;
1569}
1570/** Tag query methods */
1571const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) {
1572 return NULL;
1573}
1574
1575const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) {
1576 return NULL;
1577}
1578
1579int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
1580 return 0;
1581}
1582
1583/**
1584 * Private methods
1585 */
1586
1587camera_metadata_ro_entry_t EmulatedFakeCamera3::staticInfo(const CameraMetadata *info, uint32_t tag,
1588 size_t minCount, size_t maxCount, bool required) const {
1589
1590 camera_metadata_ro_entry_t entry = info->find(tag);
1591
1592 if (CC_UNLIKELY( entry.count == 0 ) && required) {
1593 const char* tagSection = get_camera_metadata_section_name(tag);
1594 if (tagSection == NULL) tagSection = "<unknown>";
1595 const char* tagName = get_camera_metadata_tag_name(tag);
1596 if (tagName == NULL) tagName = "<unknown>";
1597
1598 ALOGE("Error finding static metadata entry '%s.%s' (%x)",
1599 tagSection, tagName, tag);
1600 } else if (CC_UNLIKELY(
1601 (minCount != 0 && entry.count < minCount) ||
1602 (maxCount != 0 && entry.count > maxCount) ) ) {
1603 const char* tagSection = get_camera_metadata_section_name(tag);
1604 if (tagSection == NULL) tagSection = "<unknown>";
1605 const char* tagName = get_camera_metadata_tag_name(tag);
1606 if (tagName == NULL) tagName = "<unknown>";
1607 ALOGE("Malformed static metadata entry '%s.%s' (%x):"
1608 "Expected between %zu and %zu values, but got %zu values",
1609 tagSection, tagName, tag, minCount, maxCount, entry.count);
1610 }
1611
1612 return entry;
1613}
1614
1615//this is only for debug
1616void EmulatedFakeCamera3::getStreamConfigurationp(CameraMetadata *info) {
1617 const int STREAM_CONFIGURATION_SIZE = 4;
1618 const int STREAM_FORMAT_OFFSET = 0;
1619 const int STREAM_WIDTH_OFFSET = 1;
1620 const int STREAM_HEIGHT_OFFSET = 2;
1621 const int STREAM_IS_INPUT_OFFSET = 3;
1622
1623 camera_metadata_ro_entry_t availableStreamConfigs =
1624 staticInfo(info, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
 1625 CAMHAL_LOGDB(" stream, availableStreamConfigs.count=%zu\n", availableStreamConfigs.count);
1626
1627 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1628 int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
1629 int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
1630 int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
1631 int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
1632 CAMHAL_LOGDB("f=%x, w*h=%dx%d, du=%d\n", format, width, height, isInput);
1633 }
1634
1635}
1636
1637//this is only for debug
1638void EmulatedFakeCamera3::getStreamConfigurationDurations(CameraMetadata *info) {
1639 const int STREAM_CONFIGURATION_SIZE = 4;
1640 const int STREAM_FORMAT_OFFSET = 0;
1641 const int STREAM_WIDTH_OFFSET = 1;
1642 const int STREAM_HEIGHT_OFFSET = 2;
1643 const int STREAM_IS_INPUT_OFFSET = 3;
1644
1645 camera_metadata_ro_entry_t availableStreamConfigs =
1646 staticInfo(info, ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
 1647 CAMHAL_LOGDB("availableStreamConfigs.count=%zu\n", availableStreamConfigs.count);
1648
1649 for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
1650 int64_t format = availableStreamConfigs.data.i64[i + STREAM_FORMAT_OFFSET];
1651 int64_t width = availableStreamConfigs.data.i64[i + STREAM_WIDTH_OFFSET];
1652 int64_t height = availableStreamConfigs.data.i64[i + STREAM_HEIGHT_OFFSET];
1653 int64_t isInput = availableStreamConfigs.data.i64[i + STREAM_IS_INPUT_OFFSET];
 1654 CAMHAL_LOGDB("f=%" PRIx64 ", w*h=%" PRId64 "x%" PRId64 ", du=%" PRId64 "\n", format, width, height, isInput);
1655 }
1656}
1657
1658void EmulatedFakeCamera3::updateCameraMetaData(CameraMetadata *info) {
1659
1660}
1661
1662status_t EmulatedFakeCamera3::constructStaticInfo() {
1663
1664 status_t ret = OK;
1665 CameraMetadata info;
1666 uint32_t picSizes[64 * 8];
1667 int64_t* duration = NULL;
1668 int count, duration_count, availablejpegsize;
1669 uint8_t maxCount = 10;
1670 char property[PROPERTY_VALUE_MAX];
1671 unsigned int supportrotate;
1672 availablejpegsize = ARRAY_SIZE(mAvailableJpegSize);
1673 memset(mAvailableJpegSize,0,(sizeof(uint32_t))*availablejpegsize);
1674 sp<Sensor> s = new Sensor();
1675 ret = s->startUp(mCameraID);
1676 if (ret != OK) {
1677 DBG_LOGA("sensor start up failed");
1678 return ret;
1679 }
1680
1681 mSensorType = s->getSensorType();
1682
1683 if ( mSensorType == SENSOR_USB) {
1684 char property[PROPERTY_VALUE_MAX];
1685 property_get("rw.camera.usb.faceback", property, "false");
1686 if (strstr(property, "true"))
1687 mFacingBack = 1;
1688 else
1689 mFacingBack = 0;
1690 ALOGI("Setting usb camera cameraID:%d to back camera:%s\n",
1691 mCameraID, property);
1692 } else {
1693 if (s->mSensorFace == SENSOR_FACE_FRONT) {
1694 mFacingBack = 0;
1695 } else if (s->mSensorFace == SENSOR_FACE_BACK) {
1696 mFacingBack = 1;
1697 } else if (s->mSensorFace == SENSOR_FACE_NONE) {
1698 if (gEmulatedCameraFactory.getEmulatedCameraNum() == 1) {
1699 mFacingBack = 1;
1700 } else if ( mCameraID == 0) {
1701 mFacingBack = 1;
1702 } else {
1703 mFacingBack = 0;
1704 }
1705 }
1706
1707 ALOGI("Setting on-board camera cameraID:%d facing back:%d (0=false, 1=true)\n",
1708 mCameraID, mFacingBack);
1709 }
1710
1711 mSupportCap = s->IoctlStateProbe();
1712 if (mSupportCap & IOCTL_MASK_ROTATE) {
1713 supportrotate = true;
1714 } else {
1715 supportrotate = false;
1716 }
1717 // android.lens
1718
1719 // Fixed-focus lens: report 0 (focus at infinity) as the minimum focus distance for both cameras
1720 // TODO read this ioctl from camera driver
1721 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
1722 const float minFocusDistance = 0.0;
1723 info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1724 &minFocusDistance, 1);
1725
1726 // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1727 const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
1728 info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1729 &hyperFocalDistance, 1);
1730
1731 static const float focalLength = 3.30f; // mm
1732 info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1733 &focalLength, 1);
1734 static const float aperture = 2.8f;
1735 info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1736 &aperture, 1);
1737 static const float filterDensity = 0;
1738 info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1739 &filterDensity, 1);
1740 static const uint8_t availableOpticalStabilization =
1741 ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1742 info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1743 &availableOpticalStabilization, 1);
1744
1745 static const int32_t lensShadingMapSize[] = {1, 1};
1746 info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1747 sizeof(lensShadingMapSize)/sizeof(int32_t));
1748
1749 /*lens facing related camera feature*/
1750 /*camera feature setting in /device/amlogic/xxx/xxx.mk files*/
1751 uint8_t lensFacing = mFacingBack ?
1752 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1753 info.update(ANDROID_LENS_FACING, &lensFacing, 1);
1754
1755 float lensPosition[3];
1756 if (mFacingBack) {
1757 // Back-facing camera is center-top on device
1758 lensPosition[0] = 0;
1759 lensPosition[1] = 20;
1760 lensPosition[2] = -5;
1761 } else {
1762 // Front-facing camera is center-right on device
1763 lensPosition[0] = 20;
1764 lensPosition[1] = 20;
1765 lensPosition[2] = 0;
1766 }
1767#if PLATFORM_SDK_VERSION <= 22
1768 info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
1769 sizeof(float));
1770#endif
1771 static const uint8_t lensCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED;
1772 info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,&lensCalibration,1);
1773
1774 // android.sensor
1775
1776 static const int32_t testAvailablePattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1777 info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &testAvailablePattern, 1);
1778 static const int32_t testPattern = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
1779 info.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPattern, 1);
1780 info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1781 Sensor::kExposureTimeRange, 2);
1782
1783 info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1784 &Sensor::kFrameDurationRange[1], 1);
1785
1786 info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1787 Sensor::kSensitivityRange,
1788 sizeof(Sensor::kSensitivityRange)
1789 /sizeof(int32_t));
1790
1791 info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1792 &Sensor::kColorFilterArrangement, 1);
1793
1794 static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1795 info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1796 sensorPhysicalSize, 2);
1797
1798 info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1799 (int32_t*)&Sensor::kMaxRawValue, 1);
1800
1801 static const int32_t blackLevelPattern[4] = {
1802 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1803 (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
1804 };
1805 info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1806 blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
1807
1808 static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
1809 info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);
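    // Sensor orientation comes from the hw.camera.orientation.* properties;
    // USB cameras additionally apply hw.camera.usb.orientation_offset.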
1810 if (mSensorType == SENSOR_USB) {
1811 if (mFacingBack) {
1812 property_get("hw.camera.orientation.back", property, "0");
1813 } else {
1814 property_get("hw.camera.orientation.front", property, "0");
1815 }
1816 int32_t orientation = atoi(property);
1817 property_get("hw.camera.usb.orientation_offset", property, "0");
1818 orientation += atoi(property);
1819 orientation %= 360;
1820 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1821 } else {
1822 if (mFacingBack) {
1823 property_get("hw.camera.orientation.back", property, "270");
1824 const int32_t orientation = atoi(property);
1825 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1826 } else {
1827 property_get("hw.camera.orientation.front", property, "90");
1828 const int32_t orientation = atoi(property);
1829 info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1830 }
1831 }
1832
1833 static const int64_t rollingShutterSkew = 0;
1834 info.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1);
1835
1836 //TODO: sensor color calibration fields
1837
1838 // android.flash
1839 static const uint8_t flashAvailable = 0;
1840 info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1841
1842 static const uint8_t flashstate = ANDROID_FLASH_STATE_UNAVAILABLE;
1843 info.update(ANDROID_FLASH_STATE, &flashstate, 1);
1844
1845 static const int64_t flashChargeDuration = 0;
1846 info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
1847
1848 /** android.noise */
1849 static const uint8_t availableNBModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
1850 info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, &availableNBModes, 1);
1851
1852 // android.tonemap
1853 static const uint8_t availabletonemapModes[] = {
1854 ANDROID_TONEMAP_MODE_FAST,
1855 ANDROID_TONEMAP_MODE_HIGH_QUALITY
1856 };
1857 info.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, availabletonemapModes,
1858 sizeof(availabletonemapModes)/sizeof(availabletonemapModes[0]));
1859
1860 static const int32_t tonemapCurvePoints = 128;
1861 info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1862
1863 // android.scaler
1864
1865 static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
1866 info.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
1867
1868 info.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1869 kAvailableFormats,
1870 sizeof(kAvailableFormats)/sizeof(int32_t));
1871
1872 info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1873 (int64_t*)kAvailableRawMinDurations,
1874 sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
1875
1876 //for version 3.2 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
1877 count = sizeof(picSizes)/sizeof(picSizes[0]);
1878 count = s->getStreamConfigurations(picSizes, kAvailableFormats, count);
1879
1880 info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
1881 (int32_t*)picSizes, count);
1882
1883 if (count < availablejpegsize) {
1884 availablejpegsize = count;
1885 }
1886 getValidJpegSize(picSizes,mAvailableJpegSize,availablejpegsize);
1887
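    // Use the largest JPEG-capable resolution to derive the active array and pixel array sizes.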
1888 maxJpegResolution = getMaxJpegResolution(picSizes,count);
1889 int32_t full_size[4];
1890 // Both facings use the same active array: (0, 0) up to the largest JPEG resolution.
1891 full_size[0] = 0;
1892 full_size[1] = 0;
1893 full_size[2] = maxJpegResolution.width;
1894 full_size[3] = maxJpegResolution.height;
1901 /*activeArray.width <= pixelArraySize.Width && activeArray.height<= pixelArraySize.Height*/
1902 info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1903 (int32_t*)full_size,
1904 sizeof(full_size)/sizeof(full_size[0]));
1905 info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1906 (int32_t*)(&full_size[2]), 2);
1907
1908 duration = new int64_t[count];
1909 if (duration == NULL) {
1910 DBG_LOGA("failed to allocate memory for duration array");
1911 return NO_MEMORY;
1912 } else {
1913 memset(duration,0,sizeof(int64_t)*count);
1914 }
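    // Fill ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS and then
    // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS from the sensor's per-configuration durations.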
1915 duration_count = s->getStreamConfigurationDurations(picSizes, duration, count, true);
1916 info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
1917 duration, duration_count);
1918
1919 memset(duration,0,sizeof(int64_t)*count);
1920 duration_count = s->getStreamConfigurationDurations(picSizes, duration, count, false);
1921 info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1922 duration, duration_count);
1923
1924 info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1925 (int64_t*)kAvailableProcessedMinDurations,
1926 sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
1927
1928 info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
1929 (int64_t*)kAvailableJpegMinDurations,
1930 sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
1931
1932
1933 // android.jpeg
1934
1935 static const int32_t jpegThumbnailSizes[] = {
1936 0, 0,
1937 128, 72,
1938 160, 120,
1939 320, 240
1940 };
1941 info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1942 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
1943
1944 static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1945 info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1946
1947 // android.stats
1948
1949 static const uint8_t availableFaceDetectModes[] = {
1950 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1951 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1952 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
1953 };
1954
1955 info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1956 availableFaceDetectModes,
1957 sizeof(availableFaceDetectModes));
1958
1959 static const int32_t maxFaceCount = 8;
1960 info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1961 &maxFaceCount, 1);
1962
1963 static const int32_t histogramSize = 64;
1964 info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1965 &histogramSize, 1);
1966
1967 static const int32_t maxHistogramCount = 1000;
1968 info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1969 &maxHistogramCount, 1);
1970
1971 static const int32_t sharpnessMapSize[2] = {64, 64};
1972 info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1973 sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
1974
1975 static const int32_t maxSharpnessMapValue = 1000;
1976 info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1977 &maxSharpnessMapValue, 1);
1978 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
1979 info.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,&hotPixelMapMode, 1);
1980
1981 static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
1982 info.update(ANDROID_STATISTICS_SCENE_FLICKER,&sceneFlicker, 1);
1983 static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
1984 info.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,&lensShadingMapMode, 1);
1985 // android.control
1986
1987 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
1988 info.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
1989
1990 static const uint8_t availableSceneModes[] = {
1991 // ANDROID_CONTROL_SCENE_MODE_DISABLED,
1992 ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY
1993 };
1994 info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1995 availableSceneModes, sizeof(availableSceneModes));
1996
1997 static const uint8_t availableEffects[] = {
1998 ANDROID_CONTROL_EFFECT_MODE_OFF
1999 };
2000 info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
2001 availableEffects, sizeof(availableEffects));
2002
2003 static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
2004 info.update(ANDROID_CONTROL_MAX_REGIONS,
2005 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
2006
2007 static const uint8_t availableAeModes[] = {
2008 ANDROID_CONTROL_AE_MODE_OFF,
2009 ANDROID_CONTROL_AE_MODE_ON
2010 };
2011 info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
2012 availableAeModes, sizeof(availableAeModes));
2013
2014
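    // AE target FPS ranges as (min, max) pairs: variable 5-15/5-25/5-30 plus fixed 15/25/30 fps.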
2015 static const int32_t availableTargetFpsRanges[] = {
2016 5, 15, 15, 15, 5, 25, 25, 25, 5, 30, 30, 30,
2017 };
2018 info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
2019 availableTargetFpsRanges,
2020 sizeof(availableTargetFpsRanges)/sizeof(int32_t));
2021
2022 uint8_t awbModes[maxCount];
2023 count = s->getAWB(awbModes, maxCount);
2024 if (count < 0) {
2025 static const uint8_t availableAwbModes[] = {
2026 ANDROID_CONTROL_AWB_MODE_OFF,
2027 ANDROID_CONTROL_AWB_MODE_AUTO,
2028 ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
2029 ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
2030 ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
2031 ANDROID_CONTROL_AWB_MODE_SHADE
2032 };
2033 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2034 availableAwbModes, sizeof(availableAwbModes));
2035 } else {
2036 DBG_LOGB("getAWB %d ",count);
2037 info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
2038 awbModes, count);
2039 }
2040
2041 static const uint8_t afstate = ANDROID_CONTROL_AF_STATE_INACTIVE;
2042 info.update(ANDROID_CONTROL_AF_STATE,&afstate,1);
2043
2044 static const uint8_t availableAfModesFront[] = {
2045 ANDROID_CONTROL_AF_MODE_OFF
2046 };
2047
2048 if (mFacingBack) {
2049 uint8_t afMode[maxCount];
2050 count = s->getAutoFocus(afMode, maxCount);
2051 if (count < 0) {
2052 static const uint8_t availableAfModesBack[] = {
2053 ANDROID_CONTROL_AF_MODE_OFF,
2054 //ANDROID_CONTROL_AF_MODE_AUTO,
2055 //ANDROID_CONTROL_AF_MODE_MACRO,
2056 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
2057 //ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
2058 };
2059
2060 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2061 availableAfModesBack, sizeof(availableAfModesBack));
2062 } else {
2063 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2064 afMode, count);
2065 }
2066 } else {
2067 info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
2068 availableAfModesFront, sizeof(availableAfModesFront));
2069 }
2070
2071 uint8_t antiBanding[maxCount];
2072 count = s->getAntiBanding(antiBanding, maxCount);
2073 if (count < 0) {
2074 static const uint8_t availableAntibanding[] = {
2075 ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
2076 ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
2077 };
2078 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2079 availableAntibanding, sizeof(availableAntibanding));
2080 } else {
2081 info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
2082 antiBanding, count);
2083 }
2084
2085 camera_metadata_rational step;
2086 int maxExp, minExp, def;
2087 ret = s->getExposure(&maxExp, &minExp, &def, &step);
2088 if (ret < 0) {
2089 static const int32_t aeExpCompensation = 0;
2090 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
2091
2092 static const camera_metadata_rational exposureCompensationStep = {
2093 1, 3
2094 };
2095 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2096 &exposureCompensationStep, 1);
2097
2098 int32_t exposureCompensationRange[] = {-6, 6};
2099 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2100 exposureCompensationRange,
2101 sizeof(exposureCompensationRange)/sizeof(int32_t));
2102 } else {
2103 DBG_LOGB("exposure compensation support:(%d, %d)\n", minExp, maxExp);
2104 int32_t exposureCompensationRange[] = {minExp, maxExp};
2105 info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
2106 exposureCompensationRange,
2107 sizeof(exposureCompensationRange)/sizeof(int32_t));
2108 info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
2109 &step, 1);
2110 info.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &def, 1);
2111 }
2112
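    // Max digital zoom is the ratio of the driver-reported zoom range, defaulting to 1.0
    // when zoom is unsupported or the minimum zoom is 0.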
2113 ret = s->getZoom(&mZoomMin, &mZoomMax, &mZoomStep);
2114 if (ret < 0) {
2115 float maxZoom = 1.0;
2116 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2117 &maxZoom, 1);
2118 } else {
2119 if (mZoomMin != 0) {
2120 float maxZoom = mZoomMax / mZoomMin;
2121 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2122 &maxZoom, 1);
2123 } else {
2124 float maxZoom = 1.0;
2125 info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
2126 &maxZoom, 1);
2127 }
2128 }
2129
2130 static const uint8_t availableVstabModes[] = {
2131 ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
2132 };
2133 info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
2134 availableVstabModes, sizeof(availableVstabModes));
2135
2136 static const uint8_t aestate = ANDROID_CONTROL_AE_STATE_CONVERGED;
2137 info.update(ANDROID_CONTROL_AE_STATE,&aestate,1);
2138 static const uint8_t awbstate = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2139 info.update(ANDROID_CONTROL_AWB_STATE,&awbstate,1);
2140 // android.info
2141 const uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2142 //mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
2143 // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
2144 info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
2145 &supportedHardwareLevel,
2146 /*count*/1);
2147
2148 int32_t android_sync_max_latency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
2149 info.update(ANDROID_SYNC_MAX_LATENCY, &android_sync_max_latency, 1);
2150
2151 uint8_t len[] = {1};
2152 info.update(ANDROID_REQUEST_PIPELINE_DEPTH, (uint8_t *)len, 1);
2153
2154 /*for cts BurstCaptureTest ->testYuvBurst */
2155 uint8_t maxlen[] = {kMaxBufferCount};
2156 info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, (uint8_t *)maxlen, 1);
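    // Advertise only the BACKWARD_COMPATIBLE capability, consistent with the LIMITED
    // hardware level reported above.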
2157 uint8_t cap[] = {
2158 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
2159 };
2160 info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
2161 (uint8_t *)cap, sizeof(cap)/sizeof(cap[0]));
2162
2163
2164 int32_t partialResultCount = 1;
2165 info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,&partialResultCount,1);
2166 int32_t maxNumOutputStreams[3] = {0,2,1};
2167 info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,maxNumOutputStreams,3);
2168 uint8_t aberrationMode[] = {ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF};
2169 info.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
2170 aberrationMode, 1);
2171 info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
2172 aberrationMode, 1);
2173
2174 getAvailableChKeys(&info, supportedHardwareLevel);
2175
2176 if (mCameraInfo != NULL) {
2177 CAMHAL_LOGDA("mCameraInfo is not null, mem leak?");
2178 }
2179 mCameraInfo = info.release();
2180 DBG_LOGB("mCameraID=%d,mCameraInfo=%p\n", mCameraID, mCameraInfo);
2181
2182 if (duration != NULL) {
2183 delete [] duration;
2184 }
2185
2186 s->shutDown();
2187 s.clear();
2188 mPlugged = true;
2189
2190 return OK;
2191}
2192
2193status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
2194 /**
2195 * Extract top-level 3A controls
2196 */
2197 status_t res;
2198
2199 bool facePriority = false;
2200
2201 camera_metadata_entry e;
2202
2203 e = settings.find(ANDROID_CONTROL_MODE);
2204 if (e.count == 0) {
2205 ALOGE("%s: No control mode entry!", __FUNCTION__);
2206 return BAD_VALUE;
2207 }
2208 uint8_t controlMode = e.data.u8[0];
2209
2210 e = settings.find(ANDROID_CONTROL_SCENE_MODE);
2211 if (e.count == 0) {
2212 ALOGE("%s: No scene mode entry!", __FUNCTION__);
2213 return BAD_VALUE;
2214 }
2215 uint8_t sceneMode = e.data.u8[0];
2216
2217 if (controlMode == ANDROID_CONTROL_MODE_OFF) {
2218 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2219 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2220 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2221 update3A(settings);
2222 return OK;
2223 } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
2224 switch(sceneMode) {
2225 case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
2226 mFacePriority = true;
2227 break;
2228 default:
2229 ALOGE("%s: Emulator doesn't support scene mode %d",
2230 __FUNCTION__, sceneMode);
2231 return BAD_VALUE;
2232 }
2233 } else {
2234 mFacePriority = false;
2235 }
2236
2237 // controlMode == AUTO or sceneMode == FACE_PRIORITY
2238 // Process individual 3A controls
2239
2240 res = doFakeAE(settings);
2241 if (res != OK) return res;
2242
2243 res = doFakeAF(settings);
2244 if (res != OK) return res;
2245
2246 res = doFakeAWB(settings);
2247 if (res != OK) return res;
2248
2249 update3A(settings);
2250 return OK;
2251}
2252
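// Fake AE: handles AE lock, the precapture trigger sequence, and a periodic re-search
// that nudges the current exposure time toward a slowly wandering target.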
2253status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
2254 camera_metadata_entry e;
2255
2256 e = settings.find(ANDROID_CONTROL_AE_MODE);
2257 if (e.count == 0) {
2258 ALOGE("%s: No AE mode entry!", __FUNCTION__);
2259 return BAD_VALUE;
2260 }
2261 uint8_t aeMode = e.data.u8[0];
2262
2263 switch (aeMode) {
2264 case ANDROID_CONTROL_AE_MODE_OFF:
2265 // AE is OFF
2266 mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
2267 return OK;
2268 case ANDROID_CONTROL_AE_MODE_ON:
2269 // OK for AUTO modes
2270 break;
2271 default:
2272 ALOGVV("%s: Emulator doesn't support AE mode %d",
2273 __FUNCTION__, aeMode);
2274 return BAD_VALUE;
2275 }
2276
2277 e = settings.find(ANDROID_CONTROL_AE_LOCK);
2278 if (e.count == 0) {
2279 ALOGE("%s: No AE lock entry!", __FUNCTION__);
2280 return BAD_VALUE;
2281 }
2282 bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
2283
2284 e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
2285 bool precaptureTrigger = false;
2286 if (e.count != 0) {
2287 precaptureTrigger =
2288 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
2289 }
2290
2291 if (precaptureTrigger) {
2292 ALOGVV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
2293 } else if (e.count > 0) {
2294 ALOGVV("%s: Pre capture trigger was present? %zu",
2295 __FUNCTION__,
2296 e.count);
2297 }
2298
2299 if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2300 // Run precapture sequence
2301 if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
2302 mAeCounter = 0;
2303 }
2304
2305 if (mFacePriority) {
2306 mAeTargetExposureTime = kFacePriorityExposureTime;
2307 } else {
2308 mAeTargetExposureTime = kNormalExposureTime;
2309 }
2310
2311 if (mAeCounter > kPrecaptureMinFrames &&
2312 (mAeTargetExposureTime - mAeCurrentExposureTime) <
2313 mAeTargetExposureTime / 10) {
2314 // Done with precapture
2315 mAeCounter = 0;
2316 mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
2317 ANDROID_CONTROL_AE_STATE_CONVERGED;
2318 } else {
2319 // Converge some more
2320 mAeCurrentExposureTime +=
2321 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2322 kExposureTrackRate;
2323 mAeCounter++;
2324 mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
2325 }
2326
2327 } else if (!aeLocked) {
2328 // Run standard occasional AE scan
2329 switch (mAeState) {
2330 case ANDROID_CONTROL_AE_STATE_CONVERGED:
2331 case ANDROID_CONTROL_AE_STATE_INACTIVE:
2332 mAeCounter++;
2333 if (mAeCounter > kStableAeMaxFrames) {
2334 mAeTargetExposureTime =
2335 mFacePriority ? kFacePriorityExposureTime :
2336 kNormalExposureTime;
2337 float exposureStep = ((double)rand() / RAND_MAX) *
2338 (kExposureWanderMax - kExposureWanderMin) +
2339 kExposureWanderMin;
2340 mAeTargetExposureTime *= std::pow(2, exposureStep);
2341 mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
2342 }
2343 break;
2344 case ANDROID_CONTROL_AE_STATE_SEARCHING:
2345 mAeCurrentExposureTime +=
2346 (mAeTargetExposureTime - mAeCurrentExposureTime) *
2347 kExposureTrackRate;
2348 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
2349 mAeTargetExposureTime / 10) {
2350 // Close enough
2351 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2352 mAeCounter = 0;
2353 }
2354 break;
2355 case ANDROID_CONTROL_AE_STATE_LOCKED:
2356 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
2357 mAeCounter = 0;
2358 break;
2359 default:
2360 ALOGE("%s: Emulator in unexpected AE state %d",
2361 __FUNCTION__, mAeState);
2362 return INVALID_OPERATION;
2363 }
2364 } else {
2365 // AE is locked
2366 mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
2367 }
2368
2369 return OK;
2370}
2371
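// Fake AF: validates the AF mode and trigger, forwards the mode to the sensor for the
// back camera, and steps the android.control.afState machine (at most one transition per frame).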
2372status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
2373 camera_metadata_entry e;
2374
2375 e = settings.find(ANDROID_CONTROL_AF_MODE);
2376 if (e.count == 0) {
2377 ALOGE("%s: No AF mode entry!", __FUNCTION__);
2378 return BAD_VALUE;
2379 }
2380 uint8_t afMode = e.data.u8[0];
2381
2382 e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
2383 typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
2384 af_trigger_t afTrigger;
2385 // If we have an afTrigger, afTriggerId should be set too
2386 if (e.count != 0) {
2387 afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
2388
2389 e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);
2390
2391 if (e.count == 0) {
2392 ALOGE("%s: When android.control.afTrigger is set "
2393 " in the request, afTriggerId needs to be set as well",
2394 __FUNCTION__);
2395 return BAD_VALUE;
2396 }
2397
2398 mAfTriggerId = e.data.i32[0];
2399
2400 ALOGVV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
2401 ALOGVV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
2402 ALOGVV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
2403 } else {
2404 afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
2405 }
2406 if (!mFacingBack) {
2407 afMode = ANDROID_CONTROL_AF_MODE_OFF;
2408 }
2409
2410 switch (afMode) {
2411 case ANDROID_CONTROL_AF_MODE_OFF:
2412 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2413 return OK;
2414 case ANDROID_CONTROL_AF_MODE_AUTO:
2415 case ANDROID_CONTROL_AF_MODE_MACRO:
2416 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2417 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2418 if (!mFacingBack) {
2419 ALOGE("%s: Front camera doesn't support AF mode %d",
2420 __FUNCTION__, afMode);
2421 return BAD_VALUE;
2422 }
2423 mSensor->setAutoFocuas(afMode);
2424 // OK, handle transitions lower on
2425 break;
2426 default:
2427 ALOGE("%s: Emulator doesn't support AF mode %d",
2428 __FUNCTION__, afMode);
2429 return BAD_VALUE;
2430 }
2431#if 0
2432 e = settings.find(ANDROID_CONTROL_AF_REGIONS);
2433 if (e.count == 0) {
2434 ALOGE("%s:Get ANDROID_CONTROL_AF_REGIONS failed\n", __FUNCTION__);
2435 return BAD_VALUE;
2436 }
2437 int32_t x0 = e.data.i32[0];
2438 int32_t y0 = e.data.i32[1];
2439 int32_t x1 = e.data.i32[2];
2440 int32_t y1 = e.data.i32[3];
2441 mSensor->setFocuasArea(x0, y0, x1, y1);
2442 DBG_LOGB(" x0:%d, y0:%d,x1:%d,y1:%d,\n", x0, y0, x1, y1);
2443#endif
2444
2445
2446 bool afModeChanged = mAfMode != afMode;
2447 mAfMode = afMode;
2448
2449 /**
2450 * Simulate AF triggers. Transition at most 1 state per frame.
2451 * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
2452 */
2453
2454 bool afTriggerStart = false;
2455 bool afTriggerCancel = false;
2456 switch (afTrigger) {
2457 case ANDROID_CONTROL_AF_TRIGGER_IDLE:
2458 break;
2459 case ANDROID_CONTROL_AF_TRIGGER_START:
2460 afTriggerStart = true;
2461 break;
2462 case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
2463 afTriggerCancel = true;
2464 // Cancel trigger always transitions into INACTIVE
2465 mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2466
2467 ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);
2468
2469 // Stay in 'inactive' until at least next frame
2470 return OK;
2471 default:
2472 ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
2473 return BAD_VALUE;
2474 }
2475
2476 // If we get down here, we're either in an autofocus mode
2477 // or in a continuous focus mode (and no other modes)
2478
2479 int oldAfState = mAfState;
2480 switch (mAfState) {
2481 case ANDROID_CONTROL_AF_STATE_INACTIVE:
2482 if (afTriggerStart) {
2483 switch (afMode) {
2484 case ANDROID_CONTROL_AF_MODE_AUTO:
2485 // fall-through
2486 case ANDROID_CONTROL_AF_MODE_MACRO:
2487 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2488 break;
2489 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2490 // fall-through
2491 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2492 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2493 break;
2494 }
2495 } else {
2496 // At least one frame stays in INACTIVE
2497 if (!afModeChanged) {
2498 switch (afMode) {
2499 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2500 // fall-through
2501 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2502 mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
2503 break;
2504 }
2505 }
2506 }
2507 break;
2508 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2509 /**
2510 * When the AF trigger is activated, the algorithm should finish
2511 * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
2512 * or AF_NOT_FOCUSED as appropriate
2513 */
2514 if (afTriggerStart) {
2515 // Randomly transition to focused or not focused
2516 if (rand() % 3) {
2517 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2518 } else {
2519 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2520 }
2521 }
2522 /**
2523 * When the AF trigger is not involved, the AF algorithm should
2524 * start in INACTIVE state, and then transition into PASSIVE_SCAN
2525 * and PASSIVE_FOCUSED states
2526 */
2527 else if (!afTriggerCancel) {
2528 // Randomly transition to passive focus
2529 if (rand() % 3 == 0) {
2530 mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
2531 }
2532 }
2533
2534 break;
2535 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2536 if (afTriggerStart) {
2537 // Randomly transition to focused or not focused
2538 if (rand() % 3) {
2539 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2540 } else {
2541 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2542 }
2543 }
2544 // TODO: initiate passive scan (PASSIVE_SCAN)
2545 break;
2546 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2547 // Simulate AF sweep completing instantaneously
2548
2549 // Randomly transition to focused or not focused
2550 if (rand() % 3) {
2551 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2552 } else {
2553 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2554 }
2555 break;
2556 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2557 if (afTriggerStart) {
2558 switch (afMode) {
2559 case ANDROID_CONTROL_AF_MODE_AUTO:
2560 // fall-through
2561 case ANDROID_CONTROL_AF_MODE_MACRO:
2562 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2563 break;
2564 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2565 // fall-through
2566 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2567 // continuous autofocus => trigger start has no effect
2568 break;
2569 }
2570 }
2571 break;
2572 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2573 if (afTriggerStart) {
2574 switch (afMode) {
2575 case ANDROID_CONTROL_AF_MODE_AUTO:
2576 // fall-through
2577 case ANDROID_CONTROL_AF_MODE_MACRO:
2578 mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2579 break;
2580 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2581 // fall-through
2582 case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2583 // continuous autofocus => trigger start has no effect
2584 break;
2585 }
2586 }
2587 break;
2588 default:
2589 ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
2590 }
2591
2592 {
2593 char afStateString[100] = {0,};
2594 camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
2595 oldAfState,
2596 afStateString,
2597 sizeof(afStateString));
2598
2599 char afNewStateString[100] = {0,};
2600 camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
2601 mAfState,
2602 afNewStateString,
2603 sizeof(afNewStateString));
2604 ALOGVV("%s: AF state transitioned from %s to %s",
2605 __FUNCTION__, afStateString, afNewStateString);
2606 }
2607
2608
2609 return OK;
2610}
2611
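// Fake AWB: OFF reports INACTIVE; all auto modes report CONVERGED and, for non-USB
// sensors, forward the selected mode to the sensor.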
2612status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2613 camera_metadata_entry e;
2614
2615 e = settings.find(ANDROID_CONTROL_AWB_MODE);
2616 if (e.count == 0) {
2617 ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2618 return BAD_VALUE;
2619 }
2620 uint8_t awbMode = e.data.u8[0];
2621 //DBG_LOGB(" awbMode%d\n", awbMode);
2622
2623 // TODO: Add white balance simulation
2624
2625 switch (awbMode) {
2626 case ANDROID_CONTROL_AWB_MODE_OFF:
2627 mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2628 return OK;
2629 case ANDROID_CONTROL_AWB_MODE_AUTO:
2630 case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2631 case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2632 case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2633 case ANDROID_CONTROL_AWB_MODE_SHADE:
2634 mAwbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; //add for cts
2635 if (mSensorType == SENSOR_USB)
2636 return OK;
2637 else
2638 return mSensor->setAWB(awbMode);
2639 break;
2640 default:
2641 ALOGE("%s: Emulator doesn't support AWB mode %d",
2642 __FUNCTION__, awbMode);
2643 return BAD_VALUE;
2644 }
2645
2646 return OK;
2647}
2648
2649
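// Publish the current 3A states (and, while AE is active, the simulated exposure time
// and sensitivity) back into the request settings.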
2650void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2651 if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
2652 settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
2653 &mAeCurrentExposureTime, 1);
2654 settings.update(ANDROID_SENSOR_SENSITIVITY,
2655 &mAeCurrentSensitivity, 1);
2656 }
2657
2658 settings.update(ANDROID_CONTROL_AE_STATE,
2659 &mAeState, 1);
2660 settings.update(ANDROID_CONTROL_AF_STATE,
2661 &mAfState, 1);
2662 settings.update(ANDROID_CONTROL_AWB_STATE,
2663 &mAwbState, 1);
2664 /**
2665 * TODO: Trigger IDs need a think-through
2666 */
2667 settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
2668 &mAfTriggerId, 1);
2669}
2670
2671void EmulatedFakeCamera3::signalReadoutIdle() {
2672 Mutex::Autolock l(mLock);
2673 CAMHAL_LOGVB("%s , E" , __FUNCTION__);
2674 // Need to check isIdle again because waiting on mLock may have allowed
2675 // something to be placed in the in-flight queue.
2676 if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2677 ALOGV("Now idle");
2678 mStatus = STATUS_READY;
2679 }
2680 CAMHAL_LOGVB("%s , X , mStatus = %d " , __FUNCTION__, mStatus);
2681}
2682
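// Sensor callbacks: EXPOSURE_START is forwarded as a shutter notify, and device
// errors are forwarded as CAMERA3_MSG_ERROR notifications.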
2683void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2684 nsecs_t timestamp) {
2685 switch(e) {
2686 case Sensor::SensorListener::EXPOSURE_START: {
2687 ALOGVV("%s: Frame %d: Sensor started exposure at %" PRId64,
2688 __FUNCTION__, frameNumber, timestamp);
2689 // Trigger shutter notify to framework
2690 camera3_notify_msg_t msg;
2691 msg.type = CAMERA3_MSG_SHUTTER;
2692 msg.message.shutter.frame_number = frameNumber;
2693 msg.message.shutter.timestamp = timestamp;
2694 sendNotify(&msg);
2695 break;
2696 }
2697 case Sensor::SensorListener::ERROR_CAMERA_DEVICE: {
2698 camera3_notify_msg_t msg;
2699 msg.type = CAMERA3_MSG_ERROR;
2700 msg.message.error.frame_number = frameNumber;
2701 msg.message.error.error_stream = NULL;
2702 msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
2703 sendNotify(&msg);
2704 break;
2705 }
2706 default:
2707 ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
2708 e, timestamp);
2709 break;
2710 }
2711}
2712
2713EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
2714 mParent(parent), mJpegWaiting(false) {
2715 mExitReadoutThread = false;
2716 mFlushFlag = false;
2717}
2718
2719EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2720 for (List<Request>::iterator i = mInFlightQueue.begin();
2721 i != mInFlightQueue.end(); i++) {
2722 delete i->buffers;
2723 delete i->sensorBuffers;
2724 }
2725}
2726
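// Blocks until the in-flight queue drains: asks the sensor to flush and waits (with a
// bounded timeout) for threadLoop to signal mFlush.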
2727status_t EmulatedFakeCamera3::ReadoutThread::flushAllRequest(bool flag) {
2728 status_t res;
2729 mFlushFlag = flag;
2730 Mutex::Autolock l(mLock);
2731 CAMHAL_LOGDB("count = %zu", mInFlightQueue.size());
2732 if (mInFlightQueue.size() > 0) {
2733 mParent->mSensor->setFlushFlag(true);
2734 res = mFlush.waitRelative(mLock, kSyncWaitTimeout * 15);
2735 if (res != OK && res != TIMED_OUT) {
2736 ALOGE("%s: Error waiting for mFlush signal: %d",
2737 __FUNCTION__, res);
2738 return INVALID_OPERATION;
2739 }
2740 DBG_LOGA("finish flush all request");
2741 }
2742 return OK;
2743}
2744
2745void EmulatedFakeCamera3::ReadoutThread::sendFlushSingnal(void) {
2746 Mutex::Autolock l(mLock);
2747 mFlush.signal();
2748}
2749
2750void EmulatedFakeCamera3::ReadoutThread::setFlushFlag(bool flag) {
2751 mFlushFlag = flag;
2752}
2753
2754void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
2755 Mutex::Autolock l(mLock);
2756
2757 mInFlightQueue.push_back(r);
2758 mInFlightSignal.signal();
2759}
2760
2761bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
2762 Mutex::Autolock l(mLock);
2763 return mInFlightQueue.empty() && !mThreadActive;
2764}
2765
2766status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2767 status_t res;
2768 Mutex::Autolock l(mLock);
2769 CAMHAL_LOGVB("%s , E" , __FUNCTION__);
2770 int loopCount = 0;
2771 while (mInFlightQueue.size() >= kMaxQueueSize) {
2772 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2773 if (res != OK && res != TIMED_OUT) {
2774 ALOGE("%s: Error waiting for in-flight queue to shrink",
2775 __FUNCTION__);
2776 return INVALID_OPERATION;
2777 }
2778 if (loopCount == kMaxWaitLoops) {
2779 ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2780 __FUNCTION__);
2781 return TIMED_OUT;
2782 }
2783 loopCount++;
2784 }
2785 return OK;
2786}
2787
2788status_t EmulatedFakeCamera3::ReadoutThread::setJpegCompressorListener(EmulatedFakeCamera3 *parent) {
2789 status_t res;
2790 res = mParent->mJpegCompressor->setlistener(this);
2791 if (res != NO_ERROR) {
2792 ALOGE("%s: set JpegCompressor listener failed", __FUNCTION__);
2793 }
2794 return res;
2795}
2796
2797status_t EmulatedFakeCamera3::ReadoutThread::startJpegCompressor(EmulatedFakeCamera3 *parent) {
2798 status_t res;
2799 res = mParent->mJpegCompressor->start();
2800 if (res != NO_ERROR) {
2801 ALOGE("%s: JpegCompressor start failed",__FUNCTION__);
2802 }
2803 return res;
2804}
2805
2806status_t EmulatedFakeCamera3::ReadoutThread::shutdownJpegCompressor(EmulatedFakeCamera3 *parent) {
2807 status_t res;
2808 res = mParent->mJpegCompressor->cancel();
2809 if (res != OK) {
2810 ALOGE("%s: JpegCompressor cancel failed",__FUNCTION__);
2811 }
2812 return res;
2813}
2814
2815void EmulatedFakeCamera3::ReadoutThread::sendExitReadoutThreadSignal(void) {
2816 mExitReadoutThread = true;
2817 mInFlightSignal.signal();
2818}
2819
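// Readout loop: dequeue the next in-flight request, wait for the sensor frame, hand
// BLOB buffers to the JPEG compressor for async compression, unlock and return the
// remaining buffers, then send the capture result to the framework.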
2820bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
2821 status_t res;
2822 ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
2823
2824 // First wait for a request from the in-flight queue
2825 if (mExitReadoutThread) {
2826 return false;
2827 }
2828
2829 {
2830 Mutex::Autolock l(mLock);
2831 if ((mInFlightQueue.size() == 0) && (mFlushFlag) &&
2832 (mCurrentRequest.settings.isEmpty())) {
2833 mFlush.signal();
2834 }
2835 }
2836
2837 if (mCurrentRequest.settings.isEmpty()) {
2838 Mutex::Autolock l(mLock);
2839 if (mInFlightQueue.empty()) {
2840 res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2841 if (res == TIMED_OUT) {
2842 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
2843 __FUNCTION__);
2844 return true;
2845 } else if (res != NO_ERROR) {
2846 ALOGE("%s: Error waiting for capture requests: %d",
2847 __FUNCTION__, res);
2848 return false;
2849 }
2850 }
2851
2852 if (mExitReadoutThread) {
2853 return false;
2854 }
2855
2856 mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
2857 mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
2858 mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
2859 mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
2860 mCurrentRequest.havethumbnail = mInFlightQueue.begin()->havethumbnail;
2861 mInFlightQueue.erase(mInFlightQueue.begin());
2862 mInFlightSignal.signal();
2863 mThreadActive = true;
2864 ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
2865 mCurrentRequest.frameNumber);
2866 }
2867
2868 // Then wait for it to be delivered from the sensor
2869 ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
2870 __FUNCTION__);
2871
2872 nsecs_t captureTime;
2873 status_t gotFrame =
2874 mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
2875 if (gotFrame == 0) {
2876 ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
2877 __FUNCTION__);
2878 return true;
2879 }
2880
2881 if (gotFrame == -1) {
2882 DBG_LOGA("Sensor thread has exited; ReadoutThread loop should exit as well");
2883 return false;
2884 }
2885
2886 bool workflag =
2887 mParent->mSensor->get_sensor_status();
2888 if (!workflag)
2889 return true;
2890
2891 ALOGVV("Sensor done with readout for frame %d, captured at %" PRId64,
2892 mCurrentRequest.frameNumber, captureTime);
2893
2894 // Check if we need to JPEG encode a buffer, and send it for async
2895 // compression if so. Otherwise prepare the buffer for return.
2896 bool needJpeg = false;
2897 HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
2898 while (buf != mCurrentRequest.buffers->end()) {
2899 bool goodBuffer = true;
2900 if ( buf->stream->format ==
2901 HAL_PIXEL_FORMAT_BLOB) {
2902 Mutex::Autolock jl(mJpegLock);
2903 needJpeg = true;
2904 CaptureRequest currentcapture;
2905 currentcapture.frameNumber = mCurrentRequest.frameNumber;
2906 currentcapture.sensorBuffers = mCurrentRequest.sensorBuffers;
2907 currentcapture.buf = buf;
2908 currentcapture.mNeedThumbnail = mCurrentRequest.havethumbnail;
2909 mParent->mJpegCompressor->queueRequest(currentcapture);
2910 // Ownership of sensorBuffers passes to the JPEG compressor, which deletes it when done.
2911 mCurrentRequest.sensorBuffers = NULL;
2912 buf = mCurrentRequest.buffers->erase(buf);
2913 continue;
2914 }
2915 GraphicBufferMapper::get().unlock(*(buf->buffer));
2916
2917 buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
2918 CAMERA3_BUFFER_STATUS_ERROR;
2919 buf->acquire_fence = -1;
2920 buf->release_fence = -1;
2921
2922 ++buf;
2923 } // end while
2924
2925 // Construct result for all completed buffers and results
2926
2927 camera3_capture_result result;
2928
2929 mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
2930 &captureTime, 1);
2931
2932 const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
2933 mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
2934 &pipelineDepth, 1);
2935
2936 memset(&result, 0, sizeof(result));
2937 result.frame_number = mCurrentRequest.frameNumber;
2938 result.result = mCurrentRequest.settings.getAndLock();
2939 result.num_output_buffers = mCurrentRequest.buffers->size();
2940 result.output_buffers = mCurrentRequest.buffers->array();
2941 result.partial_result = 1;
2942
2943 // Go idle if queue is empty, before sending result
2944
2945 bool signalIdle = false;
2946 {
2947 Mutex::Autolock l(mLock);
2948 if (mInFlightQueue.empty()) {
2949 mThreadActive = false;
2950 signalIdle = true;
2951 }
2952 }
2953
2954 if (signalIdle) mParent->signalReadoutIdle();
2955
2956 // Send it off to the framework
2957 ALOGVV("%s: ReadoutThread: Send result to framework",
2958 __FUNCTION__);
2959 mParent->sendCaptureResult(&result);
2960
2961 // Clean up
2962 mCurrentRequest.settings.unlock(result.result);
2963
2964 delete mCurrentRequest.buffers;
2965 mCurrentRequest.buffers = NULL;
2966 if (!needJpeg) {
2967 delete mCurrentRequest.sensorBuffers;
2968 mCurrentRequest.sensorBuffers = NULL;
2969 }
2970 mCurrentRequest.settings.clear();
2971 CAMHAL_LOGVB("%s , X " , __FUNCTION__);
2972 return true;
2973}
2974
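// JPEG compressor callback: returns the compressed (or error-flagged) BLOB buffer to the
// framework in a buffers-only capture result.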
2975void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2976 const StreamBuffer &jpegBuffer, bool success , CaptureRequest &r) {
2977 Mutex::Autolock jl(mJpegLock);
2978 GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));
2979
2980 mJpegHalBuffer = *(r.buf);
2981 mJpegHalBuffer.status = success ?
2982 CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2983 mJpegHalBuffer.acquire_fence = -1;
2984 mJpegHalBuffer.release_fence = -1;
2985 mJpegWaiting = false;
2986
2987 camera3_capture_result result;
2988 result.frame_number = r.frameNumber;
2989 result.result = NULL;
2990 result.num_output_buffers = 1;
2991 result.output_buffers = &mJpegHalBuffer;
2992 result.partial_result = 1;
2993
2994 if (!success) {
2995 ALOGE("%s: Compression failure, returning error state buffer to"
2996 " framework", __FUNCTION__);
2997 } else {
2998 DBG_LOGB("%s: Compression complete, returning buffer to framework",
2999 __FUNCTION__);
3000 }
3001
3002 mParent->sendCaptureResult(&result);
3003
3004}
3005
3006void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
3007 const StreamBuffer &inputBuffer) {
3008 // Should never get here, since the input buffer has to be returned
3009 // by end of processCaptureRequest
3010 ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
3011}
3012
3013
3014}; // namespace android
3015