summaryrefslogtreecommitdiff
path: root/v3/fake-pipeline2/Sensor.h (plain)
blob: 1f6e77f272d44c116024ccc15dc4dea1a1d8d2bf
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/**
18 * This class is a simple simulation of a typical CMOS cellphone imager chip,
19 * which outputs 12-bit Bayer-mosaic raw images.
20 *
21 * Unlike most real image sensors, this one's native color space is linear sRGB.
22 *
23 * The sensor is abstracted as operating as a pipeline 3 stages deep;
24 * conceptually, each frame to be captured goes through these three stages. The
25 * processing step for the sensor is marked off by vertical sync signals, which
26 * indicate the start of readout of the oldest frame. The interval between
27 * processing steps depends on the frame duration of the frame currently being
28 * captured. The stages are 1) configure, 2) capture, and 3) readout. During
29 * configuration, the sensor's registers for settings such as exposure time,
30 * frame duration, and gain are set for the next frame to be captured. In stage
31 * 2, the image data for the frame is actually captured by the sensor. Finally,
32 * in stage 3, the just-captured data is read out and sent to the rest of the
33 * system.
34 *
35 * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
36 * sensor are exposed earlier in time than larger-numbered rows, with the time
37 * offset between each row being equal to the row readout time.
38 *
39 * The characteristics of this sensor don't correspond to any actual sensor,
40 * but are not far off typical sensors.
41 *
42 * Example timing diagram, with three frames:
43 * Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
44 * Frame 2: Frame duration 75 ms, exposure time 65 ms.
45 * Legend:
46 * C = update sensor registers for frame
47 * v = row in reset (vertical blanking interval)
48 * E = row capturing image data
49 * R = row being read out
50 * | = vertical sync signal
51 *time(ms)| 0 55 105 155 230 270
52 * Frame 0| :configure : capture : readout : : :
53 * Row # | ..|CCCC______|_________|_________| : :
54 * 0 | :\ \vvvvvEEEER \ : :
55 * 500 | : \ \vvvvvEEEER \ : :
56 * 1000 | : \ \vvvvvEEEER \ : :
57 * 1500 | : \ \vvvvvEEEER \ : :
58 * 2000 | : \__________\vvvvvEEEER_________\ : :
59 * Frame 1| : configure capture readout : :
60 * Row # | : |CCCC_____|_________|______________| :
61 * 0 | : :\ \vvvvvEEEER \ :
62 * 500 | : : \ \vvvvvEEEER \ :
63 * 1000 | : : \ \vvvvvEEEER \ :
64 * 1500 | : : \ \vvvvvEEEER \ :
65 * 2000 | : : \_________\vvvvvEEEER______________\ :
66 * Frame 2| : : configure capture readout:
67 * Row # | : : |CCCC_____|______________|_______|...
68 * 0 | : : :\ \vEEEEEEEEEEEEER \
69 * 500 | : : : \ \vEEEEEEEEEEEEER \
70 * 1000 | : : : \ \vEEEEEEEEEEEEER \
71 * 1500 | : : : \ \vEEEEEEEEEEEEER \
72 * 2000 | : : : \_________\vEEEEEEEEEEEEER_______\
73 */
74
75#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
76#define HW_EMULATOR_CAMERA2_SENSOR_H
77
78#include "utils/Thread.h"
79#include "utils/Mutex.h"
80#include "utils/Timers.h"
81#include <utils/String8.h>
82
83#include "Scene.h"
84//#include "Base.h"
85#include "camera_hw.h"
86#include <cstdlib>
87
88namespace android {
89
// Orientation transforms that can be applied to the sensor output image.
typedef enum camera_mirror_flip_e {
    MF_NORMAL = 0,   // image passed through unchanged
    MF_MIRROR,       // mirrored horizontally
    MF_FLIP,         // flipped vertically
    MF_MIRROR_FLIP,  // mirrored and flipped (equivalent to 180-degree rotation)
} camera_mirror_flip_t;
96
97
// White-balance presets selectable on the sensor.
typedef enum camera_wb_flip_e {
    CAM_WB_AUTO = 0,          // automatic white balance
    CAM_WB_CLOUD,             // cloudy daylight
    CAM_WB_DAYLIGHT,          // direct daylight
    CAM_WB_INCANDESCENCE,     // incandescent lighting
    CAM_WB_TUNGSTEN,          // tungsten lighting
    CAM_WB_FLUORESCENT,       // fluorescent lighting
    CAM_WB_MANUAL,            // manually specified white balance
    CAM_WB_SHADE,             // open shade
    CAM_WB_TWILIGHT,          // twilight / dusk
    CAM_WB_WARM_FLUORESCENT,  // warm fluorescent lighting
} camera_wb_flip_t;
110
// Color-effect modes applied to the encoded output.
typedef enum camera_effect_flip_e {
    CAM_EFFECT_ENC_NORMAL = 0,   // no effect
    CAM_EFFECT_ENC_GRAYSCALE,    // monochrome
    CAM_EFFECT_ENC_SEPIA,        // sepia tone
    CAM_EFFECT_ENC_SEPIAGREEN,   // green-tinted sepia
    CAM_EFFECT_ENC_SEPIABLUE,    // blue-tinted sepia
    CAM_EFFECT_ENC_COLORINV,     // color inversion (negative)
} camera_effect_flip_t;
119
// Night-mode switch: either automatic selection or forced on.
typedef enum camera_night_mode_flip_e {
    CAM_NM_AUTO = 0,  // sensor decides based on scene brightness
    CAM_NM_ENABLE,    // night mode forced on
} camera_night_mode_flip_t;
124
// Anti-banding (power-line flicker compensation) modes. The first three
// entries alias the kernel's V4L2 power-line-frequency control values so
// they can be handed directly to the driver; AUTO and OFF simply continue
// the enumeration after the 60HZ value.
// NOTE(review): whether CAM_ANTIBANDING_AUTO numerically matches the
// kernel's V4L2_CID_POWER_LINE_FREQUENCY_AUTO depends on the kernel
// headers in use -- confirm before passing AUTO/OFF to VIDIOC_S_CTRL.
typedef enum camera_banding_mode_flip_e {
    CAM_ANTIBANDING_DISABLED= V4L2_CID_POWER_LINE_FREQUENCY_DISABLED,
    CAM_ANTIBANDING_50HZ = V4L2_CID_POWER_LINE_FREQUENCY_50HZ,
    CAM_ANTIBANDING_60HZ = V4L2_CID_POWER_LINE_FREQUENCY_60HZ,
    CAM_ANTIBANDING_AUTO,
    CAM_ANTIBANDING_OFF,
}camera_banding_mode_flip_t;
132
// Flash / torch operating states.
typedef enum camera_flashlight_status_e {
    FLASHLIGHT_AUTO = 0,   // fire flash automatically when needed
    FLASHLIGHT_ON,         // always fire flash for capture
    FLASHLIGHT_OFF,        // never fire flash
    FLASHLIGHT_TORCH,      // continuous torch mode
    FLASHLIGHT_RED_EYE,    // red-eye reduction mode
} camera_flashlight_status_t;
140
// Autofocus operating modes.
typedef enum camera_focus_mode_e {
    CAM_FOCUS_MODE_RELEASE = 0,  // release focus control
    CAM_FOCUS_MODE_FIXED,        // fixed-focus lens
    CAM_FOCUS_MODE_INFINITY,     // focus locked at infinity
    CAM_FOCUS_MODE_AUTO,         // single-shot autofocus
    CAM_FOCUS_MODE_MACRO,        // close-range (macro) focus
    CAM_FOCUS_MODE_EDOF,         // extended depth of field
    CAM_FOCUS_MODE_CONTI_VID,    // continuous autofocus for video
    CAM_FOCUS_MODE_CONTI_PIC,    // continuous autofocus for still capture
} camera_focus_mode_t;
151
// Buffer-transport mechanism used between the kernel driver and the HAL.
typedef enum sensor_type_e {
    SENSOR_MMAP = 0,     // V4L2 memory-mapped buffers
    SENSOR_ION,          // ION allocator buffers
    SENSOR_ION_MPLANE,   // ION with multi-planar V4L2 API
    SENSOR_DMA,          // DMA buffers
    SENSOR_CANVAS_MODE,  // vendor canvas mode
    SENSOR_USB,          // USB (UVC) camera
    SENSOR_SHARE_FD,     // shared-fd buffers
} sensor_type_t;
161
// Physical placement of the sensor on the device.
typedef enum sensor_face_type_e {
    SENSOR_FACE_NONE = 0,  // unknown / not applicable
    SENSOR_FACE_FRONT,     // front-facing camera
    SENSOR_FACE_BACK,      // rear-facing camera
} sensor_face_type_t;
167
// One discrete frame size (in pixels) supported by a USB camera;
// layout mirrors the kernel's struct v4l2_frmsize_discrete.
typedef struct usb_frmsize_discrete {
    uint32_t width;   // frame width in pixels
    uint32_t height;  // frame height in pixels
} usb_frmsize_discrete_t;
172
173#define IOCTL_MASK_ROTATE (1<<0)
174
175class Sensor: private Thread, public virtual RefBase {
176 public:
177
178 Sensor();
179 ~Sensor();
180
181 /*
182 * Power control
183 */
184 void sendExitSingalToSensor();
185 status_t startUp(int idx);
186 status_t shutDown();
187
188 int getOutputFormat();
189 int halFormatToSensorFormat(uint32_t pixelfmt);
190 status_t setOutputFormat(int width, int height, int pixelformat, bool isjpeg);
191 void setPictureRotate(int rotate);
192 int getPictureRotate();
193 uint32_t getStreamUsage(int stream_type);
194
195 status_t streamOn();
196 status_t streamOff();
197
198 int getPictureSizes(int32_t picSizes[], int size, bool preview);
199 int getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size);
200 int64_t getMinFrameDuration();
201 int getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size);
202 bool isStreaming();
203 bool isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat);
204 status_t IoctlStateProbe(void);
205 void dump(int fd);
206 /*
207 * Access to scene
208 */
209 Scene &getScene();
210
211 /*
212 * Controls that can be updated every frame
213 */
214
215 int getZoom(int *zoomMin, int *zoomMax, int *zoomStep);
216 int setZoom(int zoomValue);
217 int getExposure(int *mamExp, int *minExp, int *def, camera_metadata_rational *step);
218 status_t setExposure(int expCmp);
219 status_t setEffect(uint8_t effect);
220 int getAntiBanding(uint8_t *antiBanding, uint8_t maxCont);
221 status_t setAntiBanding(uint8_t antiBanding);
222 status_t setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
223 int getAWB(uint8_t *awbMode, uint8_t maxCount);
224 status_t setAWB(uint8_t awbMode);
225 status_t setAutoFocuas(uint8_t afMode);
226 int getAutoFocus(uint8_t *afMode, uint8_t maxCount);
227 void setExposureTime(uint64_t ns);
228 void setFrameDuration(uint64_t ns);
229 void setSensitivity(uint32_t gain);
230 // Buffer must be at least stride*height*2 bytes in size
231 void setDestinationBuffers(Buffers *buffers);
232 // To simplify tracking sensor's current frame
233 void setFrameNumber(uint32_t frameNumber);
234 void setFlushFlag(bool flushFlag);
235 status_t force_reset_sensor();
236 bool get_sensor_status();
237 /*
238 * Controls that cause reconfiguration delay
239 */
240
241 void setBinning(int horizontalFactor, int verticalFactor);
242
243 /*
244 * Synchronizing with sensor operation (vertical sync)
245 */
246
247 // Wait until the sensor outputs its next vertical sync signal, meaning it
248 // is starting readout of its latest frame of data. Returns true if vertical
249 // sync is signaled, false if the wait timed out.
250 status_t waitForVSync(nsecs_t reltime);
251
252 // Wait until a new frame has been read out, and then return the time
253 // capture started. May return immediately if a new frame has been pushed
254 // since the last wait for a new frame. Returns true if new frame is
255 // returned, false if timed out.
256 status_t waitForNewFrame(nsecs_t reltime,
257 nsecs_t *captureTime);
258
259 /*
260 * Interrupt event servicing from the sensor. Only triggers for sensor
261 * cycles that have valid buffers to write to.
262 */
263 struct SensorListener {
264 enum Event {
265 EXPOSURE_START, // Start of exposure
266 ERROR_CAMERA_DEVICE,
267 };
268
269 virtual void onSensorEvent(uint32_t frameNumber, Event e,
270 nsecs_t timestamp) = 0;
271 virtual ~SensorListener();
272 };
273
274 void setSensorListener(SensorListener *listener);
275
276 /**
277 * Static sensor characteristics
278 */
279 static const unsigned int kResolution[2];
280
281 static const nsecs_t kExposureTimeRange[2];
282 static const nsecs_t kFrameDurationRange[2];
283 static const nsecs_t kMinVerticalBlank;
284
285 static const uint8_t kColorFilterArrangement;
286
287 // Output image data characteristics
288 static const uint32_t kMaxRawValue;
289 static const uint32_t kBlackLevel;
290 // Sensor sensitivity, approximate
291
292 static const float kSaturationVoltage;
293 static const uint32_t kSaturationElectrons;
294 static const float kVoltsPerLuxSecond;
295 static const float kElectronsPerLuxSecond;
296
297 static const float kBaseGainFactor;
298
299 static const float kReadNoiseStddevBeforeGain; // In electrons
300 static const float kReadNoiseStddevAfterGain; // In raw digital units
301 static const float kReadNoiseVarBeforeGain;
302 static const float kReadNoiseVarAfterGain;
303
304 // While each row has to read out, reset, and then expose, the (reset +
305 // expose) sequence can be overlapped by other row readouts, so the final
306 // minimum frame duration is purely a function of row readout time, at least
307 // if there's a reasonable number of rows.
308 static const nsecs_t kRowReadoutTime;
309
310 static const int32_t kSensitivityRange[2];
311 static const uint32_t kDefaultSensitivity;
312
313 sensor_type_e getSensorType(void);
314
315 sensor_face_type_e mSensorFace;
316
317 private:
318 Mutex mControlMutex; // Lock before accessing control parameters
319 // Start of control parameters
320 Condition mVSync;
321 bool mGotVSync;
322 uint64_t mExposureTime;
323 uint64_t mFrameDuration;
324 uint32_t mGainFactor;
325 Buffers *mNextBuffers;
326 uint8_t *mKernelBuffer;
327 uintptr_t mKernelPhysAddr;
328 uint32_t mFrameNumber;
329 int mRotateValue;
330 // End of control parameters
331
332 int mEV;
333
334 Mutex mReadoutMutex; // Lock before accessing readout variables
335 // Start of readout variables
336 Condition mReadoutAvailable;
337 Condition mReadoutComplete;
338 Buffers *mCapturedBuffers;
339 nsecs_t mCaptureTime;
340 SensorListener *mListener;
341 // End of readout variables
342
343 uint8_t *mTemp_buffer;
344 bool mExitSensorThread;
345
346 // Time of sensor startup, used for simulation zero-time point
347 nsecs_t mStartupTime;
348
349 //store the v4l2 info
350 struct VideoInfo *vinfo;
351
352 struct timeval mTimeStart, mTimeEnd;
353 struct timeval mTestStart, mTestEnd;
354
355 uint32_t mFramecount;
356 float mCurFps;
357
358 enum sensor_type_e mSensorType;
359 unsigned int mIoctlSupport;
360 unsigned int msupportrotate;
361 uint32_t mTimeOutCount;
362 bool mWait;
363 uint32_t mPre_width;
364 uint32_t mPre_height;
365 bool mFlushFlag;
366 bool mSensorWorkFlag;
367 /**
368 * Inherited Thread virtual overrides, and members only used by the
369 * processing thread
370 */
371 private:
372 virtual status_t readyToRun();
373
374 virtual bool threadLoop();
375
376 nsecs_t mNextCaptureTime;
377 Buffers *mNextCapturedBuffers;
378
379 Scene mScene;
380
381 int captureNewImageWithGe2d();
382 int captureNewImage();
383 void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
384 void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
385 void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
386 void captureNV21(StreamBuffer b, uint32_t gain);
387 void captureYV12(StreamBuffer b, uint32_t gain);
388 void captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride);
389 void YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height);
390 void YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height);
391};
392
393}
394
395#endif // HW_EMULATOR_CAMERA2_SENSOR_H
396