blob: 4d8d308d7d8ef9073f6ee46921727eea6e757025
1 | /* |
2 | * Copyright (C) 2012 The Android Open Source Project |
3 | * |
4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
5 | * you may not use this file except in compliance with the License. |
6 | * You may obtain a copy of the License at |
7 | * |
8 | * http://www.apache.org/licenses/LICENSE-2.0 |
9 | * |
10 | * Unless required by applicable law or agreed to in writing, software |
11 | * distributed under the License is distributed on an "AS IS" BASIS, |
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
13 | * See the License for the specific language governing permissions and |
14 | * limitations under the License. |
15 | */ |
16 | |
17 | /** |
18 | * This class is a simple simulation of a typical CMOS cellphone imager chip, |
19 | * which outputs 12-bit Bayer-mosaic raw images. |
20 | * |
21 | * Unlike most real image sensors, this one's native color space is linear sRGB. |
22 | * |
23 | * The sensor is abstracted as operating as a pipeline 3 stages deep; |
24 | * conceptually, each frame to be captured goes through these three stages. The |
25 | * processing step for the sensor is marked off by vertical sync signals, which |
26 | * indicate the start of readout of the oldest frame. The interval between |
27 | * processing steps depends on the frame duration of the frame currently being |
28 | * captured. The stages are 1) configure, 2) capture, and 3) readout. During |
29 | * configuration, the sensor's registers for settings such as exposure time, |
30 | * frame duration, and gain are set for the next frame to be captured. In stage |
31 | * 2, the image data for the frame is actually captured by the sensor. Finally, |
32 | * in stage 3, the just-captured data is read out and sent to the rest of the |
33 | * system. |
34 | * |
35 | * The sensor is assumed to be rolling-shutter, so low-numbered rows of the |
36 | * sensor are exposed earlier in time than larger-numbered rows, with the time |
37 | * offset between each row being equal to the row readout time. |
38 | * |
39 | * The characteristics of this sensor don't correspond to any actual sensor, |
40 | * but are not far off typical sensors. |
41 | * |
42 | * Example timing diagram, with three frames: |
43 | * Frame 0-1: Frame duration 50 ms, exposure time 20 ms. |
44 | * Frame 2: Frame duration 75 ms, exposure time 65 ms. |
45 | * Legend: |
46 | * C = update sensor registers for frame |
47 | * v = row in reset (vertical blanking interval) |
48 | * E = row capturing image data |
49 | * R = row being read out |
50 | * | = vertical sync signal |
51 | *time(ms)| 0 55 105 155 230 270 |
52 | * Frame 0| :configure : capture : readout : : : |
53 | * Row # | ..|CCCC______|_________|_________| : : |
54 | * 0 | :\ \vvvvvEEEER \ : : |
55 | * 500 | : \ \vvvvvEEEER \ : : |
56 | * 1000 | : \ \vvvvvEEEER \ : : |
57 | * 1500 | : \ \vvvvvEEEER \ : : |
58 | * 2000 | : \__________\vvvvvEEEER_________\ : : |
59 | * Frame 1| : configure capture readout : : |
60 | * Row # | : |CCCC_____|_________|______________| : |
61 | * 0 | : :\ \vvvvvEEEER \ : |
62 | * 500 | : : \ \vvvvvEEEER \ : |
63 | * 1000 | : : \ \vvvvvEEEER \ : |
64 | * 1500 | : : \ \vvvvvEEEER \ : |
65 | * 2000 | : : \_________\vvvvvEEEER______________\ : |
66 | * Frame 2| : : configure capture readout: |
67 | * Row # | : : |CCCC_____|______________|_______|... |
68 | * 0 | : : :\ \vEEEEEEEEEEEEER \ |
69 | * 500 | : : : \ \vEEEEEEEEEEEEER \ |
70 | * 1000 | : : : \ \vEEEEEEEEEEEEER \ |
71 | * 1500 | : : : \ \vEEEEEEEEEEEEER \ |
72 | * 2000 | : : : \_________\vEEEEEEEEEEEEER_______\ |
73 | */ |
74 | |
75 | #ifndef HW_EMULATOR_CAMERA2_SENSOR_H |
76 | #define HW_EMULATOR_CAMERA2_SENSOR_H |
77 | |
78 | #include "utils/Thread.h" |
79 | #include "utils/Mutex.h" |
80 | #include "utils/Timers.h" |
81 | #include <utils/String8.h> |
82 | |
83 | #include "Scene.h" |
84 | //#include "Base.h" |
85 | #include "camera_hw.h" |
86 | #include <cstdlib> |
87 | |
88 | namespace android { |
89 | |
/* Mirror/flip transform applied to the sensor output image. */
typedef enum camera_mirror_flip_e {
    MF_NORMAL = 0,   // no mirror, no flip
    MF_MIRROR,       // mirrored
    MF_FLIP,         // flipped
    MF_MIRROR_FLIP,  // mirrored and flipped
}camera_mirror_flip_t;
96 | |
97 | |
/*
 * White-balance presets selectable on the sensor.
 * (The "_flip" suffix just follows this header's enum naming pattern;
 * these are WB modes, not mirror/flip settings.)
 */
typedef enum camera_wb_flip_e {
    CAM_WB_AUTO = 0,  // automatic white balance
    CAM_WB_CLOUD,
    CAM_WB_DAYLIGHT,
    CAM_WB_INCANDESCENCE,
    CAM_WB_TUNGSTEN,
    CAM_WB_FLUORESCENT,
    CAM_WB_MANUAL,    // manually supplied white balance
    CAM_WB_SHADE,
    CAM_WB_TWILIGHT,
    CAM_WB_WARM_FLUORESCENT,
}camera_wb_flip_t;
110 | |
/* Color-effect modes applied to the captured image. */
typedef enum camera_effect_flip_e {
    CAM_EFFECT_ENC_NORMAL = 0,   // no effect
    CAM_EFFECT_ENC_GRAYSCALE,    // monochrome
    CAM_EFFECT_ENC_SEPIA,
    CAM_EFFECT_ENC_SEPIAGREEN,
    CAM_EFFECT_ENC_SEPIABLUE,
    CAM_EFFECT_ENC_COLORINV,     // color inversion (negative)
}camera_effect_flip_t;
119 | |
/* Night-mode setting: automatic selection or forced on. */
typedef enum camera_night_mode_flip_e {
    CAM_NM_AUTO = 0,
    CAM_NM_ENABLE,
}camera_night_mode_flip_t;
124 | |
/*
 * Anti-banding (power-line flicker compensation) modes.
 * The first three values are deliberately pinned to the corresponding V4L2
 * power-line-frequency control values so they can be handed straight to the
 * driver; AUTO and OFF continue the sequence after
 * V4L2_CID_POWER_LINE_FREQUENCY_60HZ.
 */
typedef enum camera_banding_mode_flip_e {
    CAM_ANTIBANDING_DISABLED= V4L2_CID_POWER_LINE_FREQUENCY_DISABLED,
    CAM_ANTIBANDING_50HZ  = V4L2_CID_POWER_LINE_FREQUENCY_50HZ,
    CAM_ANTIBANDING_60HZ  = V4L2_CID_POWER_LINE_FREQUENCY_60HZ,
    CAM_ANTIBANDING_AUTO,
    CAM_ANTIBANDING_OFF,
}camera_banding_mode_flip_t;
132 | |
/* Flash/torch operating modes. */
typedef enum camera_flashlight_status_e{
    FLASHLIGHT_AUTO = 0,   // fire flash automatically as needed
    FLASHLIGHT_ON,         // always fire on capture
    FLASHLIGHT_OFF,        // never fire
    FLASHLIGHT_TORCH,      // continuous torch mode
    FLASHLIGHT_RED_EYE,    // red-eye reduction mode
}camera_flashlight_status_t;
140 | |
/* Autofocus modes supported by the sensor/driver. */
typedef enum camera_focus_mode_e {
    CAM_FOCUS_MODE_RELEASE = 0,  // release/idle focus state
    CAM_FOCUS_MODE_FIXED,        // fixed-focus lens
    CAM_FOCUS_MODE_INFINITY,     // focus locked at infinity
    CAM_FOCUS_MODE_AUTO,         // single-shot autofocus
    CAM_FOCUS_MODE_MACRO,        // close-range focus
    CAM_FOCUS_MODE_EDOF,         // extended depth of field
    CAM_FOCUS_MODE_CONTI_VID,    // continuous AF for video
    CAM_FOCUS_MODE_CONTI_PIC,    // continuous AF for still capture
}camera_focus_mode_t;
151 | |
/*
 * Buffer-transport/memory mode used to exchange frames with the sensor
 * driver. NOTE(review): exact driver-side semantics are defined in the
 * implementation file, not visible here — the names suggest V4L2 memory
 * modes (MMAP, ION, multi-plane ION, DMA, canvas, USB/UVC, shared fd);
 * confirm against the .cpp before relying on them.
 */
typedef enum sensor_type_e{
    SENSOR_MMAP = 0,
    SENSOR_ION,
    SENSOR_ION_MPLANE,
    SENSOR_DMA,
    SENSOR_CANVAS_MODE,
    SENSOR_USB,
    SENSOR_SHARE_FD,
}sensor_type_t;
161 | |
/* Which side of the device the sensor faces. */
typedef enum sensor_face_type_e{
    SENSOR_FACE_NONE= 0,   // facing unknown/not applicable
    SENSOR_FACE_FRONT,     // front-facing camera
    SENSOR_FACE_BACK,      // rear-facing camera
}sensor_face_type_t;
// Bitmask flag: driver supports rotation via ioctl (used with mIoctlSupport).
#define IOCTL_MASK_ROTATE (1<<0)
168 | |
/**
 * Camera sensor abstraction for the emulated camera HAL.
 *
 * Runs its own processing thread (Thread is inherited privately, so callers
 * interact only through the public control/synchronization API below) and is
 * reference-counted via RefBase. Frame data is sourced from a V4L2 device
 * (see vinfo) and/or the simulated Scene, and written into caller-supplied
 * destination buffers.
 */
class Sensor: private Thread, public virtual RefBase {
  public:

    Sensor();
    ~Sensor();

    /*
     * Power control
     */

    // Start the sensor and its processing thread for camera index `idx`.
    status_t startUp(int idx);
    // Stop streaming and shut the processing thread down.
    status_t shutDown();

    // Current native output pixel format of the sensor.
    int getOutputFormat();
    // Translate a HAL pixel format into the matching sensor/V4L2 format.
    int halFormatToSensorFormat(uint32_t pixelfmt);
    // Configure output resolution/format; `isjpeg` selects the still path.
    status_t setOutputFormat(int width, int height, int pixelformat, bool isjpeg);
    void setPictureRotate(int rotate);
    int getPictureRotate();
    // Gralloc usage flags appropriate for the given stream type.
    uint32_t getStreamUsage(int stream_type);

    status_t streamOn();
    status_t streamOff();

    // Fill picSizes with supported dimensions; returns count written.
    int getPictureSizes(int32_t picSizes[], int size, bool preview);
    // Fill picSizes with stream configuration entries for the given formats.
    int getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size);
    int64_t getMinFrameDuration();
    // Fill duration[] with min frame durations per configuration entry.
    int getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size);
    bool isStreaming();
    // True if switching to the given size/format requires a stream restart.
    bool isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat);
    // Probe which ioctls the driver supports (results kept in mIoctlSupport).
    status_t IoctlStateProbe(void);
    void dump(int fd);
    /*
     * Access to scene
     */
    Scene &getScene();

    /*
     * Controls that can be updated every frame
     */

    int getZoom(int *zoomMin, int *zoomMax, int *zoomStep);
    int setZoom(int zoomValue);
    // Query exposure-compensation range/step. NOTE(review): first parameter
    // "mamExp" is presumably "maxExp" [sic in original interface].
    int getExposure(int *mamExp, int *minExp, int *def, camera_metadata_rational *step);
    status_t setExposure(int expCmp);
    status_t setEffect(uint8_t effect);
    int getAntiBanding(uint8_t *antiBanding, uint8_t maxCont);
    status_t setAntiBanding(uint8_t antiBanding);
    // Set the focus region rectangle. Name is misspelled ("Focuas") but is
    // part of the established interface and must not be changed here. [sic]
    status_t setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
    int getAWB(uint8_t *awbMode, uint8_t maxCount);
    status_t setAWB(uint8_t awbMode);
    // Set autofocus mode. Name is misspelled ("Focuas") but kept for ABI
    // compatibility with existing callers/definitions. [sic]
    status_t setAutoFocuas(uint8_t afMode);
    int getAutoFocus(uint8_t *afMode, uint8_t maxCount);
    void setExposureTime(uint64_t ns);
    void setFrameDuration(uint64_t ns);
    void setSensitivity(uint32_t gain);
    // Buffer must be at least stride*height*2 bytes in size
    void setDestinationBuffers(Buffers *buffers);
    // To simplify tracking sensor's current frame
    void setFrameNumber(uint32_t frameNumber);

    /*
     * Controls that cause reconfiguration delay
     */

    void setBinning(int horizontalFactor, int verticalFactor);

    /*
     * Synchronizing with sensor operation (vertical sync)
     */

    // Wait until the sensor outputs its next vertical sync signal, meaning it
    // is starting readout of its latest frame of data. Returns true if vertical
    // sync is signaled, false if the wait timed out.
    bool waitForVSync(nsecs_t reltime);

    // Wait until a new frame has been read out, and then return the time
    // capture started.  May return immediately if a new frame has been pushed
    // since the last wait for a new frame. Returns true if new frame is
    // returned, false if timed out.
    bool waitForNewFrame(nsecs_t reltime,
            nsecs_t *captureTime);

    /*
     * Interrupt event servicing from the sensor. Only triggers for sensor
     * cycles that have valid buffers to write to.
     */
    struct SensorListener {
        enum Event {
            EXPOSURE_START, // Start of exposure
            ERROR_CAMERA_DEVICE,
        };

        // Called from the sensor thread when `e` occurs for `frameNumber`.
        virtual void onSensorEvent(uint32_t frameNumber, Event e,
                nsecs_t timestamp) = 0;
        virtual ~SensorListener();
    };

    void setSensorListener(SensorListener *listener);

    /**
     * Static sensor characteristics
     */
    static const unsigned int kResolution[2];

    static const nsecs_t kExposureTimeRange[2];
    static const nsecs_t kFrameDurationRange[2];
    static const nsecs_t kMinVerticalBlank;

    static const uint8_t kColorFilterArrangement;

    // Output image data characteristics
    static const uint32_t kMaxRawValue;
    static const uint32_t kBlackLevel;
    // Sensor sensitivity, approximate

    static const float kSaturationVoltage;
    static const uint32_t kSaturationElectrons;
    static const float kVoltsPerLuxSecond;
    static const float kElectronsPerLuxSecond;

    static const float kBaseGainFactor;

    static const float kReadNoiseStddevBeforeGain; // In electrons
    static const float kReadNoiseStddevAfterGain;  // In raw digital units
    static const float kReadNoiseVarBeforeGain;
    static const float kReadNoiseVarAfterGain;

    // While each row has to read out, reset, and then expose, the (reset +
    // expose) sequence can be overlapped by other row readouts, so the final
    // minimum frame duration is purely a function of row readout time, at least
    // if there's a reasonable number of rows.
    static const nsecs_t kRowReadoutTime;

    static const int32_t kSensitivityRange[2];
    static const uint32_t kDefaultSensitivity;

    sensor_type_e getSensorType(void);

    // Facing (front/back/none) of this sensor instance.
    sensor_face_type_e mSensorFace;

  private:
    Mutex mControlMutex; // Lock before accessing control parameters
    // Start of control parameters
    Condition mVSync;
    bool mGotVSync;
    uint64_t mExposureTime;   // in ns (see setExposureTime)
    uint64_t mFrameDuration;  // in ns (see setFrameDuration)
    uint32_t mGainFactor;
    Buffers *mNextBuffers;    // destination buffers for the next frame
    uint8_t *mKernelBuffer;
    uintptr_t mKernelPhysAddr;
    uint32_t mFrameNumber;
    int mRotateValue;

    // End of control parameters

    int mEV;  // exposure-compensation value (see setExposure)

    Mutex mReadoutMutex; // Lock before accessing readout variables
    // Start of readout variables
    Condition mReadoutAvailable;
    Condition mReadoutComplete;
    Buffers *mCapturedBuffers;
    nsecs_t mCaptureTime;
    SensorListener *mListener;
    // End of readout variables

    // Time of sensor startup, used for simulation zero-time point
    nsecs_t mStartupTime;

    //store the v4l2 info
    struct VideoInfo *vinfo;

    // Timestamps used for frame-rate measurement (see mFramecount/mCurFps).
    struct timeval mTimeStart, mTimeEnd;
    struct timeval mTestStart, mTestEnd;

    uint32_t mFramecount;  // frames counted since the last FPS computation
    float mCurFps;         // most recently measured frame rate

    enum sensor_type_e mSensorType;
    unsigned int mIoctlSupport;   // bitmask of IOCTL_MASK_* capability flags
    unsigned int msupportrotate;  // nonzero if the driver supports rotation

    /**
     * Inherited Thread virtual overrides, and members only used by the
     * processing thread
     */
  private:
    virtual status_t readyToRun();

    // Processing thread annex -- likely ordering is:
    virtual bool threadLoop();

    nsecs_t mNextCaptureTime;
    Buffers *mNextCapturedBuffers;

    Scene mScene;  // simulated scene used by the capture* helpers

    // Capture helpers: each fills the destination with one frame's data in
    // the named format, applying `gain`.
    int captureNewImageWithGe2d();
    int captureNewImage();
    void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureNV21(StreamBuffer b, uint32_t gain);
    void captureYV12(StreamBuffer b, uint32_t gain);
    void captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride);
    // Pixel-format conversions between packed YUYV and planar YUV layouts.
    void YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height);
    void YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height);
};
377 | |
378 | } |
379 | |
380 | #endif // HW_EMULATOR_CAMERA2_SENSOR_H |
381 |