summaryrefslogtreecommitdiff
path: root/v3/fake-pipeline2/Sensor.h (plain)
blob: 05c14365a8216aa0b011953a81a602268b5bc940
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This class is a simple simulation of a typical CMOS cellphone imager chip,
 * which outputs 12-bit Bayer-mosaic raw images.
 *
 * Unlike most real image sensors, this one's native color space is linear sRGB.
 *
 * The sensor is abstracted as operating as a pipeline 3 stages deep;
 * conceptually, each frame to be captured goes through these three stages. The
 * processing step for the sensor is marked off by vertical sync signals, which
 * indicate the start of readout of the oldest frame. The interval between
 * processing steps depends on the frame duration of the frame currently being
 * captured. The stages are 1) configure, 2) capture, and 3) readout. During
 * configuration, the sensor's registers for settings such as exposure time,
 * frame duration, and gain are set for the next frame to be captured. In stage
 * 2, the image data for the frame is actually captured by the sensor. Finally,
 * in stage 3, the just-captured data is read out and sent to the rest of the
 * system.
 *
 * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
 * sensor are exposed earlier in time than larger-numbered rows, with the time
 * offset between each row being equal to the row readout time.
 *
 * The characteristics of this sensor don't correspond to any actual sensor,
 * but are not far off typical sensors.
 *
 * Example timing diagram, with three frames:
 *   Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
 *   Frame 2: Frame duration 75 ms, exposure time 65 ms.
 * Legend:
 *   C = update sensor registers for frame
 *   v = row in reset (vertical blanking interval)
 *   E = row capturing image data
 *   R = row being read out
 *   | = vertical sync signal
 *time(ms)|   0          55        105       155            230     270
 * Frame 0|   :configure : capture : readout :              :       :
 *  Row # |   ..|CCCC______|_________|_________|            :       :
 *      0 |   :\          \vvvvvEEEER         \             :       :
 *    500 |   :  \          \vvvvvEEEER         \           :       :
 *   1000 |   :    \          \vvvvvEEEER         \         :       :
 *   1500 |   :      \          \vvvvvEEEER         \       :       :
 *   2000 |   :        \__________\vvvvvEEEER_________\     :       :
 * Frame 1|   :          configure  capture    readout      :       :
 *  Row # |   :          |CCCC_____|_________|______________|       :
 *      0 |   :          :\         \vvvvvEEEER              \      :
 *    500 |   :          :  \         \vvvvvEEEER              \    :
 *   1000 |   :          :    \         \vvvvvEEEER              \  :
 *   1500 |   :          :      \         \vvvvvEEEER              \:
 *   2000 |   :          :        \_________\vvvvvEEEER_____________\
 * Frame 2|   :          :          configure     capture   readout :
 *  Row # |   :          :          |CCCC_____|______________|_______|...
 *      0 |   :          :          :\         \vEEEEEEEEEEEEER     \
 *    500 |   :          :          :  \         \vEEEEEEEEEEEEER     \
 *   1000 |   :          :          :    \         \vEEEEEEEEEEEEER     \
 *   1500 |   :          :          :      \         \vEEEEEEEEEEEEER     \
 *   2000 |   :          :          :        \_________\vEEEEEEEEEEEEER____\
 */

75#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
76#define HW_EMULATOR_CAMERA2_SENSOR_H
77
78#include "utils/Thread.h"
79#include "utils/Mutex.h"
80#include "utils/Timers.h"
81#include <utils/String8.h>
82
83#include "Scene.h"
84//#include "Base.h"
85#include "camera_hw.h"
86#include <cstdlib>
87
88namespace android {
89
// Mirror/flip transform applied to the sensor output image.
typedef enum camera_mirror_flip_e {
    MF_NORMAL = 0,      // No transform
    MF_MIRROR,          // Horizontal mirror
    MF_FLIP,            // Vertical flip
    MF_MIRROR_FLIP,     // Both mirror and flip (180-degree rotation)
} camera_mirror_flip_t;
96
97
98typedef enum camera_wb_flip_e {
99 CAM_WB_AUTO = 0,
100 CAM_WB_CLOUD,
101 CAM_WB_DAYLIGHT,
102 CAM_WB_INCANDESCENCE,
103 CAM_WB_TUNGSTEN,
104 CAM_WB_FLUORESCENT,
105 CAM_WB_MANUAL,
106 CAM_WB_SHADE,
107 CAM_WB_TWILIGHT,
108 CAM_WB_WARM_FLUORESCENT,
109}camera_wb_flip_t;
110
// Special color effects applied to the captured image.
typedef enum camera_effect_flip_e {
    CAM_EFFECT_ENC_NORMAL = 0,      // No effect
    CAM_EFFECT_ENC_GRAYSCALE,       // Monochrome
    CAM_EFFECT_ENC_SEPIA,           // Sepia tone
    CAM_EFFECT_ENC_SEPIAGREEN,      // Green-tinted sepia
    CAM_EFFECT_ENC_SEPIABLUE,       // Blue-tinted sepia
    CAM_EFFECT_ENC_COLORINV,        // Color inversion (negative)
} camera_effect_flip_t;
119
// Night-mode (low-light) capture setting.
typedef enum camera_night_mode_flip_e {
    CAM_NM_AUTO = 0,    // Sensor decides automatically
    CAM_NM_ENABLE,      // Force night mode on
} camera_night_mode_flip_t;
124
125typedef enum camera_banding_mode_flip_e {
126 CAM_ANTIBANDING_DISABLED= V4L2_CID_POWER_LINE_FREQUENCY_DISABLED,
127 CAM_ANTIBANDING_50HZ = V4L2_CID_POWER_LINE_FREQUENCY_50HZ,
128 CAM_ANTIBANDING_60HZ = V4L2_CID_POWER_LINE_FREQUENCY_60HZ,
129 CAM_ANTIBANDING_AUTO,
130 CAM_ANTIBANDING_OFF,
131}camera_banding_mode_flip_t;
132
// Flash unit operating modes.
typedef enum camera_flashlight_status_e {
    FLASHLIGHT_AUTO = 0,    // Fire flash automatically when needed
    FLASHLIGHT_ON,          // Always fire for capture
    FLASHLIGHT_OFF,         // Never fire
    FLASHLIGHT_TORCH,       // Continuous torch illumination
    FLASHLIGHT_RED_EYE,     // Red-eye reduction sequence
} camera_flashlight_status_t;
140
// Autofocus operating modes.
typedef enum camera_focus_mode_e {
    CAM_FOCUS_MODE_RELEASE = 0,     // Focus released / idle
    CAM_FOCUS_MODE_FIXED,           // Fixed-focus lens
    CAM_FOCUS_MODE_INFINITY,        // Focus locked at infinity
    CAM_FOCUS_MODE_AUTO,            // Single-shot autofocus
    CAM_FOCUS_MODE_MACRO,           // Close-range (macro) focus
    CAM_FOCUS_MODE_EDOF,            // Extended depth of field
    CAM_FOCUS_MODE_CONTI_VID,       // Continuous focus for video
    CAM_FOCUS_MODE_CONTI_PIC,       // Continuous focus for still capture
} camera_focus_mode_t;
151
// Buffer-transport mechanism used between the V4L2 driver and the HAL.
typedef enum sensor_type_e {
    SENSOR_MMAP = 0,        // V4L2 memory-mapped buffers
    SENSOR_ION,             // ION shared memory
    SENSOR_ION_MPLANE,      // ION with multi-planar V4L2 API
    SENSOR_DMA,             // DMA buffers
    SENSOR_CANVAS_MODE,     // Platform canvas mode
    SENSOR_USB,             // USB (UVC) camera
    SENSOR_SHARE_FD,        // Shared file-descriptor buffers
} sensor_type_t;
161
// Which side of the device the sensor faces.
typedef enum sensor_face_type_e {
    SENSOR_FACE_NONE = 0,   // Unknown / unspecified facing
    SENSOR_FACE_FRONT,      // Front-facing (same side as screen)
    SENSOR_FACE_BACK,       // Rear-facing
} sensor_face_type_t;
167
// One discrete frame size (in pixels) reported by a USB camera,
// mirroring the layout of V4L2's struct v4l2_frmsize_discrete.
typedef struct usb_frmsize_discrete {
    uint32_t width;     // Frame width in pixels
    uint32_t height;    // Frame height in pixels
} usb_frmsize_discrete_t;
172
173#define IOCTL_MASK_ROTATE (1<<0)
174
175class Sensor: private Thread, public virtual RefBase {
176 public:
177
178 Sensor();
179 ~Sensor();
180
181 /*
182 * Power control
183 */
184 void sendExitSingalToSensor();
185 status_t startUp(int idx);
186 status_t shutDown();
187
188 int getOutputFormat();
189 int halFormatToSensorFormat(uint32_t pixelfmt);
190 status_t setOutputFormat(int width, int height, int pixelformat, bool isjpeg);
191 void setPictureRotate(int rotate);
192 int getPictureRotate();
193 uint32_t getStreamUsage(int stream_type);
194
195 status_t streamOn();
196 status_t streamOff();
197
198 int getPictureSizes(int32_t picSizes[], int size, bool preview);
199 int getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size);
200 int64_t getMinFrameDuration();
201 int getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size);
202 bool isStreaming();
203 bool isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat);
204 status_t IoctlStateProbe(void);
205 void dump(int fd);
206 /*
207 * Access to scene
208 */
209 Scene &getScene();
210
211 /*
212 * Controls that can be updated every frame
213 */
214
215 int getZoom(int *zoomMin, int *zoomMax, int *zoomStep);
216 int setZoom(int zoomValue);
217 int getExposure(int *mamExp, int *minExp, int *def, camera_metadata_rational *step);
218 status_t setExposure(int expCmp);
219 status_t setEffect(uint8_t effect);
220 int getAntiBanding(uint8_t *antiBanding, uint8_t maxCont);
221 status_t setAntiBanding(uint8_t antiBanding);
222 status_t setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
223 int getAWB(uint8_t *awbMode, uint8_t maxCount);
224 status_t setAWB(uint8_t awbMode);
225 status_t setAutoFocuas(uint8_t afMode);
226 int getAutoFocus(uint8_t *afMode, uint8_t maxCount);
227 void setExposureTime(uint64_t ns);
228 void setFrameDuration(uint64_t ns);
229 void setSensitivity(uint32_t gain);
230 // Buffer must be at least stride*height*2 bytes in size
231 void setDestinationBuffers(Buffers *buffers);
232 // To simplify tracking sensor's current frame
233 void setFrameNumber(uint32_t frameNumber);
234
235 status_t force_reset_sensor();
236 /*
237 * Controls that cause reconfiguration delay
238 */
239
240 void setBinning(int horizontalFactor, int verticalFactor);
241
242 /*
243 * Synchronizing with sensor operation (vertical sync)
244 */
245
246 // Wait until the sensor outputs its next vertical sync signal, meaning it
247 // is starting readout of its latest frame of data. Returns true if vertical
248 // sync is signaled, false if the wait timed out.
249 status_t waitForVSync(nsecs_t reltime);
250
251 // Wait until a new frame has been read out, and then return the time
252 // capture started. May return immediately if a new frame has been pushed
253 // since the last wait for a new frame. Returns true if new frame is
254 // returned, false if timed out.
255 status_t waitForNewFrame(nsecs_t reltime,
256 nsecs_t *captureTime);
257
258 /*
259 * Interrupt event servicing from the sensor. Only triggers for sensor
260 * cycles that have valid buffers to write to.
261 */
262 struct SensorListener {
263 enum Event {
264 EXPOSURE_START, // Start of exposure
265 ERROR_CAMERA_DEVICE,
266 };
267
268 virtual void onSensorEvent(uint32_t frameNumber, Event e,
269 nsecs_t timestamp) = 0;
270 virtual ~SensorListener();
271 };
272
273 void setSensorListener(SensorListener *listener);
274
275 /**
276 * Static sensor characteristics
277 */
278 static const unsigned int kResolution[2];
279
280 static const nsecs_t kExposureTimeRange[2];
281 static const nsecs_t kFrameDurationRange[2];
282 static const nsecs_t kMinVerticalBlank;
283
284 static const uint8_t kColorFilterArrangement;
285
286 // Output image data characteristics
287 static const uint32_t kMaxRawValue;
288 static const uint32_t kBlackLevel;
289 // Sensor sensitivity, approximate
290
291 static const float kSaturationVoltage;
292 static const uint32_t kSaturationElectrons;
293 static const float kVoltsPerLuxSecond;
294 static const float kElectronsPerLuxSecond;
295
296 static const float kBaseGainFactor;
297
298 static const float kReadNoiseStddevBeforeGain; // In electrons
299 static const float kReadNoiseStddevAfterGain; // In raw digital units
300 static const float kReadNoiseVarBeforeGain;
301 static const float kReadNoiseVarAfterGain;
302
303 // While each row has to read out, reset, and then expose, the (reset +
304 // expose) sequence can be overlapped by other row readouts, so the final
305 // minimum frame duration is purely a function of row readout time, at least
306 // if there's a reasonable number of rows.
307 static const nsecs_t kRowReadoutTime;
308
309 static const int32_t kSensitivityRange[2];
310 static const uint32_t kDefaultSensitivity;
311
312 sensor_type_e getSensorType(void);
313
314 sensor_face_type_e mSensorFace;
315
316 private:
317 Mutex mControlMutex; // Lock before accessing control parameters
318 // Start of control parameters
319 Condition mVSync;
320 bool mGotVSync;
321 uint64_t mExposureTime;
322 uint64_t mFrameDuration;
323 uint32_t mGainFactor;
324 Buffers *mNextBuffers;
325 uint8_t *mKernelBuffer;
326 uintptr_t mKernelPhysAddr;
327 uint32_t mFrameNumber;
328 int mRotateValue;
329
330 // End of control parameters
331
332 int mEV;
333
334 Mutex mReadoutMutex; // Lock before accessing readout variables
335 // Start of readout variables
336 Condition mReadoutAvailable;
337 Condition mReadoutComplete;
338 Buffers *mCapturedBuffers;
339 nsecs_t mCaptureTime;
340 SensorListener *mListener;
341 // End of readout variables
342
343 bool mExitSensorThread;
344
345 // Time of sensor startup, used for simulation zero-time point
346 nsecs_t mStartupTime;
347
348 //store the v4l2 info
349 struct VideoInfo *vinfo;
350
351 struct timeval mTimeStart, mTimeEnd;
352 struct timeval mTestStart, mTestEnd;
353
354 uint32_t mFramecount;
355 float mCurFps;
356
357 enum sensor_type_e mSensorType;
358 unsigned int mIoctlSupport;
359 unsigned int msupportrotate;
360 uint32_t mTimeOutCount;
361
362 /**
363 * Inherited Thread virtual overrides, and members only used by the
364 * processing thread
365 */
366 private:
367 virtual status_t readyToRun();
368
369 virtual bool threadLoop();
370
371 nsecs_t mNextCaptureTime;
372 Buffers *mNextCapturedBuffers;
373
374 Scene mScene;
375
376 int captureNewImageWithGe2d();
377 int captureNewImage();
378 void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
379 void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
380 void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
381 void captureNV21(StreamBuffer b, uint32_t gain);
382 void captureYV12(StreamBuffer b, uint32_t gain);
383 void captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride);
384 void YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height);
385 void YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height);
386};
387
388}
389
390#endif // HW_EMULATOR_CAMERA2_SENSOR_H
391