path: root/v3/fake-pipeline2/Sensor.h (plain)
blob: 5b47a48aea023833d3eece1ea51ac824949c9cab
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This class is a simple simulation of a typical CMOS cellphone imager chip,
 * which outputs 12-bit Bayer-mosaic raw images.
 *
 * Unlike most real image sensors, this one's native color space is linear sRGB.
 *
 * The sensor is modeled as a pipeline three stages deep; conceptually, each
 * frame to be captured goes through these three stages. The processing step
 * for the sensor is marked off by vertical sync signals, which indicate the
 * start of readout of the oldest frame. The interval between processing steps
 * depends on the frame duration of the frame currently being captured. The
 * stages are 1) configure, 2) capture, and 3) readout. During configuration,
 * the sensor's registers for settings such as exposure time, frame duration,
 * and gain are set for the next frame to be captured. In stage 2, the image
 * data for the frame is actually captured by the sensor. Finally, in stage 3,
 * the just-captured data is read out and sent to the rest of the system.
 *
 * The sensor is assumed to use a rolling shutter, so low-numbered rows are
 * exposed earlier in time than higher-numbered rows, with the time offset
 * between adjacent rows equal to the row readout time.
 *
 * The characteristics of this sensor don't correspond to any actual sensor,
 * but they are not far off from typical sensors.
 *
 * Example timing diagram, with three frames:
 *   Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
 *   Frame   2: Frame duration 75 ms, exposure time 65 ms.
 * Legend:
 *   C = update sensor registers for frame
 *   v = row in reset (vertical blanking interval)
 *   E = row capturing image data
 *   R = row being read out
 *   | = vertical sync signal
 *time(ms)|   0          55        105       155            230     270
 * Frame 0|  :configure : capture : readout :              :       :
 *  Row # | ..|CCCC______|_________|_________|              :       :
 *      0 |   :\          \vvvvvEEEER      \                :       :
 *    500 |   :  \          \vvvvvEEEER      \              :       :
 *   1000 |   :    \          \vvvvvEEEER      \            :       :
 *   1500 |   :      \          \vvvvvEEEER      \          :       :
 *   2000 |   :        \__________\vvvvvEEEER_________\     :       :
 * Frame 1|   :          configure  capture      readout    :       :
 *  Row # |   :          |CCCC_____|_________|______________|       :
 *      0 |   :          :\          \vvvvvEEEER      \             :
 *    500 |   :          :  \          \vvvvvEEEER      \           :
 *   1000 |   :          :    \          \vvvvvEEEER      \         :
 *   1500 |   :          :      \          \vvvvvEEEER      \       :
 *   2000 |   :          :        \_________\vvvvvEEEER______\      :
 * Frame 2|   :          :          configure     capture    readout:
 *  Row # |   :          :          |CCCC_____|______________|_______|...
 *      0 |   :          :          :\           \vEEEEEEEEEEEEER    \
 *    500 |   :          :          :  \           \vEEEEEEEEEEEEER    \
 *   1000 |   :          :          :    \           \vEEEEEEEEEEEEER    \
 *   1500 |   :          :          :      \           \vEEEEEEEEEEEEER    \
 *   2000 |   :          :          :        \_________\vEEEEEEEEEEEEER_______\
 */
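
/*
 * Illustrative sketch (not part of the original interface): the rolling-shutter
 * timing described above reduces to simple arithmetic. The helper names below
 * are hypothetical examples, not members of this class; they only assume that
 * a row's exposure ends when its readout begins, and that adjacent rows are
 * offset by one row readout time.
 *
 *   // Readout of `row` starts one row-readout-time after the previous row.
 *   nsecs_t rowReadoutStart(nsecs_t frameReadoutStart, int row,
 *                           nsecs_t rowReadoutTime) {
 *       return frameReadoutStart + row * rowReadoutTime;
 *   }
 *
 *   // Exposure of a row runs right up to the start of its readout.
 *   nsecs_t rowExposureStart(nsecs_t frameReadoutStart, int row,
 *                            nsecs_t rowReadoutTime, nsecs_t exposureTime) {
 *       return rowReadoutStart(frameReadoutStart, row, rowReadoutTime) -
 *              exposureTime;
 *   }
 */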

#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
#define HW_EMULATOR_CAMERA2_SENSOR_H

#include "utils/Thread.h"
#include "utils/Mutex.h"
#include "utils/Timers.h"
#include <utils/String8.h>

#include "Scene.h"
//#include "Base.h"
#include "camera_hw.h"
#include <cstdlib>

namespace android {

typedef enum camera_mirror_flip_e {
    MF_NORMAL = 0,
    MF_MIRROR,
    MF_FLIP,
    MF_MIRROR_FLIP,
} camera_mirror_flip_t;


typedef enum camera_wb_flip_e {
    CAM_WB_AUTO = 0,
    CAM_WB_CLOUD,
    CAM_WB_DAYLIGHT,
    CAM_WB_INCANDESCENCE,
    CAM_WB_TUNGSTEN,
    CAM_WB_FLUORESCENT,
    CAM_WB_MANUAL,
    CAM_WB_SHADE,
    CAM_WB_TWILIGHT,
    CAM_WB_WARM_FLUORESCENT,
} camera_wb_flip_t;

typedef enum camera_effect_flip_e {
    CAM_EFFECT_ENC_NORMAL = 0,
    CAM_EFFECT_ENC_GRAYSCALE,
    CAM_EFFECT_ENC_SEPIA,
    CAM_EFFECT_ENC_SEPIAGREEN,
    CAM_EFFECT_ENC_SEPIABLUE,
    CAM_EFFECT_ENC_COLORINV,
} camera_effect_flip_t;

typedef enum camera_night_mode_flip_e {
    CAM_NM_AUTO = 0,
    CAM_NM_ENABLE,
} camera_night_mode_flip_t;

typedef enum camera_banding_mode_flip_e {
    CAM_ANTIBANDING_DISABLED = V4L2_CID_POWER_LINE_FREQUENCY_DISABLED,
    CAM_ANTIBANDING_50HZ     = V4L2_CID_POWER_LINE_FREQUENCY_50HZ,
    CAM_ANTIBANDING_60HZ     = V4L2_CID_POWER_LINE_FREQUENCY_60HZ,
    CAM_ANTIBANDING_AUTO,
    CAM_ANTIBANDING_OFF,
} camera_banding_mode_flip_t;

typedef enum camera_flashlight_status_e {
    FLASHLIGHT_AUTO = 0,
    FLASHLIGHT_ON,
    FLASHLIGHT_OFF,
    FLASHLIGHT_TORCH,
    FLASHLIGHT_RED_EYE,
} camera_flashlight_status_t;

typedef enum camera_focus_mode_e {
    CAM_FOCUS_MODE_RELEASE = 0,
    CAM_FOCUS_MODE_FIXED,
    CAM_FOCUS_MODE_INFINITY,
    CAM_FOCUS_MODE_AUTO,
    CAM_FOCUS_MODE_MACRO,
    CAM_FOCUS_MODE_EDOF,
    CAM_FOCUS_MODE_CONTI_VID,
    CAM_FOCUS_MODE_CONTI_PIC,
} camera_focus_mode_t;

typedef enum sensor_type_e {
    SENSOR_MMAP = 0,
    SENSOR_ION,
    SENSOR_ION_MPLANE,
    SENSOR_DMA,
    SENSOR_CANVAS_MODE,
    SENSOR_USB,
    SENSOR_SHARE_FD,
} sensor_type_t;

typedef enum sensor_face_type_e {
    SENSOR_FACE_NONE = 0,
    SENSOR_FACE_FRONT,
    SENSOR_FACE_BACK,
} sensor_face_type_t;

#define IOCTL_MASK_ROTATE (1<<0)

class Sensor: private Thread, public virtual RefBase {
  public:

    Sensor();
    ~Sensor();

    /*
     * Power control
     */

    status_t startUp(int idx);
    status_t shutDown();

    int getOutputFormat();
    int halFormatToSensorFormat(uint32_t pixelfmt);
    status_t setOutputFormat(int width, int height, int pixelformat, bool isjpeg);
    void setPictureRotate(int rotate);
    int getPictureRotate();
    uint32_t getStreamUsage(int stream_type);

    status_t streamOn();
    status_t streamOff();

    int getPictureSizes(int32_t picSizes[], int size, bool preview);
    int getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size);
    int64_t getMinFrameDuration();
    int getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size);
    bool isStreaming();
    bool isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat);
    status_t IoctlStateProbe(void);
    void dump(int fd);

    /*
     * Access to scene
     */
    Scene &getScene();

    /*
     * Controls that can be updated every frame
     */

    int getZoom(int *zoomMin, int *zoomMax, int *zoomStep);
    int setZoom(int zoomValue);
    int getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step);
    status_t setExposure(int expCmp);
    status_t setEffect(uint8_t effect);
    int getAntiBanding(uint8_t *antiBanding, uint8_t maxCount);
    status_t setAntiBanding(uint8_t antiBanding);
    status_t setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
    int getAWB(uint8_t *awbMode, uint8_t maxCount);
    status_t setAWB(uint8_t awbMode);
    status_t setAutoFocuas(uint8_t afMode);
    int getAutoFocus(uint8_t *afMode, uint8_t maxCount);
    void setExposureTime(uint64_t ns);
    void setFrameDuration(uint64_t ns);
    void setSensitivity(uint32_t gain);
    // Buffer must be at least stride*height*2 bytes in size
    void setDestinationBuffers(Buffers *buffers);
    // To simplify tracking the sensor's current frame
    void setFrameNumber(uint32_t frameNumber);
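
    /*
     * Usage sketch (illustrative only, not part of the interface): a typical
     * per-frame update from the HAL's request-handling code might look like
     * the following. `sensor`, `buffers`, and `frameNumber` are placeholder
     * names, and the numeric values are arbitrary examples.
     *
     *   sensor->setExposureTime(20000000ULL);  // 20 ms, in nanoseconds
     *   sensor->setFrameDuration(50000000ULL); // 50 ms, in nanoseconds
     *   sensor->setSensitivity(100);           // gain setting
     *   sensor->setDestinationBuffers(buffers);
     *   sensor->setFrameNumber(frameNumber);
     */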

    /*
     * Controls that cause reconfiguration delay
     */

    void setBinning(int horizontalFactor, int verticalFactor);

    /*
     * Synchronizing with sensor operation (vertical sync)
     */

    // Wait until the sensor outputs its next vertical sync signal, meaning it
    // is starting readout of its latest frame of data. Returns true if vertical
    // sync is signaled, false if the wait timed out.
    bool waitForVSync(nsecs_t reltime);

    // Wait until a new frame has been read out, and then return the time
    // capture started. May return immediately if a new frame has been pushed
    // since the last wait for a new frame. Returns true if a new frame is
    // returned, false if the wait timed out.
    bool waitForNewFrame(nsecs_t reltime,
                         nsecs_t *captureTime);
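
    /*
     * Usage sketch (illustrative only): a readout loop that consumes completed
     * frames might wait like this. The 250 ms timeout is an arbitrary example
     * value, `sensor` is a placeholder name, and error handling is omitted.
     *
     *   nsecs_t captureTime;
     *   if (sensor->waitForNewFrame(250000000LL, &captureTime)) {
     *       // A frame has been read out; captureTime is when its capture began.
     *   } else {
     *       // Timed out waiting for the sensor.
     *   }
     */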

    /*
     * Interrupt event servicing from the sensor. Only triggers for sensor
     * cycles that have valid buffers to write to.
     */
    struct SensorListener {
        enum Event {
            EXPOSURE_START, // Start of exposure
        };

        virtual void onSensorEvent(uint32_t frameNumber, Event e,
                                   nsecs_t timestamp) = 0;
        virtual ~SensorListener();
    };

    void setSensorListener(SensorListener *listener);
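
    /*
     * Sketch of a listener implementation (illustrative only; `MyListener` and
     * `gListener` are placeholder names):
     *
     *   struct MyListener : public Sensor::SensorListener {
     *       void onSensorEvent(uint32_t frameNumber, Event e,
     *                          nsecs_t timestamp) override {
     *           if (e == EXPOSURE_START) {
     *               // Record the exposure-start timestamp for frameNumber.
     *           }
     *       }
     *   };
     *
     *   static MyListener gListener;
     *   sensor->setSensorListener(&gListener);
     */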

    /**
     * Static sensor characteristics
     */
    static const unsigned int kResolution[2];

    static const nsecs_t kExposureTimeRange[2];
    static const nsecs_t kFrameDurationRange[2];
    static const nsecs_t kMinVerticalBlank;

    static const uint8_t kColorFilterArrangement;

    // Output image data characteristics
    static const uint32_t kMaxRawValue;
    static const uint32_t kBlackLevel;
    // Sensor sensitivity, approximate

    static const float kSaturationVoltage;
    static const uint32_t kSaturationElectrons;
    static const float kVoltsPerLuxSecond;
    static const float kElectronsPerLuxSecond;

    static const float kBaseGainFactor;

    static const float kReadNoiseStddevBeforeGain; // In electrons
    static const float kReadNoiseStddevAfterGain;  // In raw digital units
    static const float kReadNoiseVarBeforeGain;
    static const float kReadNoiseVarAfterGain;

    // While each row has to read out, reset, and then expose, the (reset +
    // expose) sequence can be overlapped by other row readouts, so the final
    // minimum frame duration is purely a function of row readout time, at least
    // if there's a reasonable number of rows.
    static const nsecs_t kRowReadoutTime;
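    // Illustrative estimate (not a value defined in this header): under this
    // model the minimum frame duration is roughly
    //   (number of sensor rows) * kRowReadoutTime + kMinVerticalBlank.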

    static const int32_t kSensitivityRange[2];
    static const uint32_t kDefaultSensitivity;

    sensor_type_e getSensorType(void);

    sensor_face_type_e mSensorFace;

  private:
    Mutex mControlMutex; // Lock before accessing control parameters
    // Start of control parameters
    Condition mVSync;
    bool mGotVSync;
    uint64_t mExposureTime;
    uint64_t mFrameDuration;
    uint32_t mGainFactor;
    Buffers *mNextBuffers;
    uint8_t *mKernelBuffer;
    uintptr_t mKernelPhysAddr;
    uint32_t mFrameNumber;
    int mRotateValue;

    // End of control parameters

    int mEV;

    Mutex mReadoutMutex; // Lock before accessing readout variables
    // Start of readout variables
    Condition mReadoutAvailable;
    Condition mReadoutComplete;
    Buffers *mCapturedBuffers;
    nsecs_t mCaptureTime;
    SensorListener *mListener;
    // End of readout variables

    // Time of sensor startup, used for simulation zero-time point
    nsecs_t mStartupTime;

    // Store the v4l2 info
    struct VideoInfo *vinfo;

    struct timeval mTimeStart, mTimeEnd;
    struct timeval mTestStart, mTestEnd;

    uint32_t mFramecount;
    float mCurFps;

    enum sensor_type_e mSensorType;
    unsigned int mIoctlSupport;
    unsigned int msupportrotate;

    /**
     * Inherited Thread virtual overrides, and members only used by the
     * processing thread
     */
  private:
    virtual status_t readyToRun();

    virtual bool threadLoop();

    nsecs_t mNextCaptureTime;
    Buffers *mNextCapturedBuffers;

    Scene mScene;

    int captureNewImageWithGe2d();
    int captureNewImage();
    void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureNV21(StreamBuffer b, uint32_t gain);
    void captureYV12(StreamBuffer b, uint32_t gain);
    void captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride);
    void YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height);
    void YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height);
};

}

#endif // HW_EMULATOR_CAMERA2_SENSOR_H