/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This class is a simple simulation of a typical CMOS cellphone imager chip,
 * which outputs 12-bit Bayer-mosaic raw images.
 *
 * Unlike most real image sensors, this one's native color space is linear sRGB.
 *
 * The sensor is abstracted as operating as a pipeline 3 stages deep;
 * conceptually, each frame to be captured goes through these three stages. The
 * processing step for the sensor is marked off by vertical sync signals, which
 * indicate the start of readout of the oldest frame. The interval between
 * processing steps depends on the frame duration of the frame currently being
 * captured. The stages are 1) configure, 2) capture, and 3) readout. During
 * configuration, the sensor's registers for settings such as exposure time,
 * frame duration, and gain are set for the next frame to be captured. In stage
 * 2, the image data for the frame is actually captured by the sensor. Finally,
 * in stage 3, the just-captured data is read out and sent to the rest of the
 * system.
 *
 * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
 * sensor are exposed earlier in time than larger-numbered rows, with the time
 * offset between each row being equal to the row readout time.
 *
 * The characteristics of this sensor don't correspond to any actual sensor,
 * but are not far off typical sensors.
 *
 * Example timing diagram, with three frames:
 *   Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
 *   Frame   2: Frame duration 75 ms, exposure time 65 ms.
 * Legend:
 *   C = update sensor registers for frame
 *   v = row in reset (vertical blanking interval)
 *   E = row capturing image data
 *   R = row being read out
 *   | = vertical sync signal
 *time(ms)|   0          55        105       155            230     270
 * Frame 0|  :configure : capture : readout :              :       :
 *  Row # |  ..|CCCC______|_________|_________|            :       :
 *      0 |    :\          \vvvvvEEEER      \              :       :
 *    500 |    : \          \vvvvvEEEER      \             :       :
 *   1000 |    :  \          \vvvvvEEEER      \            :       :
 *   1500 |    :   \          \vvvvvEEEER      \           :       :
 *   2000 |    :    \__________\vvvvvEEEER______\          :       :
 * Frame 1|    :          configure  capture    readout    :       :
 *  Row # |    :         |CCCC_____|_________|______________|      :
 *      0 |    :         :\         \vvvvvEEEER             \      :
 *    500 |    :         : \         \vvvvvEEEER             \     :
 *   1000 |    :         :  \         \vvvvvEEEER             \    :
 *   1500 |    :         :   \         \vvvvvEEEER             \   :
 *   2000 |    :         :    \_________\vvvvvEEEER_____________\  :
 * Frame 2|    :         :          configure  capture          readout
 *  Row # |    :         :         |CCCC_____|______________|_______|...
 *      0 |    :         :         :\         \vEEEEEEEEEEEEER       \
 *    500 |    :         :         : \         \vEEEEEEEEEEEEER       \
 *   1000 |    :         :         :  \         \vEEEEEEEEEEEEER       \
 *   1500 |    :         :         :   \         \vEEEEEEEEEEEEER       \
 *   2000 |    :         :         :    \_________\vEEEEEEEEEEEEER_______\
 */
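
/*
 * Illustrative sketch of the rolling-shutter timing described above. The
 * helper names below are for explanation only and are not part of this
 * header's interface; times are in nanoseconds, and rowReadoutTime plays the
 * role of kRowReadoutTime:
 *
 *   nsecs_t rowExposureStart(nsecs_t frameStart, int row,
 *                            nsecs_t rowReadoutTime) {
 *       // Each row starts exposing one row-readout-time after the previous.
 *       return frameStart + row * rowReadoutTime;
 *   }
 *   nsecs_t rowReadoutStart(nsecs_t frameStart, int row,
 *                           nsecs_t exposureTime, nsecs_t rowReadoutTime) {
 *       // A row is read out as soon as its exposure window ends.
 *       return frameStart + exposureTime + row * rowReadoutTime;
 *   }
 *
 * The vertical sync signal corresponds to the start of readout of row 0 of
 * the oldest frame in the pipeline.
 */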

#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
#define HW_EMULATOR_CAMERA2_SENSOR_H

#include "utils/Thread.h"
#include "utils/Mutex.h"
#include "utils/Timers.h"
#include <utils/String8.h>

#include "Scene.h"
//#include "Base.h"
#include "camera_hw.h"
#include <cstdlib>

namespace android {

typedef enum camera_mirror_flip_e {
    MF_NORMAL = 0,
    MF_MIRROR,
    MF_FLIP,
    MF_MIRROR_FLIP,
} camera_mirror_flip_t;


typedef enum camera_wb_flip_e {
    CAM_WB_AUTO = 0,
    CAM_WB_CLOUD,
    CAM_WB_DAYLIGHT,
    CAM_WB_INCANDESCENCE,
    CAM_WB_TUNGSTEN,
    CAM_WB_FLUORESCENT,
    CAM_WB_MANUAL,
    CAM_WB_SHADE,
    CAM_WB_TWILIGHT,
    CAM_WB_WARM_FLUORESCENT,
} camera_wb_flip_t;

typedef enum camera_effect_flip_e {
    CAM_EFFECT_ENC_NORMAL = 0,
    CAM_EFFECT_ENC_GRAYSCALE,
    CAM_EFFECT_ENC_SEPIA,
    CAM_EFFECT_ENC_SEPIAGREEN,
    CAM_EFFECT_ENC_SEPIABLUE,
    CAM_EFFECT_ENC_COLORINV,
} camera_effect_flip_t;

typedef enum camera_night_mode_flip_e {
    CAM_NM_AUTO = 0,
    CAM_NM_ENABLE,
} camera_night_mode_flip_t;

typedef enum camera_banding_mode_flip_e {
    CAM_ANTIBANDING_DISABLED = V4L2_CID_POWER_LINE_FREQUENCY_DISABLED,
    CAM_ANTIBANDING_50HZ     = V4L2_CID_POWER_LINE_FREQUENCY_50HZ,
    CAM_ANTIBANDING_60HZ     = V4L2_CID_POWER_LINE_FREQUENCY_60HZ,
    CAM_ANTIBANDING_AUTO,
    CAM_ANTIBANDING_OFF,
} camera_banding_mode_flip_t;

typedef enum camera_flashlight_status_e {
    FLASHLIGHT_AUTO = 0,
    FLASHLIGHT_ON,
    FLASHLIGHT_OFF,
    FLASHLIGHT_TORCH,
    FLASHLIGHT_RED_EYE,
} camera_flashlight_status_t;

typedef enum camera_focus_mode_e {
    CAM_FOCUS_MODE_RELEASE = 0,
    CAM_FOCUS_MODE_FIXED,
    CAM_FOCUS_MODE_INFINITY,
    CAM_FOCUS_MODE_AUTO,
    CAM_FOCUS_MODE_MACRO,
    CAM_FOCUS_MODE_EDOF,
    CAM_FOCUS_MODE_CONTI_VID,
    CAM_FOCUS_MODE_CONTI_PIC,
} camera_focus_mode_t;

typedef enum sensor_type_e {
    SENSOR_MMAP = 0,
    SENSOR_ION,
    SENSOR_ION_MPLANE,
    SENSOR_DMA,
    SENSOR_CANVAS_MODE,
    SENSOR_USB,
    SENSOR_SHARE_FD,
} sensor_type_t;

typedef enum sensor_face_type_e {
    SENSOR_FACE_NONE = 0,
    SENSOR_FACE_FRONT,
    SENSOR_FACE_BACK,
} sensor_face_type_t;

#define IOCTL_MASK_ROTATE (1 << 0)

class Sensor: private Thread, public virtual RefBase {
  public:

    Sensor();
    ~Sensor();

    /*
     * Power control
     */

    status_t startUp(int idx);
    status_t shutDown();

    int getOutputFormat();
    int halFormatToSensorFormat(uint32_t pixelfmt);
    status_t setOutputFormat(int width, int height, int pixelformat, bool isjpeg);
    void setPictureRotate(int rotate);
    int getPictureRotate();
    uint32_t getStreamUsage(int stream_type);

    status_t streamOn();
    status_t streamOff();

    int getPictureSizes(int32_t picSizes[], int size, bool preview);
    int getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size);
    int getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size);
    bool isStreaming();
    bool isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat);
    status_t IoctlStateProbe(void);
    void dump(int fd);

    /*
     * Access to scene
     */
    Scene &getScene();

    /*
     * Controls that can be updated every frame
     */

    int getZoom(int *zoomMin, int *zoomMax, int *zoomStep);
    int setZoom(int zoomValue);
    int getExposure(int *maxExp, int *minExp, int *def, camera_metadata_rational *step);
    status_t setExposure(int expCmp);
    status_t setEffect(uint8_t effect);
    int getAntiBanding(uint8_t *antiBanding, uint8_t maxCount);
    status_t setAntiBanding(uint8_t antiBanding);
    status_t setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
    int getAWB(uint8_t *awbMode, uint8_t maxCount);
    status_t setAWB(uint8_t awbMode);
    status_t setAutoFocuas(uint8_t afMode);
    int getAutoFocus(uint8_t *afMode, uint8_t maxCount);
    void setExposureTime(uint64_t ns);
    void setFrameDuration(uint64_t ns);
    void setSensitivity(uint32_t gain);
    // Buffer must be at least stride*height*2 bytes in size
    void setDestinationBuffers(Buffers *buffers);
    // To simplify tracking sensor's current frame
    void setFrameNumber(uint32_t frameNumber);
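
    /*
     * Minimal per-frame usage sketch (illustrative only; `sensor`,
     * `nextBuffers`, and `frameNo` are hypothetical caller-side variables):
     *
     *   sensor->setExposureTime(30 * 1000000LL);   // 30 ms exposure
     *   sensor->setFrameDuration(33 * 1000000LL);  // ~30 fps frame duration
     *   sensor->setSensitivity(Sensor::kDefaultSensitivity);
     *   sensor->setDestinationBuffers(nextBuffers); // where to write the frame
     *   sensor->setFrameNumber(frameNo);
     *
     * Per the pipeline description above, these settings are latched during
     * the configure stage and take effect for the next captured frame.
     */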

    /*
     * Controls that cause reconfiguration delay
     */

    void setBinning(int horizontalFactor, int verticalFactor);

    /*
     * Synchronizing with sensor operation (vertical sync)
     */

    // Wait until the sensor outputs its next vertical sync signal, meaning it
    // is starting readout of its latest frame of data. Returns true if vertical
    // sync is signaled, false if the wait timed out.
    bool waitForVSync(nsecs_t reltime);

    // Wait until a new frame has been read out, and then return the time
    // capture started. May return immediately if a new frame has been pushed
    // since the last wait for a new frame. Returns true if new frame is
    // returned, false if timed out.
    bool waitForNewFrame(nsecs_t reltime,
                         nsecs_t *captureTime);
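
    /*
     * Illustrative wait loop (sketch only): a consumer typically blocks on
     * waitForNewFrame() with a caller-chosen timeout (kWaitPerLoop below is
     * hypothetical) and treats a false return as a timeout:
     *
     *   nsecs_t captureTime;
     *   if (sensor->waitForNewFrame(kWaitPerLoop, &captureTime)) {
     *       // A frame finished readout; captureTime is when its capture started.
     *   } else {
     *       // Timed out; no new frame was read out within kWaitPerLoop.
     *   }
     */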

    /*
     * Interrupt event servicing from the sensor. Only triggers for sensor
     * cycles that have valid buffers to write to.
     */
    struct SensorListener {
        enum Event {
            EXPOSURE_START, // Start of exposure
        };

        virtual void onSensorEvent(uint32_t frameNumber, Event e,
                                   nsecs_t timestamp) = 0;
        virtual ~SensorListener();
    };

    void setSensorListener(SensorListener *listener);
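
    /*
     * Sketch of a listener implementation (illustrative; `MyListener` is a
     * hypothetical name). The owner derives from SensorListener and registers
     * itself to be notified when exposure of each frame begins:
     *
     *   struct MyListener : public Sensor::SensorListener {
     *       virtual void onSensorEvent(uint32_t frameNumber, Event e,
     *                                  nsecs_t timestamp) {
     *           if (e == EXPOSURE_START) {
     *               // Exposure of frameNumber started at `timestamp`.
     *           }
     *       }
     *   };
     *   // ... sensor->setSensorListener(&myListener);
     */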

    /**
     * Static sensor characteristics
     */
    static const unsigned int kResolution[2];

    static const nsecs_t kExposureTimeRange[2];
    static const nsecs_t kFrameDurationRange[2];
    static const nsecs_t kMinVerticalBlank;

    static const uint8_t kColorFilterArrangement;

    // Output image data characteristics
    static const uint32_t kMaxRawValue;
    static const uint32_t kBlackLevel;
    // Sensor sensitivity, approximate

    static const float kSaturationVoltage;
    static const uint32_t kSaturationElectrons;
    static const float kVoltsPerLuxSecond;
    static const float kElectronsPerLuxSecond;

    static const float kBaseGainFactor;

    static const float kReadNoiseStddevBeforeGain; // In electrons
    static const float kReadNoiseStddevAfterGain;  // In raw digital units
    static const float kReadNoiseVarBeforeGain;
    static const float kReadNoiseVarAfterGain;

    // While each row has to read out, reset, and then expose, the (reset +
    // expose) sequence can be overlapped by other row readouts, so the final
    // minimum frame duration is purely a function of row readout time, at least
    // if there's a reasonable number of rows.
    static const nsecs_t kRowReadoutTime;

    static const int32_t kSensitivityRange[2];
    static const uint32_t kDefaultSensitivity;
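
    /*
     * Illustrative relationship (sketch, not a guarantee of the constants'
     * actual values): because the (reset + expose) phases of different rows
     * overlap, the shortest achievable frame duration scales with row count,
     * roughly
     *
     *   minFrameDuration ~ (number of sensor rows) * kRowReadoutTime
     *
     * plus whatever minimum vertical blanking (kMinVerticalBlank) applies.
     */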

    sensor_type_e getSensorType(void);

    sensor_face_type_e mSensorFace;

  private:
    Mutex mControlMutex; // Lock before accessing control parameters
    // Start of control parameters
    Condition mVSync;
    bool mGotVSync;
    uint64_t mExposureTime;
    uint64_t mFrameDuration;
    uint32_t mGainFactor;
    Buffers *mNextBuffers;
    uint8_t *mKernelBuffer;
    uintptr_t mKernelPhysAddr;
    uint32_t mFrameNumber;
    int mRotateValue;

    // End of control parameters

    int mEV;

    Mutex mReadoutMutex; // Lock before accessing readout variables
    // Start of readout variables
    Condition mReadoutAvailable;
    Condition mReadoutComplete;
    Buffers *mCapturedBuffers;
    nsecs_t mCaptureTime;
    SensorListener *mListener;
    // End of readout variables

    // Time of sensor startup, used for simulation zero-time point
    nsecs_t mStartupTime;

    // Store the V4L2 info
    struct VideoInfo *vinfo;

    struct timeval mTimeStart, mTimeend;
    unsigned int framecount;
    unsigned int fps;

    enum sensor_type_e mSensorType;
    unsigned int mIoctlSupport;
    unsigned int msupportrotate;

    /**
     * Inherited Thread virtual overrides, and members only used by the
     * processing thread
     */
  private:
    virtual status_t readyToRun();

    virtual bool threadLoop();

    nsecs_t mNextCaptureTime;
    Buffers *mNextCapturedBuffers;

    Scene mScene;

    int captureNewImageWithGe2d();
    int captureNewImage();
    void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureNV21(StreamBuffer b, uint32_t gain);
    void captureYV12(StreamBuffer b, uint32_t gain);
    void captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride);
    void YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height);
    void YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height);
};

} // namespace android

#endif // HW_EMULATOR_CAMERA2_SENSOR_H