summaryrefslogtreecommitdiff
path: root/v3/fake-pipeline2/Sensor.h (plain)
blob: 4f6cc4c4b2a4c292b1c10e5ba228efc9d4abf491
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/**
18 * This class is a simple simulation of a typical CMOS cellphone imager chip,
19 * which outputs 12-bit Bayer-mosaic raw images.
20 *
21 * Unlike most real image sensors, this one's native color space is linear sRGB.
22 *
23 * The sensor is abstracted as operating as a pipeline 3 stages deep;
24 * conceptually, each frame to be captured goes through these three stages. The
25 * processing step for the sensor is marked off by vertical sync signals, which
26 * indicate the start of readout of the oldest frame. The interval between
27 * processing steps depends on the frame duration of the frame currently being
28 * captured. The stages are 1) configure, 2) capture, and 3) readout. During
29 * configuration, the sensor's registers for settings such as exposure time,
30 * frame duration, and gain are set for the next frame to be captured. In stage
31 * 2, the image data for the frame is actually captured by the sensor. Finally,
32 * in stage 3, the just-captured data is read out and sent to the rest of the
33 * system.
34 *
35 * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
36 * sensor are exposed earlier in time than larger-numbered rows, with the time
37 * offset between each row being equal to the row readout time.
38 *
39 * The characteristics of this sensor don't correspond to any actual sensor,
40 * but are not far off typical sensors.
41 *
42 * Example timing diagram, with three frames:
43 * Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
44 * Frame 2: Frame duration 75 ms, exposure time 65 ms.
45 * Legend:
46 * C = update sensor registers for frame
47 * v = row in reset (vertical blanking interval)
48 * E = row capturing image data
49 * R = row being read out
50 * | = vertical sync signal
51 *time(ms)| 0 55 105 155 230 270
52 * Frame 0| :configure : capture : readout : : :
53 * Row # | ..|CCCC______|_________|_________| : :
54 * 0 | :\ \vvvvvEEEER \ : :
55 * 500 | : \ \vvvvvEEEER \ : :
56 * 1000 | : \ \vvvvvEEEER \ : :
57 * 1500 | : \ \vvvvvEEEER \ : :
58 * 2000 | : \__________\vvvvvEEEER_________\ : :
59 * Frame 1| : configure capture readout : :
60 * Row # | : |CCCC_____|_________|______________| :
61 * 0 | : :\ \vvvvvEEEER \ :
62 * 500 | : : \ \vvvvvEEEER \ :
63 * 1000 | : : \ \vvvvvEEEER \ :
64 * 1500 | : : \ \vvvvvEEEER \ :
65 * 2000 | : : \_________\vvvvvEEEER______________\ :
66 * Frame 2| : : configure capture readout:
67 * Row # | : : |CCCC_____|______________|_______|...
68 * 0 | : : :\ \vEEEEEEEEEEEEER \
69 * 500 | : : : \ \vEEEEEEEEEEEEER \
70 * 1000 | : : : \ \vEEEEEEEEEEEEER \
71 * 1500 | : : : \ \vEEEEEEEEEEEEER \
72 * 2000 | : : : \_________\vEEEEEEEEEEEEER_______\
73 */
74
75#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
76#define HW_EMULATOR_CAMERA2_SENSOR_H
77
78#include "utils/Thread.h"
79#include "utils/Mutex.h"
80#include "utils/Timers.h"
81#include <utils/String8.h>
82
83#include "Scene.h"
84//#include "Base.h"
85#include "camera_hw.h"
86#include <cstdlib>
87
88namespace android {
89
/* Mirror/flip transforms that can be applied to the sensor output. */
typedef enum camera_mirror_flip_e {
    MF_NORMAL      = 0, /* pass-through, no transform */
    MF_MIRROR      = 1, /* mirrored */
    MF_FLIP        = 2, /* flipped */
    MF_MIRROR_FLIP = 3, /* both mirrored and flipped */
} camera_mirror_flip_t;
96
97
/* White-balance modes selectable on the camera. */
typedef enum camera_wb_flip_e {
    CAM_WB_AUTO             = 0,
    CAM_WB_CLOUD            = 1,
    CAM_WB_DAYLIGHT         = 2,
    CAM_WB_INCANDESCENCE    = 3,
    CAM_WB_TUNGSTEN         = 4,
    CAM_WB_FLUORESCENT      = 5,
    CAM_WB_MANUAL           = 6,
    CAM_WB_SHADE            = 7,
    CAM_WB_TWILIGHT         = 8,
    CAM_WB_WARM_FLUORESCENT = 9,
} camera_wb_flip_t;
110
/* Color-effect modes selectable on the camera. */
typedef enum camera_effect_flip_e {
    CAM_EFFECT_ENC_NORMAL     = 0, /* no effect */
    CAM_EFFECT_ENC_GRAYSCALE  = 1,
    CAM_EFFECT_ENC_SEPIA      = 2,
    CAM_EFFECT_ENC_SEPIAGREEN = 3,
    CAM_EFFECT_ENC_SEPIABLUE  = 4,
    CAM_EFFECT_ENC_COLORINV   = 5, /* color inversion */
} camera_effect_flip_t;
119
/* Night-mode switch: automatic selection or forced on. */
typedef enum camera_night_mode_flip_e {
    CAM_NM_AUTO   = 0,
    CAM_NM_ENABLE = 1,
} camera_night_mode_flip_t;
124
/*
 * Anti-banding (power-line flicker compensation) modes.
 * The first three enumerators are pinned to the V4L2 power-line-frequency
 * control menu values so they can be handed to the driver unchanged;
 * AUTO and OFF continue the sequence after the 60HZ value.
 */
typedef enum camera_banding_mode_flip_e {
    CAM_ANTIBANDING_DISABLED= V4L2_CID_POWER_LINE_FREQUENCY_DISABLED,
    CAM_ANTIBANDING_50HZ  = V4L2_CID_POWER_LINE_FREQUENCY_50HZ,
    CAM_ANTIBANDING_60HZ  = V4L2_CID_POWER_LINE_FREQUENCY_60HZ,
    CAM_ANTIBANDING_AUTO,
    CAM_ANTIBANDING_OFF,
}camera_banding_mode_flip_t;
132
/* Flash / torch operating states. */
typedef enum camera_flashlight_status_e {
    FLASHLIGHT_AUTO    = 0, /* fire only when the scene needs it */
    FLASHLIGHT_ON      = 1,
    FLASHLIGHT_OFF     = 2,
    FLASHLIGHT_TORCH   = 3, /* continuously lit */
    FLASHLIGHT_RED_EYE = 4, /* red-eye reduction sequence */
} camera_flashlight_status_t;
140
/* Autofocus operating modes. */
typedef enum camera_focus_mode_e {
    CAM_FOCUS_MODE_RELEASE   = 0, /* release focus lock */
    CAM_FOCUS_MODE_FIXED     = 1,
    CAM_FOCUS_MODE_INFINITY  = 2,
    CAM_FOCUS_MODE_AUTO      = 3,
    CAM_FOCUS_MODE_MACRO     = 4,
    CAM_FOCUS_MODE_EDOF      = 5, /* extended depth of field */
    CAM_FOCUS_MODE_CONTI_VID = 6, /* continuous, tuned for video */
    CAM_FOCUS_MODE_CONTI_PIC = 7, /* continuous, tuned for stills */
} camera_focus_mode_t;
151
/* Buffer-transport mechanism used to read frames from the sensor driver. */
typedef enum sensor_type_e {
    SENSOR_MMAP        = 0, /* classic V4L2 mmap buffers */
    SENSOR_ION         = 1,
    SENSOR_ION_MPLANE  = 2,
    SENSOR_DMA         = 3,
    SENSOR_CANVAS_MODE = 4,
    SENSOR_USB         = 5,
    SENSOR_SHARE_FD    = 6,
} sensor_type_t;
161
/* Which side of the device the sensor faces. */
typedef enum sensor_face_type_e {
    SENSOR_FACE_NONE  = 0, /* unknown / not specified */
    SENSOR_FACE_FRONT = 1,
    SENSOR_FACE_BACK  = 2,
} sensor_face_type_t;
167
/* One discrete frame size (width x height, in pixels) supported by a USB
 * camera; mirrors the V4L2 v4l2_frmsize_discrete layout. */
typedef struct usb_frmsize_discrete {
    uint32_t width;
    uint32_t height;
} usb_frmsize_discrete_t;
172
// Bit flag for mIoctlSupport — presumably set when the driver accepts a
// rotate ioctl (see IoctlStateProbe); TODO confirm against Sensor.cpp.
#define IOCTL_MASK_ROTATE (1<<0)
174
/*
 * Simulated/bridged camera sensor. Runs its own thread (privately inherits
 * Thread) that paces frame capture and readout; refcounted via RefBase.
 * Frames are read from a V4L2 device (see struct VideoInfo *vinfo) and/or
 * synthesized from mScene.
 */
class Sensor: private Thread, public virtual RefBase {
  public:

    Sensor();
    ~Sensor();

    /*
     * Power control
     */
    // NOTE(review): "Singal" is a typo for "Signal"; name kept since callers
    // elsewhere depend on it. Requests the capture thread to exit.
    void sendExitSingalToSensor();
    // Opens/starts the sensor identified by camera index idx.
    status_t startUp(int idx);
    status_t shutDown();

    /*
     * Output format configuration and queries
     */
    int getOutputFormat();
    // Maps a HAL pixel format to the matching sensor/V4L2 format code.
    int halFormatToSensorFormat(uint32_t pixelfmt);
    status_t setOutputFormat(int width, int height, int pixelformat, bool isjpeg);
    void setPictureRotate(int rotate);
    int getPictureRotate();
    uint32_t getStreamUsage(int stream_type);

    status_t streamOn();
    status_t streamOff();

    // Fills picSizes with supported (width, height) pairs; returns count used.
    int getPictureSizes(int32_t picSizes[], int size, bool preview);
    int getStreamConfigurations(uint32_t picSizes[], const int32_t kAvailableFormats[], int size);
    int64_t getMinFrameDuration();
    int getStreamConfigurationDurations(uint32_t picSizes[], int64_t duration[], int size);
    bool isStreaming();
    // True if switching to the given size/format requires a stream restart.
    bool isNeedRestart(uint32_t width, uint32_t height, uint32_t pixelformat);
    // Probes which ioctls the driver supports (results cached in mIoctlSupport).
    status_t IoctlStateProbe(void);
    void dump(int fd);
    /*
     * Access to scene
     */
    Scene &getScene();

    /*
     * Controls that can be updated every frame
     */

    int getZoom(int *zoomMin, int *zoomMax, int *zoomStep);
    int setZoom(int zoomValue);
    // NOTE(review): "mamExp" is presumably a typo for "maxExp" — confirm in .cpp.
    int getExposure(int *mamExp, int *minExp, int *def, camera_metadata_rational *step);
    status_t setExposure(int expCmp);
    status_t setEffect(uint8_t effect);
    int getAntiBanding(uint8_t *antiBanding, uint8_t maxCont);
    status_t setAntiBanding(uint8_t antiBanding);
    // NOTE(review): "Focuas" is a typo for "Focus" in the two names below;
    // kept for source compatibility with existing callers.
    status_t setFocuasArea(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
    int getAWB(uint8_t *awbMode, uint8_t maxCount);
    status_t setAWB(uint8_t awbMode);
    status_t setAutoFocuas(uint8_t afMode);
    int getAutoFocus(uint8_t *afMode, uint8_t maxCount);
    void setExposureTime(uint64_t ns);
    void setFrameDuration(uint64_t ns);
    void setSensitivity(uint32_t gain);
    // Buffer must be at least stride*height*2 bytes in size
    void setDestinationBuffers(Buffers *buffers);
    // To simplify tracking sensor's current frame
    void setFrameNumber(uint32_t frameNumber);

    /*
     * Controls that cause reconfiguration delay
     */

    void setBinning(int horizontalFactor, int verticalFactor);

    /*
     * Synchronizing with sensor operation (vertical sync)
     */

    // Wait until the sensor outputs its next vertical sync signal, meaning it
    // is starting readout of its latest frame of data. Returns true if vertical
    // sync is signaled, false if the wait timed out.
    status_t waitForVSync(nsecs_t reltime);

    // Wait until a new frame has been read out, and then return the time
    // capture started. May return immediately if a new frame has been pushed
    // since the last wait for a new frame. Returns true if new frame is
    // returned, false if timed out.
    status_t waitForNewFrame(nsecs_t reltime,
            nsecs_t *captureTime);

    /*
     * Interrupt event servicing from the sensor. Only triggers for sensor
     * cycles that have valid buffers to write to.
     */
    struct SensorListener {
        enum Event {
            EXPOSURE_START, // Start of exposure
            ERROR_CAMERA_DEVICE,
        };

        virtual void onSensorEvent(uint32_t frameNumber, Event e,
                nsecs_t timestamp) = 0;
        virtual ~SensorListener();
    };

    void setSensorListener(SensorListener *listener);

    /**
     * Static sensor characteristics
     */
    static const unsigned int kResolution[2];

    static const nsecs_t kExposureTimeRange[2];
    static const nsecs_t kFrameDurationRange[2];
    static const nsecs_t kMinVerticalBlank;

    static const uint8_t kColorFilterArrangement;

    // Output image data characteristics
    static const uint32_t kMaxRawValue;
    static const uint32_t kBlackLevel;
    // Sensor sensitivity, approximate

    static const float kSaturationVoltage;
    static const uint32_t kSaturationElectrons;
    static const float kVoltsPerLuxSecond;
    static const float kElectronsPerLuxSecond;

    static const float kBaseGainFactor;

    static const float kReadNoiseStddevBeforeGain; // In electrons
    static const float kReadNoiseStddevAfterGain;  // In raw digital units
    static const float kReadNoiseVarBeforeGain;
    static const float kReadNoiseVarAfterGain;

    // While each row has to read out, reset, and then expose, the (reset +
    // expose) sequence can be overlapped by other row readouts, so the final
    // minimum frame duration is purely a function of row readout time, at least
    // if there's a reasonable number of rows.
    static const nsecs_t kRowReadoutTime;

    static const int32_t kSensitivityRange[2];
    static const uint32_t kDefaultSensitivity;

    sensor_type_e getSensorType(void);

    // Which way the active sensor faces (front/back/none).
    sensor_face_type_e mSensorFace;

  private:
    Mutex mControlMutex; // Lock before accessing control parameters
    // Start of control parameters
    Condition mVSync;
    bool      mGotVSync;
    uint64_t  mExposureTime;     // ns
    uint64_t  mFrameDuration;    // ns
    uint32_t  mGainFactor;
    Buffers  *mNextBuffers;
    uint8_t  *mKernelBuffer;
    uintptr_t mKernelPhysAddr;
    uint32_t  mFrameNumber;
    int       mRotateValue;

    // End of control parameters

    int mEV; // exposure-compensation value set via setExposure()

    Mutex mReadoutMutex; // Lock before accessing readout variables
    // Start of readout variables
    Condition mReadoutAvailable;
    Condition mReadoutComplete;
    Buffers  *mCapturedBuffers;
    nsecs_t   mCaptureTime;
    SensorListener *mListener;
    // End of readout variables

    // Set by sendExitSingalToSensor(); polled by the capture thread.
    bool mExitSensorThread;

    // Time of sensor startup, used for simulation zero-time point
    nsecs_t mStartupTime;

    //store the v4l2 info
    struct VideoInfo *vinfo;

    // Frame-rate measurement bookkeeping.
    struct timeval mTimeStart, mTimeEnd;
    struct timeval mTestStart, mTestEnd;

    uint32_t mFramecount;
    float    mCurFps;

    enum sensor_type_e mSensorType;
    unsigned int mIoctlSupport;   // bitmask of IOCTL_MASK_* flags
    unsigned int msupportrotate;

    /**
     * Inherited Thread virtual overrides, and members only used by the
     * processing thread
     */
  private:
    virtual status_t readyToRun();

    // Main capture/readout loop; runs until mExitSensorThread is set.
    virtual bool threadLoop();

    nsecs_t  mNextCaptureTime;
    Buffers *mNextCapturedBuffers;

    Scene mScene;

    // Capture helpers: each fills the destination with the current frame in
    // the named layout, applying the given gain.
    int captureNewImageWithGe2d();
    int captureNewImage();
    void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
    void captureNV21(StreamBuffer b, uint32_t gain);
    void captureYV12(StreamBuffer b, uint32_t gain);
    void captureYUYV(uint8_t *img, uint32_t gain, uint32_t stride);
    // Pixel-format converters (packed YUYV to planar/semi-planar 4:2:0).
    void YUYVToNV21(uint8_t *src, uint8_t *dst, int width, int height);
    void YUYVToYV12(uint8_t *src, uint8_t *dst, int width, int height);
};
385
386}
387
388#endif // HW_EMULATOR_CAMERA2_SENSOR_H
389