summaryrefslogtreecommitdiff
path: root/guest/hals/camera/fake-pipeline2/Sensor.h
blob: 326af2976c11cc117d0c9deba1a98d70c64f064d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This class is a simple simulation of a typical CMOS cellphone imager chip,
 * which outputs 12-bit Bayer-mosaic raw images.
 *
 * Unlike most real image sensors, this one's native color space is linear sRGB.
 *
 * The sensor is abstracted as operating as a pipeline 3 stages deep;
 * conceptually, each frame to be captured goes through these three stages. The
 * processing step for the sensor is marked off by vertical sync signals, which
 * indicate the start of readout of the oldest frame. The interval between
 * processing steps depends on the frame duration of the frame currently being
 * captured. The stages are 1) configure, 2) capture, and 3) readout. During
 * configuration, the sensor's registers for settings such as exposure time,
 * frame duration, and gain are set for the next frame to be captured. In stage
 * 2, the image data for the frame is actually captured by the sensor. Finally,
 * in stage 3, the just-captured data is read out and sent to the rest of the
 * system.
 *
 * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
 * sensor are exposed earlier in time than larger-numbered rows, with the time
 * offset between each row being equal to the row readout time.
 *
 * The characteristics of this sensor don't correspond to any actual sensor,
 * but are not far off typical sensors.
 *
 * Example timing diagram, with three frames:
 *  Frame 0-1: Frame duration 50 ms, exposure time 20 ms.
 *  Frame   2: Frame duration 75 ms, exposure time 65 ms.
 * Legend:
 *   C = update sensor registers for frame
 *   v = row in reset (vertical blanking interval)
 *   E = row capturing image data
 *   R = row being read out
 *   | = vertical sync signal
 *time(ms)|   0          55        105       155            230     270
 * Frame 0|   :configure : capture : readout :              :       :
 *  Row # | ..|CCCC______|_________|_________|              :       :
 *      0 |   :\          \vvvvvEEEER         \             :       :
 *    500 |   : \          \vvvvvEEEER         \            :       :
 *   1000 |   :  \          \vvvvvEEEER         \           :       :
 *   1500 |   :   \          \vvvvvEEEER         \          :       :
 *   2000 |   :    \__________\vvvvvEEEER_________\         :       :
 * Frame 1|   :           configure  capture      readout   :       :
 *  Row # |   :          |CCCC_____|_________|______________|       :
 *      0 |   :          :\         \vvvvvEEEER              \      :
 *    500 |   :          : \         \vvvvvEEEER              \     :
 *   1000 |   :          :  \         \vvvvvEEEER              \    :
 *   1500 |   :          :   \         \vvvvvEEEER              \   :
 *   2000 |   :          :    \_________\vvvvvEEEER______________\  :
 * Frame 2|   :          :          configure     capture    readout:
 *  Row # |   :          :         |CCCC_____|______________|_______|...
 *      0 |   :          :         :\         \vEEEEEEEEEEEEER       \
 *    500 |   :          :         : \         \vEEEEEEEEEEEEER       \
 *   1000 |   :          :         :  \         \vEEEEEEEEEEEEER       \
 *   1500 |   :          :         :   \         \vEEEEEEEEEEEEER       \
 *   2000 |   :          :         :    \_________\vEEEEEEEEEEEEER_______\
 */

#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
#define HW_EMULATOR_CAMERA2_SENSOR_H

#include "utils/Mutex.h"
#include "utils/Thread.h"
#include "utils/Timers.h"

#include "Base.h"
#include "Scene.h"

namespace android {

class EmulatedFakeCamera2;

// Simulated rolling-shutter Bayer image sensor. Runs its own processing
// thread (privately inherits Thread) that advances the three-stage
// configure/capture/readout pipeline described at the top of this file.
// Thread is private because callers drive the sensor through startUp()/
// shutDown() and the control setters, never through the Thread interface.
// RefBase is a public virtual base so instances are reference-counted.
class Sensor : private Thread, public virtual RefBase {
 public:
  // width: Width of pixel array
  // height: Height of pixel array
  Sensor(uint32_t width, uint32_t height);
  ~Sensor();

  /*
   * Power control
   */

  // Starts the sensor's processing thread; returns an error code on failure.
  status_t startUp();
  // Stops the processing thread; returns an error code on failure.
  status_t shutDown();

  /*
   * Access to scene
   */
  // Returns the simulated scene the sensor images. Non-const: callers may
  // mutate the scene between frames.
  Scene &getScene();

  /*
   * Controls that can be updated every frame
   */

  // Exposure time for the next captured frame, in nanoseconds.
  // Expected to stay within kExposureTimeRange — TODO confirm clamping
  // behavior in the implementation.
  void setExposureTime(uint64_t ns);
  // Total frame duration for the next captured frame, in nanoseconds.
  void setFrameDuration(uint64_t ns);
  // Analog gain for the next captured frame; see kSensitivityRange and
  // kBaseGainFactor below.
  void setSensitivity(uint32_t gain);
  // Buffer must be at least stride*height*2 bytes in size
  void setDestinationBuffers(Buffers *buffers);
  // To simplify tracking sensor's current frame
  void setFrameNumber(uint32_t frameNumber);

  /*
   * Controls that cause reconfiguration delay
   */

  // Pixel binning factors in each direction. Unlike the per-frame controls
  // above, changing this incurs a reconfiguration delay.
  void setBinning(int horizontalFactor, int verticalFactor);

  /*
   * Synchronizing with sensor operation (vertical sync)
   */

  // Wait until the sensor outputs its next vertical sync signal, meaning it
  // is starting readout of its latest frame of data. Returns true if vertical
  // sync is signaled, false if the wait timed out.
  bool waitForVSync(nsecs_t reltime);

  // Wait until a new frame has been read out, and then return the time
  // capture started.  May return immediately if a new frame has been pushed
  // since the last wait for a new frame. Returns true if new frame is
  // returned, false if timed out.
  bool waitForNewFrame(nsecs_t reltime, nsecs_t *captureTime);

  /*
   * Interrupt event servicing from the sensor. Only triggers for sensor
   * cycles that have valid buffers to write to.
   */
  struct SensorListener {
    enum Event {
      EXPOSURE_START,  // Start of exposure
    };

    // Called by the sensor's processing thread when the given event fires
    // for the given frame — NOTE(review): presumably invoked on the sensor
    // thread, so implementations should be quick and thread-safe; confirm
    // against the .cpp.
    virtual void onSensorEvent(uint32_t frameNumber, Event e,
                               nsecs_t timestamp) = 0;
    virtual ~SensorListener();
  };

  // Registers the listener that receives SensorListener::Event callbacks.
  void setSensorListener(SensorListener *listener);

  /**
   * Static sensor characteristics
   */
  // Pixel array size: {width, height}, fixed at construction.
  const uint32_t mResolution[2];
  // Active pixel region: {x, y, width, height} — TODO confirm field order
  // against the constructor.
  const uint32_t mActiveArray[4];

  // {min, max} supported exposure time, in nanoseconds.
  static const nsecs_t kExposureTimeRange[2];
  // {min, max} supported frame duration, in nanoseconds.
  static const nsecs_t kFrameDurationRange[2];
  // Minimum vertical blanking interval, in nanoseconds.
  static const nsecs_t kMinVerticalBlank;

  // Bayer color filter arrangement reported to the framework.
  static const uint8_t kColorFilterArrangement;

  // Output image data characteristics
  static const uint32_t kMaxRawValue;
  static const uint32_t kBlackLevel;

  // Sensor sensitivity model constants, approximate: conversion from scene
  // illumination (lux-seconds) through photodiode voltage/electrons to raw
  // digital counts.
  static const float kSaturationVoltage;
  static const uint32_t kSaturationElectrons;
  static const float kVoltsPerLuxSecond;
  static const float kElectronsPerLuxSecond;

  // Gain applied per unit of sensitivity setting.
  static const float kBaseGainFactor;

  static const float kReadNoiseStddevBeforeGain;  // In electrons
  static const float kReadNoiseStddevAfterGain;   // In raw digital units
  static const float kReadNoiseVarBeforeGain;     // Variance form of the above
  static const float kReadNoiseVarAfterGain;      // Variance form of the above

  // While each row has to read out, reset, and then expose, the (reset +
  // expose) sequence can be overlapped by other row readouts, so the final
  // minimum frame duration is purely a function of row readout time, at least
  // if there's a reasonable number of rows.
  const nsecs_t mRowReadoutTime;

  // {min, max} supported sensitivity (ISO-style gain) values.
  static const int32_t kSensitivityRange[2];
  static const uint32_t kDefaultSensitivity;

 private:
  Mutex mControlMutex;  // Lock before accessing control parameters
  // Start of control parameters
  Condition mVSync;          // Signaled at the start of each frame's readout
  bool mGotVSync;            // Latches a vsync for waitForVSync()
  uint64_t mExposureTime;    // ns; set via setExposureTime()
  uint64_t mFrameDuration;   // ns; set via setFrameDuration()
  uint32_t mGainFactor;      // Set via setSensitivity()
  Buffers *mNextBuffers;     // Destination for the next frame; not owned here
  uint32_t mFrameNumber;     // Set via setFrameNumber()

  // End of control parameters

  Mutex mReadoutMutex;  // Lock before accessing readout variables
  // Start of readout variables
  Condition mReadoutAvailable;   // Signaled when a captured frame is ready
  Condition mReadoutComplete;    // Signaled when the consumer takes the frame
  Buffers *mCapturedBuffers;     // Most recently captured frame's buffers
  nsecs_t mCaptureTime;          // Timestamp of the captured frame's start
  SensorListener *mListener;     // Event callback target; not owned here
  // End of readout variables

  // Time of sensor startup, used for simulation zero-time point
  nsecs_t mStartupTime;

  /**
   * Inherited Thread virtual overrides, and members only used by the
   * processing thread
   */
 private:
  // Thread override: one-time setup before the loop starts.
  virtual status_t readyToRun();

  // Thread override: one iteration per sensor frame; returning false stops
  // the thread.
  virtual bool threadLoop();

  // Scheduled start time of the frame currently being captured.
  nsecs_t mNextCaptureTime;
  // Buffers being filled for the in-flight frame.
  Buffers *mNextCapturedBuffers;

  // The simulated world the sensor images; exposed via getScene().
  Scene mScene;

  // Per-format rendering helpers used by the processing thread to fill an
  // output buffer of the corresponding pixel format. 'stride' is in pixels
  // or bytes depending on format — confirm against the .cpp.
  void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
  void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
  void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
  void captureNV21(uint8_t *img, uint32_t gain, uint32_t stride);
  void captureDepth(uint8_t *img, uint32_t gain, uint32_t stride);
  // Depth point-cloud output; no gain/stride, unlike the 2D formats above.
  void captureDepthCloud(uint8_t *img);
};

}  // namespace android

#endif  // HW_EMULATOR_CAMERA2_SENSOR_H