/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * This class is a simple simulation of a typical CMOS cellphone imager chip,
 * which outputs 12-bit Bayer-mosaic raw images.
 *
 * Unlike most real image sensors, this one's native color space is linear sRGB.
 *
 * The sensor is modeled as a pipeline three stages deep; conceptually, each
 * frame to be captured goes through these three stages. The processing step
 * for the sensor is marked off by vertical sync signals, which indicate the
 * start of readout of the oldest frame. The interval between processing steps
 * depends on the frame duration of the frame currently being captured. The
 * stages are 1) configure, 2) capture, and 3) readout. During configuration,
 * the sensor's registers for settings such as exposure time, frame duration,
 * and gain are set for the next frame to be captured. In stage 2, the image
 * data for the frame is actually captured by the sensor. Finally, in stage 3,
 * the just-captured data is read out and sent to the rest of the system.
 *
 * The sensor is assumed to be rolling-shutter, so low-numbered rows of the
 * sensor are exposed earlier in time than higher-numbered rows, with the time
 * offset between each row equal to the row readout time.
 *
 * The characteristics of this sensor don't correspond to any actual sensor,
 * but are not far off from typical sensors.
 *
 * Example timing diagram, with three frames:
 *  Frames 0-1: Frame duration 50 ms, exposure time 20 ms.
 *  Frame 2: Frame duration 75 ms, exposure time 65 ms.
 * Legend:
 *   C = update sensor registers for frame
 *   v = row in reset (vertical blanking interval)
 *   E = row capturing image data
 *   R = row being read out
 *   | = vertical sync signal
 * time(ms)|    0          55        105       155            230     270
 *  Frame 0|   :configure : capture : readout :               :       :
 *   Row # |  ..|CCCC______|_________|_________|               :       :
 *       0 |   :\          \vvvvvEEEER         \               :       :
 *     500 |   :  \          \vvvvvEEEER         \             :       :
 *    1000 |   :    \          \vvvvvEEEER         \           :       :
 *    1500 |   :      \          \vvvvvEEEER         \         :       :
 *    2000 |   :        \__________\vvvvvEEEER_________\       :       :
 *  Frame 1|   :          configure  capture    readout        :       :
 *   Row # |   :          |CCCC_____|_________|______________|         :
 *       0 |   :          :\         \vvvvvEEEER              \        :
 *     500 |   :          :  \         \vvvvvEEEER              \      :
 *    1000 |   :          :    \         \vvvvvEEEER              \    :
 *    1500 |   :          :      \         \vvvvvEEEER              \  :
 *    2000 |   :          :        \_________\vvvvvEEEER______________\:
 *  Frame 2|   :          :          configure  capture          readout:
 *   Row # |   :          :          |CCCC_____|______________|_______|...
 *       0 |   :          :          :\         \vEEEEEEEEEEEEER       \
 *     500 |   :          :          :  \         \vEEEEEEEEEEEEER       \
 *    1000 |   :          :          :    \         \vEEEEEEEEEEEEER       \
 *    1500 |   :          :          :      \         \vEEEEEEEEEEEEER       \
 *    2000 |   :          :          :        \_________\vEEEEEEEEEEEEER_______\
 */
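
/*
 * Worked example of the row timing implied by the diagram above (purely
 * illustrative numbers, not constants defined by this class): if readout of a
 * frame begins at time t0 and reading one row takes t_row, then
 *
 *   row N is read out at      t_read(N)  = t0 + N * t_row
 *   row N starts exposing at  t_start(N) = t_read(N) - exposure_time
 *
 * With the 2000 rows shown read out over roughly 50 ms, t_row is about 25 us,
 * so row 1000 is read out ~25 ms after row 0, and with a 20 ms exposure it
 * began exposing 20 ms before its own readout.
 */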

#ifndef HW_EMULATOR_CAMERA2_SENSOR_H
#define HW_EMULATOR_CAMERA2_SENSOR_H

#include <hwl_types.h>

#include <functional>
#include <memory>
#include <unordered_map>
#include <vector>

#include "Base.h"
#include "EmulatedScene.h"
#include "HandleImporter.h"
#include "JpegCompressor.h"
#include "utils/Mutex.h"
#include "utils/StreamConfigurationMap.h"
#include "utils/Thread.h"
#include "utils/Timers.h"

namespace android {

using android::hardware::camera::common::V1_0::helper::HandleImporter;
using google_camera_hal::HwlPipelineCallback;
using google_camera_hal::HwlPipelineResult;
using google_camera_hal::StreamConfiguration;

/*
 * Default to sRGB with D65 white point
 */
struct ColorFilterXYZ {
  float rX = 3.2406f;
  float rY = -1.5372f;
  float rZ = -0.4986f;
  float grX = -0.9689f;
  float grY = 1.8758f;
  float grZ = 0.0415f;
  float gbX = -0.9689f;
  float gbY = 1.8758f;
  float gbZ = 0.0415f;
  float bX = 0.0557f;
  float bY = -0.2040f;
  float bZ = 1.0570f;
};

struct SensorCharacteristics {
  size_t width = 0;
  size_t height = 0;
  nsecs_t exposure_time_range[2] = {0};
  nsecs_t frame_duration_range[2] = {0};
  int32_t sensitivity_range[2] = {0};
  camera_metadata_enum_android_sensor_info_color_filter_arrangement
      color_arangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
  ColorFilterXYZ color_filter;
  uint32_t max_raw_value = 0;
  uint32_t black_level_pattern[4] = {0};
  uint32_t max_raw_streams = 0;
  uint32_t max_processed_streams = 0;
  uint32_t max_stalling_streams = 0;
  uint32_t max_input_streams = 0;
  uint32_t physical_size[2] = {0};
  bool is_flash_supported = false;
  uint32_t lens_shading_map_size[2] = {0};
  uint32_t max_pipeline_depth = 0;
  uint32_t orientation = 0;
  bool is_front_facing = false;
};

// Maps logical/physical camera ids to sensor characteristics
typedef std::unordered_map<uint32_t, SensorCharacteristics> LogicalCharacteristics;

class EmulatedSensor : private Thread, public virtual RefBase {
 public:
  EmulatedSensor();
  ~EmulatedSensor();

  static android_pixel_format_t OverrideFormat(android_pixel_format_t format) {
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
      return HAL_PIXEL_FORMAT_YCBCR_420_888;
    }

    return format;
  }

  static bool IsReprocessPathSupported(android_pixel_format_t input_format,
                                       android_pixel_format_t output_format) {
    if ((HAL_PIXEL_FORMAT_YCBCR_420_888 == input_format) &&
        ((HAL_PIXEL_FORMAT_YCBCR_420_888 == output_format) ||
         (HAL_PIXEL_FORMAT_BLOB == output_format))) {
      return true;
    }

    return false;
  }
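
  // Illustrative usage of the two helpers above (a sketch, not an API
  // contract): IMPLEMENTATION_DEFINED streams are backed by YCbCr 4:2:0, and
  // the only reprocess paths accepted are YUV->YUV and YUV->JPEG (BLOB).
  //
  //   OverrideFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
  //                                  // -> HAL_PIXEL_FORMAT_YCBCR_420_888
  //   IsReprocessPathSupported(HAL_PIXEL_FORMAT_YCBCR_420_888,
  //                            HAL_PIXEL_FORMAT_BLOB);   // -> true
  //   IsReprocessPathSupported(HAL_PIXEL_FORMAT_RAW16,
  //                            HAL_PIXEL_FORMAT_BLOB);   // -> false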

  static bool AreCharacteristicsSupported(
      const SensorCharacteristics& characteristics);
  static bool IsStreamCombinationSupported(
      const StreamConfiguration& config, StreamConfigurationMap& map,
      const SensorCharacteristics& sensor_chars);

  /*
   * Power control
   */

  status_t StartUp(uint32_t logical_camera_id,
                   std::unique_ptr<LogicalCharacteristics> logical_chars);
  status_t ShutDown();

  /*
   * Physical camera settings control
   */
  struct SensorSettings {
    nsecs_t exposure_time = 0;
    nsecs_t frame_duration = 0;
    uint32_t gain = 0;  // ISO
    uint32_t lens_shading_map_mode;
    bool report_neutral_color_point = false;
    bool report_green_split = false;
    bool report_noise_profile = false;
    float zoom_ratio = 1.0f;
    bool report_rotate_and_crop = false;
    uint8_t rotate_and_crop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
    bool report_video_stab = false;
    uint8_t video_stab = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    bool report_edge_mode = false;
    uint8_t edge_mode = ANDROID_EDGE_MODE_OFF;
  };

  // Maps physical and logical camera ids to individual device settings
  typedef std::unordered_map<uint32_t, SensorSettings> LogicalCameraSettings;

  void SetCurrentRequest(std::unique_ptr<LogicalCameraSettings> logical_settings,
                         std::unique_ptr<HwlPipelineResult> result,
                         std::unique_ptr<Buffers> input_buffers,
                         std::unique_ptr<Buffers> output_buffers);

  status_t Flush();

  /*
   * Synchronizing with sensor operation (vertical sync)
   */

  // Wait until the sensor outputs its next vertical sync signal, meaning it
  // is starting readout of its latest frame of data. Returns true if vertical
  // sync is signaled, false if the wait timed out.
  bool WaitForVSync(nsecs_t rel_time);
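
  // Sketch of one capture cycle as driven by a client of this class (purely
  // illustrative; camera_id, exposure_ns, duration_ns, iso, result and the
  // buffer vectors are hypothetical local variables of the caller):
  //
  //   auto settings = std::make_unique<LogicalCameraSettings>();
  //   (*settings)[camera_id].exposure_time = exposure_ns;
  //   (*settings)[camera_id].frame_duration = duration_ns;
  //   (*settings)[camera_id].gain = iso;
  //   sensor->SetCurrentRequest(std::move(settings), std::move(result),
  //                             std::move(input_buffers),
  //                             std::move(output_buffers));
  //   if (!sensor->WaitForVSync(duration_ns)) {
  //     // The sensor never signaled vertical sync within the frame duration;
  //     // treat the request as dropped.
  //   }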

  static const nsecs_t kSupportedExposureTimeRange[2];
  static const nsecs_t kSupportedFrameDurationRange[2];
  static const int32_t kSupportedSensitivityRange[2];
  static const uint8_t kSupportedColorFilterArrangement;
  static const uint32_t kDefaultMaxRawValue;
  static const nsecs_t kDefaultExposureTime;
  static const int32_t kDefaultSensitivity;
  static const nsecs_t kDefaultFrameDuration;
  static const nsecs_t kReturnResultThreshod;
  static const uint32_t kDefaultBlackLevelPattern[4];
  static const camera_metadata_rational kDefaultColorTransform[9];
  static const float kDefaultColorCorrectionGains[4];
  static const float kDefaultToneMapCurveRed[4];
  static const float kDefaultToneMapCurveGreen[4];
  static const float kDefaultToneMapCurveBlue[4];
  static const uint8_t kPipelineDepth;

 private:
  // Scene stabilization
  static const uint32_t kRegularSceneHandshake;
  static const uint32_t kReducedSceneHandshake;

  /**
   * Logical characteristics
   */
  std::unique_ptr<LogicalCharacteristics> chars_;

  uint32_t logical_camera_id_ = 0;

  static const nsecs_t kMinVerticalBlank;

  // Sensor sensitivity, approximate

  static const float kSaturationVoltage;
  static const uint32_t kSaturationElectrons;
  static const float kVoltsPerLuxSecond;
  static const float kElectronsPerLuxSecond;

  static const float kReadNoiseStddevBeforeGain;  // In electrons
  static const float kReadNoiseStddevAfterGain;   // In raw digital units
  static const float kReadNoiseVarBeforeGain;
  static const float kReadNoiseVarAfterGain;
  static const camera_metadata_rational kNeutralColorPoint[3];
  static const float kGreenSplit;

  static const uint32_t kMaxRAWStreams;
  static const uint32_t kMaxProcessedStreams;
  static const uint32_t kMaxStallingStreams;
  static const uint32_t kMaxInputStreams;
  static const uint32_t kMaxLensShadingMapSize[2];
  static const int32_t kFixedBitPrecision;
  static const int32_t kSaturationPoint;

  std::vector<int32_t> gamma_table_;

  Mutex control_mutex_;  // Lock before accessing control parameters
  // Start of control parameters
  Condition vsync_;
  bool got_vsync_;
  std::unique_ptr<LogicalCameraSettings> current_settings_;
  std::unique_ptr<HwlPipelineResult> current_result_;
  std::unique_ptr<Buffers> current_output_buffers_;
  std::unique_ptr<Buffers> current_input_buffers_;
  std::unique_ptr<JpegCompressor> jpeg_compressor_;

  // End of control parameters

  unsigned int rand_seed_ = 1;

  /**
   * Inherited Thread virtual overrides, and members only used by the
   * processing thread
   */
  bool threadLoop() override;

  nsecs_t next_capture_time_;

  sp<EmulatedScene> scene_;

  void CaptureRaw(uint8_t* img, uint32_t gain, uint32_t width,
                  const SensorCharacteristics& chars);
  enum RGBLayout { RGB, RGBA, ARGB };
  void CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
                  uint32_t stride, RGBLayout layout, uint32_t gain,
                  const SensorCharacteristics& chars);
  void CaptureYUV420(YCbCrPlanes yuv_layout, uint32_t width, uint32_t height,
                     uint32_t gain, float zoom_ratio, bool rotate,
                     const SensorCharacteristics& chars);
  void CaptureDepth(uint8_t* img, uint32_t gain, uint32_t width, uint32_t height,
                    uint32_t stride, const SensorCharacteristics& chars);

  struct YUV420Frame {
    uint32_t width = 0;
    uint32_t height = 0;
    YCbCrPlanes planes;
  };

  enum ProcessType { REPROCESS, HIGH_QUALITY, REGULAR };
  status_t ProcessYUV420(const YUV420Frame& input, const YUV420Frame& output,
                         uint32_t gain, ProcessType process_type,
                         float zoom_ratio, bool rotate_and_crop,
                         const SensorCharacteristics& chars);

  inline int32_t ApplysRGBGamma(int32_t value, int32_t saturation);

  bool WaitForVSyncLocked(nsecs_t reltime);
  void CalculateAndAppendNoiseProfile(float gain /*in ISO*/,
                                      float base_gain_factor,
                                      HalCameraMetadata* result /*out*/);

  void ReturnResults(HwlPipelineCallback callback,
                     std::unique_ptr<LogicalCameraSettings> settings,
                     std::unique_ptr<HwlPipelineResult> result);

  static float GetBaseGainFactor(float max_raw_value) {
    return max_raw_value / EmulatedSensor::kSaturationElectrons;
  }
};

}  // namespace android

#endif  // HW_EMULATOR_CAMERA2_SENSOR_H