1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 //#define LOG_NNDEBUG 0
19 #define LOG_TAG "EmulatedSensor"
20 #define ATRACE_TAG ATRACE_TAG_CAMERA
21
22 #ifdef LOG_NNDEBUG
23 #define ALOGVV(...) ALOGV(__VA_ARGS__)
24 #else
25 #define ALOGVV(...) ((void)0)
26 #endif
27
28 #include "EmulatedSensor.h"
29
30 #include <inttypes.h>
31 #include <libyuv.h>
32 #include <system/camera_metadata.h>
33 #include <utils/Log.h>
34 #include <utils/Trace.h>
35
36 #include <cmath>
37 #include <cstdlib>
38
39 #include "utils/ExifUtils.h"
40 #include "utils/HWLUtils.h"
41
42 namespace android {
43
44 using google_camera_hal::HalCameraMetadata;
45 using google_camera_hal::MessageType;
46 using google_camera_hal::NotifyMessage;
47
48 const uint32_t EmulatedSensor::kRegularSceneHandshake = 1; // Scene handshake divider
49 const uint32_t EmulatedSensor::kReducedSceneHandshake = 2; // Scene handshake divider
50
51 // 1 us - 30 sec
52 const nsecs_t EmulatedSensor::kSupportedExposureTimeRange[2] = {1000LL,
53 30000000000LL};
54
55 // ~1/30 s - 30 sec
56 const nsecs_t EmulatedSensor::kSupportedFrameDurationRange[2] = {33331760LL,
57 30000000000LL};
58
59 const int32_t EmulatedSensor::kSupportedSensitivityRange[2] = {100, 1600};
60 const int32_t EmulatedSensor::kDefaultSensitivity = 100; // ISO
61 const nsecs_t EmulatedSensor::kDefaultExposureTime = ms2ns(15);
62 const nsecs_t EmulatedSensor::kDefaultFrameDuration = ms2ns(33);
63 // Deadline within which we should return the results as soon as possible to
64 // avoid skewing the frame cycle due to external delays.
65 const nsecs_t EmulatedSensor::kReturnResultThreshod = 3 * kDefaultFrameDuration;
66
67 // Sensor defaults
68 const uint8_t EmulatedSensor::kSupportedColorFilterArrangement =
69 ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB;
70 const uint32_t EmulatedSensor::kDefaultMaxRawValue = 4000;
71 const uint32_t EmulatedSensor::kDefaultBlackLevelPattern[4] = {1000, 1000, 1000,
72 1000};
73
74 const nsecs_t EmulatedSensor::kMinVerticalBlank = 10000L;
75
76 // Sensor sensitivity
77 const float EmulatedSensor::kSaturationVoltage = 0.520f;
78 const uint32_t EmulatedSensor::kSaturationElectrons = 2000;
79 const float EmulatedSensor::kVoltsPerLuxSecond = 0.100f;
80
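// Derived conversion factor used by the scene simulation: full-well capacity
// (electrons) divided by the saturation voltage gives electrons per volt,
// which multiplied by volts per lux-second yields electrons per lux-second.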
81 const float EmulatedSensor::kElectronsPerLuxSecond =
82 EmulatedSensor::kSaturationElectrons / EmulatedSensor::kSaturationVoltage *
83 EmulatedSensor::kVoltsPerLuxSecond;
84
85 const float EmulatedSensor::kReadNoiseStddevBeforeGain = 1.177; // in electrons
86 const float EmulatedSensor::kReadNoiseStddevAfterGain =
87 2.100; // in digital counts
88 const float EmulatedSensor::kReadNoiseVarBeforeGain =
89 EmulatedSensor::kReadNoiseStddevBeforeGain *
90 EmulatedSensor::kReadNoiseStddevBeforeGain;
91 const float EmulatedSensor::kReadNoiseVarAfterGain =
92 EmulatedSensor::kReadNoiseStddevAfterGain *
93 EmulatedSensor::kReadNoiseStddevAfterGain;
94
95 const uint32_t EmulatedSensor::kMaxRAWStreams = 1;
96 const uint32_t EmulatedSensor::kMaxProcessedStreams = 3;
97 const uint32_t EmulatedSensor::kMaxStallingStreams = 2;
98 const uint32_t EmulatedSensor::kMaxInputStreams = 1;
99
100 const uint32_t EmulatedSensor::kMaxLensShadingMapSize[2]{64, 64};
101 const int32_t EmulatedSensor::kFixedBitPrecision = 64; // 2^6, i.e. 6 bits of fractional precision
102 // In fixed-point math, saturation point of sensor after gain
103 const int32_t EmulatedSensor::kSaturationPoint = kFixedBitPrecision * 255;
104 const camera_metadata_rational EmulatedSensor::kNeutralColorPoint[3] = {
105 {255, 1}, {255, 1}, {255, 1}};
106 const float EmulatedSensor::kGreenSplit = 1.f; // No divergence
107 // Reduce memory usage by allowing only one buffer in sensor, one in jpeg
108 // compressor and one pending request to avoid stalls.
109 const uint8_t EmulatedSensor::kPipelineDepth = 3;
110
111 const camera_metadata_rational EmulatedSensor::kDefaultColorTransform[9] = {
112 {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}, {0, 1}, {0, 1}, {0, 1}, {1, 1}};
113 const float EmulatedSensor::kDefaultColorCorrectionGains[4] = {1.0f, 1.0f, 1.0f,
114 1.0f};
115
116 const float EmulatedSensor::kDefaultToneMapCurveRed[4] = {.0f, .0f, 1.f, 1.f};
117 const float EmulatedSensor::kDefaultToneMapCurveGreen[4] = {.0f, .0f, 1.f, 1.f};
118 const float EmulatedSensor::kDefaultToneMapCurveBlue[4] = {.0f, .0f, 1.f, 1.f};
119
120 /** A few utility functions for math, normal distributions */
121
122 // Take advantage of IEEE floating-point format to calculate an approximate
123 // square root. Accurate to within +-3.6%
124 float sqrtf_approx(float r) {
125 // Modifier is based on IEEE floating-point representation; the
126 // manipulations boil down to finding approximate log2, dividing by two, and
127 // then inverting the log2. A bias is added to make the relative error
128 // symmetric about the real answer.
129 const int32_t modifier = 0x1FBB4000;
130
131 int32_t r_i = *(int32_t*)(&r);
132 r_i = (r_i >> 1) + modifier;
133
134 return *(float*)(&r_i);
135 }
136
137 EmulatedSensor::EmulatedSensor() : Thread(false), got_vsync_(false) {
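// Precompute the sRGB gamma curve over the full fixed-point range so that
// per-pixel processing in CaptureYUV420 reduces to a table lookup.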
138 gamma_table_.resize(kSaturationPoint + 1);
139 for (int32_t i = 0; i <= kSaturationPoint; i++) {
140 gamma_table_[i] = ApplysRGBGamma(i, kSaturationPoint);
141 }
142 }
143
144 EmulatedSensor::~EmulatedSensor() {
145 ShutDown();
146 }
147
148 bool EmulatedSensor::AreCharacteristicsSupported(
149 const SensorCharacteristics& characteristics) {
150 if ((characteristics.width == 0) || (characteristics.height == 0)) {
151 ALOGE("%s: Invalid sensor size %zux%zu", __FUNCTION__,
152 characteristics.width, characteristics.height);
153 return false;
154 }
155
156 if ((characteristics.exposure_time_range[0] >=
157 characteristics.exposure_time_range[1]) ||
158 ((characteristics.exposure_time_range[0] < kSupportedExposureTimeRange[0]) ||
159 (characteristics.exposure_time_range[1] >
160 kSupportedExposureTimeRange[1]))) {
161 ALOGE("%s: Unsupported exposure range", __FUNCTION__);
162 return false;
163 }
164
165 if ((characteristics.frame_duration_range[0] >=
166 characteristics.frame_duration_range[1]) ||
167 ((characteristics.frame_duration_range[0] <
168 kSupportedFrameDurationRange[0]) ||
169 (characteristics.frame_duration_range[1] >
170 kSupportedFrameDurationRange[1]))) {
171 ALOGE("%s: Unsupported frame duration range", __FUNCTION__);
172 return false;
173 }
174
175 if ((characteristics.sensitivity_range[0] >=
176 characteristics.sensitivity_range[1]) ||
177 ((characteristics.sensitivity_range[0] < kSupportedSensitivityRange[0]) ||
178 (characteristics.sensitivity_range[1] > kSupportedSensitivityRange[1])) ||
179 (!((kDefaultSensitivity >= characteristics.sensitivity_range[0]) &&
180 (kDefaultSensitivity <= characteristics.sensitivity_range[1])))) {
181 ALOGE("%s: Unsupported sensitivity range", __FUNCTION__);
182 return false;
183 }
184
185 if (characteristics.color_arangement != kSupportedColorFilterArrangement) {
186 ALOGE("%s: Unsupported color arrangement!", __FUNCTION__);
187 return false;
188 }
189
190 for (const auto& blackLevel : characteristics.black_level_pattern) {
191 if (blackLevel >= characteristics.max_raw_value) {
192 ALOGE("%s: Black level matches or exceeds max RAW value!", __FUNCTION__);
193 return false;
194 }
195 }
196
197 if ((characteristics.frame_duration_range[0] / characteristics.height) == 0) {
198 ALOGE("%s: Zero row readout time!", __FUNCTION__);
199 return false;
200 }
201
202 if (characteristics.max_raw_streams > kMaxRAWStreams) {
203 ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
204 __FUNCTION__, characteristics.max_raw_streams, kMaxRAWStreams);
205 return false;
206 }
207
208 if (characteristics.max_processed_streams > kMaxProcessedStreams) {
209 ALOGE("%s: Processed streams maximum %u exceeds supported maximum %u",
210 __FUNCTION__, characteristics.max_processed_streams,
211 kMaxProcessedStreams);
212 return false;
213 }
214
215 if (characteristics.max_stalling_streams > kMaxStallingStreams) {
216 ALOGE("%s: Stalling streams maximum %u exceeds supported maximum %u",
217 __FUNCTION__, characteristics.max_stalling_streams,
218 kMaxStallingStreams);
219 return false;
220 }
221
222 if (characteristics.max_input_streams > kMaxInputStreams) {
223 ALOGE("%s: Input streams maximum %u exceeds supported maximum %u",
224 __FUNCTION__, characteristics.max_input_streams, kMaxInputStreams);
225 return false;
226 }
227
228 if ((characteristics.lens_shading_map_size[0] > kMaxLensShadingMapSize[0]) ||
229 (characteristics.lens_shading_map_size[1] > kMaxLensShadingMapSize[1])) {
230 ALOGE("%s: Lens shading map [%dx%d] exceeds supported maximum [%dx%d]",
231 __FUNCTION__, characteristics.lens_shading_map_size[0],
232 characteristics.lens_shading_map_size[1], kMaxLensShadingMapSize[0],
233 kMaxLensShadingMapSize[1]);
234 return false;
235 }
236
237 if (characteristics.max_pipeline_depth < kPipelineDepth) {
238 ALOGE("%s: Pipeline depth %d smaller than supported minimum %d",
239 __FUNCTION__, characteristics.max_pipeline_depth, kPipelineDepth);
240 return false;
241 }
242
243 return true;
244 }
245
246 bool EmulatedSensor::IsStreamCombinationSupported(
247 const StreamConfiguration& config, StreamConfigurationMap& map,
248 const SensorCharacteristics& sensor_chars) {
249 uint32_t raw_stream_count = 0;
250 uint32_t input_stream_count = 0;
251 uint32_t processed_stream_count = 0;
252 uint32_t stalling_stream_count = 0;
253
254 for (const auto& stream : config.streams) {
255 if (stream.rotation != google_camera_hal::StreamRotation::kRotation0) {
256 ALOGE("%s: Stream rotation: 0x%x not supported!", __FUNCTION__,
257 stream.rotation);
258 return false;
259 }
260
261 if (stream.stream_type == google_camera_hal::StreamType::kInput) {
262 if (sensor_chars.max_input_streams == 0) {
263 ALOGE("%s: Input streams are not supported on this device!",
264 __FUNCTION__);
265 return false;
266 }
267
268 auto supported_outputs = map.GetValidOutputFormatsForInput(stream.format);
269 if (supported_outputs.empty()) {
270 ALOGE("%s: Input stream with format: 0x%x not supported on this device!",
271 __FUNCTION__, stream.format);
272 return false;
273 }
274
275 input_stream_count++;
276 } else {
277 switch (stream.format) {
278 case HAL_PIXEL_FORMAT_BLOB:
279 if ((stream.data_space != HAL_DATASPACE_V0_JFIF) &&
280 (stream.data_space != HAL_DATASPACE_UNKNOWN)) {
281 ALOGE("%s: Unsupported Blob dataspace 0x%x", __FUNCTION__,
282 stream.data_space);
283 return false;
284 }
285 stalling_stream_count++;
286 break;
287 case HAL_PIXEL_FORMAT_RAW16:
288 raw_stream_count++;
289 break;
290 default:
291 processed_stream_count++;
292 }
293 }
294
295 auto output_sizes = map.GetOutputSizes(stream.format);
296 if (output_sizes.empty()) {
297 ALOGE("%s: Unsupported format: 0x%x", __FUNCTION__, stream.format);
298 return false;
299 }
300
301 auto stream_size = std::make_pair(stream.width, stream.height);
302 if (output_sizes.find(stream_size) == output_sizes.end()) {
303 ALOGE("%s: Stream with size %dx%d and format 0x%x is not supported!",
304 __FUNCTION__, stream.width, stream.height, stream.format);
305 return false;
306 }
307 }
308
309 if (raw_stream_count > sensor_chars.max_raw_streams) {
310 ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
311 __FUNCTION__, raw_stream_count, sensor_chars.max_raw_streams);
312 return false;
313 }
314
315 if (processed_stream_count > sensor_chars.max_processed_streams) {
316 ALOGE("%s: Processed streams maximum %u exceeds supported maximum %u",
317 __FUNCTION__, processed_stream_count,
318 sensor_chars.max_processed_streams);
319 return false;
320 }
321
322 if (stalling_stream_count > sensor_chars.max_stalling_streams) {
323 ALOGE("%s: Stalling streams maximum %u exceeds supported maximum %u",
324 __FUNCTION__, stalling_stream_count,
325 sensor_chars.max_stalling_streams);
326 return false;
327 }
328
329 if (input_stream_count > sensor_chars.max_input_streams) {
330 ALOGE("%s: Input stream maximum %u exceeds supported maximum %u",
331 __FUNCTION__, input_stream_count, sensor_chars.max_input_streams);
332 return false;
333 }
334
335 return true;
336 }
337
338 status_t EmulatedSensor::StartUp(
339 uint32_t logical_camera_id,
340 std::unique_ptr<LogicalCharacteristics> logical_chars) {
341 if (isRunning()) {
342 return OK;
343 }
344
345 if (logical_chars.get() == nullptr) {
346 return BAD_VALUE;
347 }
348
349 chars_ = std::move(logical_chars);
350 auto device_chars = chars_->find(logical_camera_id);
351 if (device_chars == chars_->end()) {
352 ALOGE(
353 "%s: Logical camera id: %u absent from logical camera characteristics!",
354 __FUNCTION__, logical_camera_id);
355 return BAD_VALUE;
356 }
357
358 for (const auto& it : *chars_) {
359 if (!AreCharacteristicsSupported(it.second)) {
360 ALOGE("%s: Sensor characteristics for camera id: %u not supported!",
361 __FUNCTION__, it.first);
362 return BAD_VALUE;
363 }
364 }
365
366 logical_camera_id_ = logical_camera_id;
367 scene_ = new EmulatedScene(
368 device_chars->second.width, device_chars->second.height,
369 kElectronsPerLuxSecond, device_chars->second.orientation,
370 device_chars->second.is_front_facing);
371 scene_->InitializeSensorQueue();
372 jpeg_compressor_ = std::make_unique<JpegCompressor>();
373
374 auto res = run(LOG_TAG, ANDROID_PRIORITY_URGENT_DISPLAY);
375 if (res != OK) {
376 ALOGE("Unable to start up sensor capture thread: %d", res);
377 }
378
379 return res;
380 }
381
382 status_t EmulatedSensor::ShutDown() {
383 int res;
384 res = requestExitAndWait();
385 if (res != OK) {
386 ALOGE("Unable to shut down sensor capture thread: %d", res);
387 }
388 return res;
389 }
390
391 void EmulatedSensor::SetCurrentRequest(
392 std::unique_ptr<LogicalCameraSettings> logical_settings,
393 std::unique_ptr<HwlPipelineResult> result,
394 std::unique_ptr<Buffers> input_buffers,
395 std::unique_ptr<Buffers> output_buffers) {
396 Mutex::Autolock lock(control_mutex_);
397 current_settings_ = std::move(logical_settings);
398 current_result_ = std::move(result);
399 current_input_buffers_ = std::move(input_buffers);
400 current_output_buffers_ = std::move(output_buffers);
401 }
402
403 bool EmulatedSensor::WaitForVSyncLocked(nsecs_t reltime) {
404 got_vsync_ = false;
405 while (!got_vsync_) {
406 auto res = vsync_.waitRelative(control_mutex_, reltime);
407 if (res != OK && res != TIMED_OUT) {
408 ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res);
409 return false;
410 }
411 }
412
413 return got_vsync_;
414 }
415
416 bool EmulatedSensor::WaitForVSync(nsecs_t reltime) {
417 Mutex::Autolock lock(control_mutex_);
418
419 return WaitForVSyncLocked(reltime);
420 }
421
422 status_t EmulatedSensor::Flush() {
423 Mutex::Autolock lock(control_mutex_);
424 auto ret = WaitForVSyncLocked(kSupportedFrameDurationRange[1]);
425
426 // First recreate the jpeg compressor. This will abort any ongoing processing
427 // and flush any pending jobs.
428 jpeg_compressor_ = std::make_unique<JpegCompressor>();
429
430 // Then return any pending frames here
431 if ((current_input_buffers_.get() != nullptr) &&
432 (!current_input_buffers_->empty())) {
433 current_input_buffers_->clear();
434 }
435 if ((current_output_buffers_.get() != nullptr) &&
436 (!current_output_buffers_->empty())) {
437 for (const auto& buffer : *current_output_buffers_) {
438 buffer->stream_buffer.status = BufferStatus::kError;
439 }
440
441 if ((current_result_.get() != nullptr) &&
442 (current_result_->result_metadata.get() != nullptr)) {
443 if (current_output_buffers_->at(0)->callback.notify != nullptr) {
444 NotifyMessage msg{
445 .type = MessageType::kError,
446 .message.error = {
447 .frame_number = current_output_buffers_->at(0)->frame_number,
448 .error_stream_id = -1,
449 .error_code = ErrorCode::kErrorResult,
450 }};
451
452 current_output_buffers_->at(0)->callback.notify(
453 current_result_->pipeline_id, msg);
454 }
455 }
456
457 current_output_buffers_->clear();
458 }
459
460 return ret ? OK : TIMED_OUT;
461 }
462
463 bool EmulatedSensor::threadLoop() {
464 ATRACE_CALL();
465 /**
466 * Sensor capture operation main loop.
467 *
468 */
469
470 /**
471 * Stage 1: Read in latest control parameters
472 */
473 std::unique_ptr<Buffers> next_buffers;
474 std::unique_ptr<Buffers> next_input_buffer;
475 std::unique_ptr<HwlPipelineResult> next_result;
476 std::unique_ptr<LogicalCameraSettings> settings;
477 HwlPipelineCallback callback = {nullptr, nullptr};
478 {
479 Mutex::Autolock lock(control_mutex_);
480 std::swap(settings, current_settings_);
481 std::swap(next_buffers, current_output_buffers_);
482 std::swap(next_input_buffer, current_input_buffers_);
483 std::swap(next_result, current_result_);
484
485 // Signal VSync for start of readout
486 ALOGVV("Sensor VSync");
487 got_vsync_ = true;
488 vsync_.signal();
489 }
490
491 auto frame_duration = EmulatedSensor::kSupportedFrameDurationRange[0];
492 // Frame duration must always be the same among all physical devices
493 if ((settings.get() != nullptr) && (!settings->empty())) {
494 frame_duration = settings->begin()->second.frame_duration;
495 }
496
497 nsecs_t start_real_time = systemTime();
498 // Stagefright cares about system time for timestamps, so base simulated
499 // time on that.
500 nsecs_t frame_end_real_time = start_real_time + frame_duration;
501
502 /**
503 * Stage 2: Capture new image
504 */
505 next_capture_time_ = frame_end_real_time;
506
507 bool reprocess_request = false;
508 if ((next_input_buffer.get() != nullptr) && (!next_input_buffer->empty())) {
509 if (next_input_buffer->size() > 1) {
510 ALOGW("%s: Reprocess supports only single input!", __FUNCTION__);
511 }
512 if (next_input_buffer->at(0)->format != HAL_PIXEL_FORMAT_YCBCR_420_888) {
513 ALOGE(
514 "%s: Reprocess input format: 0x%x not supported! Skipping reprocess!",
515 __FUNCTION__, next_input_buffer->at(0)->format);
516 } else {
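// Reprocess requests must carry the timestamp of the original capture;
// reuse it from the result metadata instead of the simulated frame end time.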
517 camera_metadata_ro_entry_t entry;
518 auto ret =
519 next_result->result_metadata->Get(ANDROID_SENSOR_TIMESTAMP, &entry);
520 if ((ret == OK) && (entry.count == 1)) {
521 next_capture_time_ = entry.data.i64[0];
522 } else {
523 ALOGW("%s: Reprocess timestamp absent!", __FUNCTION__);
524 }
525
526 reprocess_request = true;
527 }
528 }
529
530 if ((next_buffers != nullptr) && (settings != nullptr)) {
531 callback = next_buffers->at(0)->callback;
532 if (callback.notify != nullptr) {
533 NotifyMessage msg{
534 .type = MessageType::kShutter,
535 .message.shutter = {
536 .frame_number = next_buffers->at(0)->frame_number,
537 .timestamp_ns = static_cast<uint64_t>(next_capture_time_)}};
538 callback.notify(next_result->pipeline_id, msg);
539 }
540 auto b = next_buffers->begin();
541 while (b != next_buffers->end()) {
542 auto device_settings = settings->find((*b)->camera_id);
543 if (device_settings == settings->end()) {
544 ALOGE("%s: Sensor settings absent for device: %d", __func__,
545 (*b)->camera_id);
546 b = next_buffers->erase(b);
547 continue;
548 }
549
550 auto device_chars = chars_->find((*b)->camera_id);
551 if (device_chars == chars_->end()) {
552 ALOGE("%s: Sensor characteristics absent for device: %d", __func__,
553 (*b)->camera_id);
554 b = next_buffers->erase(b);
555 continue;
556 }
557
558 ALOGVV("Starting next capture: Exposure: %" PRIu64 " ms, gain: %d",
559 ns2ms(device_settings->second.exposure_time),
560 device_settings->second.gain);
561
562 scene_->Initialize(device_chars->second.width,
563 device_chars->second.height, kElectronsPerLuxSecond);
564 scene_->SetExposureDuration((float)device_settings->second.exposure_time /
565 1e9);
566 scene_->SetColorFilterXYZ(device_chars->second.color_filter.rX,
567 device_chars->second.color_filter.rY,
568 device_chars->second.color_filter.rZ,
569 device_chars->second.color_filter.grX,
570 device_chars->second.color_filter.grY,
571 device_chars->second.color_filter.grZ,
572 device_chars->second.color_filter.gbX,
573 device_chars->second.color_filter.gbY,
574 device_chars->second.color_filter.gbZ,
575 device_chars->second.color_filter.bX,
576 device_chars->second.color_filter.bY,
577 device_chars->second.color_filter.bZ);
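// Emulate video stabilization by asking the scene for reduced hand-shake
// motion when the stabilization mode is enabled in the request settings.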
578 uint32_t handshake_divider =
579 (device_settings->second.video_stab == ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) ?
580 kReducedSceneHandshake : kRegularSceneHandshake;
581 scene_->CalculateScene(next_capture_time_, handshake_divider);
582
583 (*b)->stream_buffer.status = BufferStatus::kOk;
584 switch ((*b)->format) {
585 case HAL_PIXEL_FORMAT_RAW16:
586 if (!reprocess_request) {
587 CaptureRaw((*b)->plane.img.img, device_settings->second.gain,
588 (*b)->width, device_chars->second);
589 } else {
590 ALOGE("%s: Reprocess requests with output format %x not supported!",
591 __FUNCTION__, (*b)->format);
592 (*b)->stream_buffer.status = BufferStatus::kError;
593 }
594 break;
595 case HAL_PIXEL_FORMAT_RGB_888:
596 if (!reprocess_request) {
597 CaptureRGB((*b)->plane.img.img, (*b)->width, (*b)->height,
598 (*b)->plane.img.stride, RGBLayout::RGB,
599 device_settings->second.gain, device_chars->second);
600 } else {
601 ALOGE("%s: Reprocess requests with output format %x not supported!",
602 __FUNCTION__, (*b)->format);
603 (*b)->stream_buffer.status = BufferStatus::kError;
604 }
605 break;
606 case HAL_PIXEL_FORMAT_RGBA_8888:
607 if (!reprocess_request) {
608 CaptureRGB((*b)->plane.img.img, (*b)->width, (*b)->height,
609 (*b)->plane.img.stride, RGBLayout::RGBA,
610 device_settings->second.gain, device_chars->second);
611 } else {
612 ALOGE("%s: Reprocess requests with output format %x not supported!",
613 __FUNCTION__, (*b)->format);
614 (*b)->stream_buffer.status = BufferStatus::kError;
615 }
616 break;
617 case HAL_PIXEL_FORMAT_BLOB:
618 if ((*b)->dataSpace == HAL_DATASPACE_V0_JFIF) {
619 YUV420Frame yuv_input{
620 .width =
621 reprocess_request ? (*next_input_buffer->begin())->width : 0,
622 .height = reprocess_request
623 ? (*next_input_buffer->begin())->height
624 : 0,
625 .planes = reprocess_request
626 ? (*next_input_buffer->begin())->plane.img_y_crcb
627 : YCbCrPlanes{}};
628 auto jpeg_input = std::make_unique<JpegYUV420Input>();
629 jpeg_input->width = (*b)->width;
630 jpeg_input->height = (*b)->height;
631 auto img =
632 new uint8_t[(jpeg_input->width * jpeg_input->height * 3) / 2];
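// Lay the temporary buffer out as contiguous I420: a full-resolution Y
// plane followed by quarter-resolution Cb and Cr planes.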
633 jpeg_input->yuv_planes = {
634 .img_y = img,
635 .img_cb = img + jpeg_input->width * jpeg_input->height,
636 .img_cr = img + (jpeg_input->width * jpeg_input->height * 5) / 4,
637 .y_stride = jpeg_input->width,
638 .cbcr_stride = jpeg_input->width / 2,
639 .cbcr_step = 1};
640 jpeg_input->buffer_owner = true;
641 YUV420Frame yuv_output{.width = jpeg_input->width,
642 .height = jpeg_input->height,
643 .planes = jpeg_input->yuv_planes};
644
645 bool rotate =
646 device_settings->second.rotate_and_crop == ANDROID_SCALER_ROTATE_AND_CROP_90;
647 ProcessType process_type = reprocess_request ? REPROCESS :
648 (device_settings->second.edge_mode == ANDROID_EDGE_MODE_HIGH_QUALITY) ?
649 HIGH_QUALITY : REGULAR;
650 auto ret = ProcessYUV420(
651 yuv_input, yuv_output, device_settings->second.gain,
652 process_type, device_settings->second.zoom_ratio,
653 rotate, device_chars->second);
654 if (ret != 0) {
655 (*b)->stream_buffer.status = BufferStatus::kError;
656 break;
657 }
658
659 auto jpeg_job = std::make_unique<JpegYUV420Job>();
660 jpeg_job->exif_utils = std::unique_ptr<ExifUtils>(
661 ExifUtils::Create(device_chars->second));
662 jpeg_job->input = std::move(jpeg_input);
663 // If jpeg compression is successful, then the jpeg compressor
664 // must set the corresponding status.
665 (*b)->stream_buffer.status = BufferStatus::kError;
666 std::swap(jpeg_job->output, *b);
667 jpeg_job->result_metadata =
668 HalCameraMetadata::Clone(next_result->result_metadata.get());
669
670 Mutex::Autolock lock(control_mutex_);
671 jpeg_compressor_->QueueYUV420(std::move(jpeg_job));
672 } else {
673 ALOGE("%s: Format %x with dataspace %x is TODO", __FUNCTION__,
674 (*b)->format, (*b)->dataSpace);
675 (*b)->stream_buffer.status = BufferStatus::kError;
676 }
677 break;
678 case HAL_PIXEL_FORMAT_YCrCb_420_SP:
679 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
680 YUV420Frame yuv_input{
681 .width =
682 reprocess_request ? (*next_input_buffer->begin())->width : 0,
683 .height =
684 reprocess_request ? (*next_input_buffer->begin())->height : 0,
685 .planes = reprocess_request
686 ? (*next_input_buffer->begin())->plane.img_y_crcb
687 : YCbCrPlanes{}};
688 YUV420Frame yuv_output{.width = (*b)->width,
689 .height = (*b)->height,
690 .planes = (*b)->plane.img_y_crcb};
691 bool rotate =
692 device_settings->second.rotate_and_crop == ANDROID_SCALER_ROTATE_AND_CROP_90;
693 ProcessType process_type = reprocess_request ? REPROCESS :
694 (device_settings->second.edge_mode == ANDROID_EDGE_MODE_HIGH_QUALITY) ?
695 HIGH_QUALITY : REGULAR;
696 auto ret = ProcessYUV420(
697 yuv_input, yuv_output, device_settings->second.gain,
698 process_type, device_settings->second.zoom_ratio,
699 rotate, device_chars->second);
700 if (ret != 0) {
701 (*b)->stream_buffer.status = BufferStatus::kError;
702 }
703 } break;
704 case HAL_PIXEL_FORMAT_Y16:
705 if (!reprocess_request) {
706 if ((*b)->dataSpace == HAL_DATASPACE_DEPTH) {
707 CaptureDepth((*b)->plane.img.img, device_settings->second.gain,
708 (*b)->width, (*b)->height, (*b)->plane.img.stride,
709 device_chars->second);
710 } else {
711 ALOGE("%s: Format %x with dataspace %x is TODO", __FUNCTION__,
712 (*b)->format, (*b)->dataSpace);
713 (*b)->stream_buffer.status = BufferStatus::kError;
714 }
715 } else {
716 ALOGE("%s: Reprocess requests with output format %x not supported!",
717 __FUNCTION__, (*b)->format);
718 (*b)->stream_buffer.status = BufferStatus::kError;
719 }
720 break;
721 default:
722 ALOGE("%s: Unknown format %x, no output", __FUNCTION__, (*b)->format);
723 (*b)->stream_buffer.status = BufferStatus::kError;
724 break;
725 }
726
727 b = next_buffers->erase(b);
728 }
729 }
730
731 if (reprocess_request) {
732 auto input_buffer = next_input_buffer->begin();
733 while (input_buffer != next_input_buffer->end()) {
734 (*input_buffer++)->stream_buffer.status = BufferStatus::kOk;
735 }
736 next_input_buffer->clear();
737 }
738
739 nsecs_t work_done_real_time = systemTime();
740 // Returning the results at this point is not entirely correct from a timing
741 // perspective. Under ideal conditions where 'ReturnResults' completes
742 // in less than 'time_accuracy', we need to return the results after the
743 // frame cycle expires. However under real conditions various system
744 // components like SurfaceFlinger, Encoder, LMK etc. could be consuming most
745 // of the resources and the duration of "ReturnResults" can get comparable to
746 // 'kDefaultFrameDuration'. This will skew the frame cycle and can result in
747 // potential frame drops. To avoid this scenario when we are running under
748 // tight deadlines (less than 'kReturnResultThreshod') try to return the
749 // results immediately. In all other cases with more relaxed deadlines
750 // the occasional bump during 'ReturnResults' should not have any
751 // noticeable effect.
752 if ((work_done_real_time + kReturnResultThreshod) > frame_end_real_time) {
753 ReturnResults(callback, std::move(settings), std::move(next_result));
754 }
755
756 work_done_real_time = systemTime();
757 ALOGVV("Sensor vertical blanking interval");
758 const nsecs_t time_accuracy = 2e6; // 2 ms of imprecision is ok
759 if (work_done_real_time < frame_end_real_time - time_accuracy) {
760 timespec t;
761 t.tv_sec = (frame_end_real_time - work_done_real_time) / 1000000000L;
762 t.tv_nsec = (frame_end_real_time - work_done_real_time) % 1000000000L;
763
764 int ret;
765 do {
766 ret = nanosleep(&t, &t);
767 } while (ret != 0);
768 }
769 nsecs_t end_real_time __unused = systemTime();
770 ALOGVV("Frame cycle took %" PRIu64 " ms, target %" PRIu64 " ms",
771 ns2ms(end_real_time - start_real_time), ns2ms(frame_duration));
772
773 ReturnResults(callback, std::move(settings), std::move(next_result));
774
775 return true;
776 };
777
778 void EmulatedSensor::ReturnResults(
779 HwlPipelineCallback callback,
780 std::unique_ptr<LogicalCameraSettings> settings,
781 std::unique_ptr<HwlPipelineResult> result) {
782 if ((callback.process_pipeline_result != nullptr) &&
783 (result.get() != nullptr) && (result->result_metadata.get() != nullptr)) {
784 auto logical_settings = settings->find(logical_camera_id_);
785 if (logical_settings == settings->end()) {
786 ALOGE("%s: Logical camera id: %u not found in settings!", __FUNCTION__,
787 logical_camera_id_);
788 return;
789 }
790 auto device_chars = chars_->find(logical_camera_id_);
791 if (device_chars == chars_->end()) {
792 ALOGE("%s: Sensor characteristics absent for device: %d", __func__,
793 logical_camera_id_);
794 return;
795 }
796
797 result->result_metadata->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_,
798 1);
799 if (logical_settings->second.lens_shading_map_mode ==
800 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON) {
801 if ((device_chars->second.lens_shading_map_size[0] > 0) &&
802 (device_chars->second.lens_shading_map_size[1] > 0)) {
803 // Perfect lens, no actual shading needed.
804 std::vector<float> lens_shading_map(
805 device_chars->second.lens_shading_map_size[0] *
806 device_chars->second.lens_shading_map_size[1] * 4,
807 1.f);
808
809 result->result_metadata->Set(ANDROID_STATISTICS_LENS_SHADING_MAP,
810 lens_shading_map.data(),
811 lens_shading_map.size());
812 }
813 }
814 if (logical_settings->second.report_video_stab) {
815 result->result_metadata->Set(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
816 &logical_settings->second.video_stab, 1);
817 }
818 if (logical_settings->second.report_edge_mode) {
819 result->result_metadata->Set(ANDROID_EDGE_MODE,
820 &logical_settings->second.edge_mode, 1);
821 }
822 if (logical_settings->second.report_neutral_color_point) {
823 result->result_metadata->Set(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
824 kNeutralColorPoint,
825 ARRAY_SIZE(kNeutralColorPoint));
826 }
827 if (logical_settings->second.report_green_split) {
828 result->result_metadata->Set(ANDROID_SENSOR_GREEN_SPLIT, &kGreenSplit, 1);
829 }
830 if (logical_settings->second.report_noise_profile) {
831 CalculateAndAppendNoiseProfile(
832 logical_settings->second.gain,
833 GetBaseGainFactor(device_chars->second.max_raw_value),
834 result->result_metadata.get());
835 }
836 if (logical_settings->second.report_rotate_and_crop) {
837 result->result_metadata->Set(ANDROID_SCALER_ROTATE_AND_CROP,
838 &logical_settings->second.rotate_and_crop, 1);
839 }
840
841 if (!result->physical_camera_results.empty()) {
842 for (auto& it : result->physical_camera_results) {
843 auto physical_settings = settings->find(it.first);
844 if (physical_settings == settings->end()) {
845 ALOGE("%s: Physical settings for camera id: %u are absent!",
846 __FUNCTION__, it.first);
847 continue;
848 }
849
850 // Sensor timestamp for all physical devices must be the same.
851 it.second->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_, 1);
852 if (physical_settings->second.report_neutral_color_point) {
853 it.second->Set(ANDROID_SENSOR_NEUTRAL_COLOR_POINT, kNeutralColorPoint,
854 ARRAY_SIZE(kNeutralColorPoint));
855 }
856 if (physical_settings->second.report_green_split) {
857 it.second->Set(ANDROID_SENSOR_GREEN_SPLIT, &kGreenSplit, 1);
858 }
859 if (physical_settings->second.report_noise_profile) {
860 auto device_chars = chars_->find(it.first);
861 if (device_chars == chars_->end()) {
862 ALOGE("%s: Sensor characteristics absent for device: %d", __func__,
863 it.first);
864 }
865 CalculateAndAppendNoiseProfile(
866 physical_settings->second.gain,
867 GetBaseGainFactor(device_chars->second.max_raw_value),
868 it.second.get());
869 }
870 }
871 }
872
873 callback.process_pipeline_result(std::move(result));
874 }
875 }
876
877 void EmulatedSensor::CalculateAndAppendNoiseProfile(
878 float gain /*in ISO*/, float base_gain_factor,
879 HalCameraMetadata* result /*out*/) {
880 if (result != nullptr) {
881 float total_gain = gain / 100.0 * base_gain_factor;
882 float noise_var_gain = total_gain * total_gain;
883 float read_noise_var =
884 kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
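// ANDROID_SENSOR_NOISE_PROFILE expects per-channel (S, O) coefficient pairs
// for the model variance(x) = S * x + O, i.e. shot noise gain and read noise.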
885 // Noise profile is the same across all 4 CFA channels
886 double noise_profile[2 * 4] = {
887 noise_var_gain, read_noise_var, noise_var_gain, read_noise_var,
888 noise_var_gain, read_noise_var, noise_var_gain, read_noise_var};
889 result->Set(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
890 ARRAY_SIZE(noise_profile));
891 }
892 }
893
894 void EmulatedSensor::CaptureRaw(uint8_t* img, uint32_t gain, uint32_t width,
895 const SensorCharacteristics& chars) {
896 ATRACE_CALL();
897 float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
898 float noise_var_gain = total_gain * total_gain;
899 float read_noise_var =
900 kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
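// Noise model: photon (shot) noise variance scales with the collected
// electron count, while read noise has pre- and post-gain components; a
// scaled uniform sample below approximates the resulting Gaussian spread.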
901 // RGGB Bayer pattern: even rows read out R/Gr pairs,
902 // odd rows read out Gb/B pairs.
903 int bayer_select[4] = {EmulatedScene::R, EmulatedScene::Gr, EmulatedScene::Gb,
904 EmulatedScene::B};
905 scene_->SetReadoutPixel(0, 0);
906 for (unsigned int y = 0; y < chars.height; y++) {
907 int* bayer_row = bayer_select + (y & 0x1) * 2;
908 uint16_t* px = (uint16_t*)img + y * width;
909 for (unsigned int x = 0; x < chars.width; x++) {
910 uint32_t electron_count;
911 electron_count = scene_->GetPixelElectrons()[bayer_row[x & 0x1]];
912
913 // TODO: Better pixel saturation curve?
914 electron_count = (electron_count < kSaturationElectrons)
915 ? electron_count
916 : kSaturationElectrons;
917
918 // TODO: Better A/D saturation curve?
919 uint16_t raw_count = electron_count * total_gain;
920 raw_count =
921 (raw_count < chars.max_raw_value) ? raw_count : chars.max_raw_value;
922
923 // Calculate noise value
924 // TODO: Use more-correct Gaussian instead of uniform noise
925 float photon_noise_var = electron_count * noise_var_gain;
926 float noise_stddev = sqrtf_approx(read_noise_var + photon_noise_var);
927 // Scaled to roughly match gaussian/uniform noise stddev
928 float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25;
929
930 raw_count += chars.black_level_pattern[bayer_row[x & 0x1]];
931 raw_count += noise_stddev * noise_sample;
932
933 *px++ = raw_count;
934 }
935 // TODO: Handle this better
936 // simulatedTime += mRowReadoutTime;
937 }
938 ALOGVV("Raw sensor image captured");
939 }
940
941 void EmulatedSensor::CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
942 uint32_t stride, RGBLayout layout, uint32_t gain,
943 const SensorCharacteristics& chars) {
944 ATRACE_CALL();
945 float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
946 // In fixed-point math, calculate total scaling from electrons to 8bpp
947 int scale64x = 64 * total_gain * 255 / chars.max_raw_value;
948 uint32_t inc_h = ceil((float)chars.width / width);
949 uint32_t inc_v = ceil((float)chars.height / height);
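// Subsample the sensor array so its full field of view maps onto the
// requested output resolution.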
950
951 for (unsigned int y = 0, outy = 0; y < chars.height; y += inc_v, outy++) {
952 scene_->SetReadoutPixel(0, y);
953 uint8_t* px = img + outy * stride;
954 for (unsigned int x = 0; x < chars.width; x += inc_h) {
955 uint32_t r_count, g_count, b_count;
956 // TODO: Perfect demosaicing is a cheat
957 const uint32_t* pixel = scene_->GetPixelElectrons();
958 r_count = pixel[EmulatedScene::R] * scale64x;
959 g_count = pixel[EmulatedScene::Gr] * scale64x;
960 b_count = pixel[EmulatedScene::B] * scale64x;
961
962 uint8_t r = r_count < 255 * 64 ? r_count / 64 : 255;
963 uint8_t g = g_count < 255 * 64 ? g_count / 64 : 255;
964 uint8_t b = b_count < 255 * 64 ? b_count / 64 : 255;
965 switch (layout) {
966 case RGB:
967 *px++ = r;
968 *px++ = g;
969 *px++ = b;
970 break;
971 case RGBA:
972 *px++ = r;
973 *px++ = g;
974 *px++ = b;
975 *px++ = 255;
976 break;
977 case ARGB:
978 *px++ = 255;
979 *px++ = r;
980 *px++ = g;
981 *px++ = b;
982 break;
983 default:
984 ALOGE("%s: RGB layout: %d not supported", __FUNCTION__, layout);
985 return;
986 }
987 for (unsigned int j = 1; j < inc_h; j++) scene_->GetPixelElectrons();
988 }
989 }
990 ALOGVV("RGB sensor image captured");
991 }
992
993 void EmulatedSensor::CaptureYUV420(YCbCrPlanes yuv_layout, uint32_t width,
994 uint32_t height, uint32_t gain,
995 float zoom_ratio, bool rotate,
996 const SensorCharacteristics& chars) {
997 ATRACE_CALL();
998 float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
999 // Using fixed-point math with 6 bits of fractional precision.
1000 // In fixed-point math, calculate total scaling from electrons to 8bpp
1001 const int scale64x =
1002 kFixedBitPrecision * total_gain * 255 / chars.max_raw_value;
1003 // Fixed-point coefficients for RGB-YUV transform
1004 // Based on JFIF RGB->YUV transform.
1005 // Cb/Cr offset scaled by 64x twice since they're applied post-multiply
1006 const int rgb_to_y[] = {19, 37, 7};
1007 const int rgb_to_cb[] = {-10, -21, 32, 524288};
1008 const int rgb_to_cr[] = {32, -26, -5, 524288};
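// These are the JFIF RGB->YCbCr coefficients scaled by 64, e.g.
// rgb_to_y ~= {0.299, 0.587, 0.114} * 64; the 524288 chroma offset is the
// bias of 128 scaled by 64 * 64 since it is added after the multiplies.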
1009 // Scale back to 8bpp non-fixed-point
1010 const int scale_out = 64;
1011 const int scale_out_sq = scale_out * scale_out; // after multiplies
1012
1013 // Aspect ratio of the requested output; used to size the rotated readout window.
1014 const float aspect_ratio = static_cast<float>(width) / height;
1015
1016 // precalculate normalized coordinates and dimensions
1017 const float norm_left_top = 0.5f - 0.5f / zoom_ratio;
1018 const float norm_rot_top = norm_left_top;
1019 const float norm_width = 1 / zoom_ratio;
1020 const float norm_rot_width = norm_width / aspect_ratio;
1021 const float norm_rot_height = norm_width;
1022 const float norm_rot_left =
1023 norm_left_top + (norm_width + norm_rot_width) * 0.5f;
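// The '_rot_' variants describe the readout window when 90-degree
// rotate-and-crop is requested: the scene is then walked by columns
// (GetPixelElectronsColumn) with width/height swapped via the aspect ratio.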
1024
1025 for (unsigned int out_y = 0; out_y < height; out_y++) {
1026 uint8_t* px_y = yuv_layout.img_y + out_y * yuv_layout.y_stride;
1027 uint8_t* px_cb = yuv_layout.img_cb + (out_y / 2) * yuv_layout.cbcr_stride;
1028 uint8_t* px_cr = yuv_layout.img_cr + (out_y / 2) * yuv_layout.cbcr_stride;
1029
1030 for (unsigned int out_x = 0; out_x < width; out_x++) {
1031 int x, y;
1032 float norm_x = out_x / (width * zoom_ratio);
1033 float norm_y = out_y / (height * zoom_ratio);
1034 if (rotate) {
1035 x = static_cast<int>(chars.width *
1036 (norm_rot_left - norm_y * norm_rot_width));
1037 y = static_cast<int>(chars.height *
1038 (norm_rot_top + norm_x * norm_rot_height));
1039 } else {
1040 x = static_cast<int>(chars.width * (norm_left_top + norm_x));
1041 y = static_cast<int>(chars.height * (norm_left_top + norm_y));
1042 }
1043 x = std::min(std::max(x, 0), (int)chars.width - 1);
1044 y = std::min(std::max(y, 0), (int)chars.height - 1);
1045 scene_->SetReadoutPixel(x, y);
1046
1047 int32_t r_count, g_count, b_count;
1048 // TODO: Perfect demosaicing is a cheat
1049 const uint32_t* pixel = rotate ? scene_->GetPixelElectronsColumn()
1050 : scene_->GetPixelElectrons();
1051 r_count = pixel[EmulatedScene::R] * scale64x;
1052 r_count = r_count < kSaturationPoint ? r_count : kSaturationPoint;
1053 g_count = pixel[EmulatedScene::Gr] * scale64x;
1054 g_count = g_count < kSaturationPoint ? g_count : kSaturationPoint;
1055 b_count = pixel[EmulatedScene::B] * scale64x;
1056 b_count = b_count < kSaturationPoint ? b_count : kSaturationPoint;
1057
1058 // Gamma correction
1059 r_count = gamma_table_[r_count];
1060 g_count = gamma_table_[g_count];
1061 b_count = gamma_table_[b_count];
1062
1063 *px_y++ = (rgb_to_y[0] * r_count + rgb_to_y[1] * g_count +
1064 rgb_to_y[2] * b_count) /
1065 scale_out_sq;
1066 if (out_y % 2 == 0 && out_x % 2 == 0) {
1067 *px_cb = (rgb_to_cb[0] * r_count + rgb_to_cb[1] * g_count +
1068 rgb_to_cb[2] * b_count + rgb_to_cb[3]) /
1069 scale_out_sq;
1070 *px_cr = (rgb_to_cr[0] * r_count + rgb_to_cr[1] * g_count +
1071 rgb_to_cr[2] * b_count + rgb_to_cr[3]) /
1072 scale_out_sq;
1073 px_cr += yuv_layout.cbcr_step;
1074 px_cb += yuv_layout.cbcr_step;
1075 }
1076 }
1077 }
1078 ALOGVV("YUV420 sensor image captured");
1079 }
1080
1081 void EmulatedSensor::CaptureDepth(uint8_t* img, uint32_t gain, uint32_t width,
1082 uint32_t height, uint32_t stride,
1083 const SensorCharacteristics& chars) {
1084 ATRACE_CALL();
1085 float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
1086 // In fixed-point math, calculate scaling factor to 13bpp millimeters
1087 int scale64x = 64 * total_gain * 8191 / chars.max_raw_value;
1088 uint32_t inc_h = ceil((float)chars.width / width);
1089 uint32_t inc_v = ceil((float)chars.height / height);
1090
1091 for (unsigned int y = 0, out_y = 0; y < chars.height; y += inc_v, out_y++) {
1092 scene_->SetReadoutPixel(0, y);
1093 uint16_t* px = (uint16_t*)(img + (out_y * stride));
1094 for (unsigned int x = 0; x < chars.width; x += inc_h) {
1095 uint32_t depth_count;
1096 // TODO: Make up real depth scene instead of using green channel
1097 // as depth
1098 const uint32_t* pixel = scene_->GetPixelElectrons();
1099 depth_count = pixel[EmulatedScene::Gr] * scale64x;
1100
1101 *px++ = depth_count < 8191 * 64 ? depth_count / 64 : 0;
1102 for (unsigned int j = 1; j < inc_h; j++) scene_->GetPixelElectrons();
1103 }
1104 // TODO: Handle this better
1105 // simulatedTime += mRowReadoutTime;
1106 }
1107 ALOGVV("Depth sensor image captured");
1108 }
1109
1110 status_t EmulatedSensor::ProcessYUV420(const YUV420Frame& input,
1111 const YUV420Frame& output, uint32_t gain,
1112 ProcessType process_type, float zoom_ratio,
1113 bool rotate_and_crop,
1114 const SensorCharacteristics& chars) {
1115 ATRACE_CALL();
1116 size_t input_width, input_height;
1117 YCbCrPlanes input_planes, output_planes;
1118 std::vector<uint8_t> temp_yuv, temp_output_uv, temp_input_uv;
1119
1120 switch (process_type) {
1121 case HIGH_QUALITY:
1122 CaptureYUV420(output.planes, output.width, output.height, gain, zoom_ratio,
1123 rotate_and_crop, chars);
1124 return OK;
1125 case REPROCESS:
1126 input_width = input.width;
1127 input_height = input.height;
1128 input_planes = input.planes;
1129
1130 // libyuv only supports planar YUV420 during scaling.
1131 // Split the input U/V plane into separate planes if needed.
1132 if (input_planes.cbcr_step == 2) {
1133 temp_input_uv.resize(input_width * input_height / 2);
1134 auto temp_uv_buffer = temp_input_uv.data();
1135 input_planes.img_cb = temp_uv_buffer;
1136 input_planes.img_cr = temp_uv_buffer + (input_width * input_height) / 4;
1137 input_planes.cbcr_stride = input_width / 2;
1138 if (input.planes.img_cb < input.planes.img_cr) {
1139 libyuv::SplitUVPlane(input.planes.img_cb, input.planes.cbcr_stride,
1140 input_planes.img_cb, input_planes.cbcr_stride,
1141 input_planes.img_cr, input_planes.cbcr_stride,
1142 input_width / 2, input_height / 2);
1143 } else {
1144 libyuv::SplitUVPlane(input.planes.img_cr, input.planes.cbcr_stride,
1145 input_planes.img_cr, input_planes.cbcr_stride,
1146 input_planes.img_cb, input_planes.cbcr_stride,
1147 input_width / 2, input_height / 2);
1148 }
1149 }
1150 break;
1151 case REGULAR:
1152 default:
1153 // Generate the smallest possible frame with the expected AR and
1154 // then scale using libyuv.
1155 float aspect_ratio = static_cast<float>(output.width) / output.height;
1156 zoom_ratio = std::max(1.f, zoom_ratio);
1157 input_width = EmulatedScene::kSceneWidth * aspect_ratio;
1158 input_height = EmulatedScene::kSceneHeight;
1159 temp_yuv.resize((input_width * input_height * 3) / 2);
1160 auto temp_yuv_buffer = temp_yuv.data();
1161 input_planes = {
1162 .img_y = temp_yuv_buffer,
1163 .img_cb = temp_yuv_buffer + input_width * input_height,
1164 .img_cr = temp_yuv_buffer + (input_width * input_height * 5) / 4,
1165 .y_stride = static_cast<uint32_t>(input_width),
1166 .cbcr_stride = static_cast<uint32_t>(input_width) / 2,
1167 .cbcr_step = 1};
1168 CaptureYUV420(input_planes, input_width, input_height, gain, zoom_ratio,
1169 rotate_and_crop, chars);
1170 }
1171
1172 output_planes = output.planes;
1173 // libyuv only supports planar YUV420 during scaling.
1174 // Treat the output UV space as planar first and then
1175 // interleave in the second step.
1176 if (output_planes.cbcr_step == 2) {
1177 temp_output_uv.resize(output.width * output.height / 2);
1178 auto temp_uv_buffer = temp_output_uv.data();
1179 output_planes.img_cb = temp_uv_buffer;
1180 output_planes.img_cr = temp_uv_buffer + output.width * output.height / 4;
1181 output_planes.cbcr_stride = output.width / 2;
1182 }
1183
1184 auto ret = I420Scale(
1185 input_planes.img_y, input_planes.y_stride, input_planes.img_cb,
1186 input_planes.cbcr_stride, input_planes.img_cr, input_planes.cbcr_stride,
1187 input_width, input_height, output_planes.img_y, output_planes.y_stride,
1188 output_planes.img_cb, output_planes.cbcr_stride, output_planes.img_cr,
1189 output_planes.cbcr_stride, output.width, output.height,
1190 libyuv::kFilterNone);
1191 if (ret != 0) {
1192 ALOGE("%s: Failed during YUV scaling: %d", __FUNCTION__, ret);
1193 return ret;
1194 }
1195
1196 // Merge U/V Planes for the interleaved case
1197 if (output_planes.cbcr_step == 2) {
1198 if (output.planes.img_cb < output.planes.img_cr) {
1199 libyuv::MergeUVPlane(output_planes.img_cb, output_planes.cbcr_stride,
1200 output_planes.img_cr, output_planes.cbcr_stride,
1201 output.planes.img_cb, output.planes.cbcr_stride,
1202 output.width / 2, output.height / 2);
1203 } else {
1204 libyuv::MergeUVPlane(output_planes.img_cr, output_planes.cbcr_stride,
1205 output_planes.img_cb, output_planes.cbcr_stride,
1206 output.planes.img_cr, output.planes.cbcr_stride,
1207 output.width / 2, output.height / 2);
1208 }
1209 }
1210
1211 return ret;
1212 }
1213
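// Standard sRGB opto-electronic transfer function applied in fixed-point:
// linear values <= 0.0031308 are scaled by 12.92, larger values follow
// 1.055 * x^(1/2.4) - 0.055.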
1214 int32_t EmulatedSensor::ApplysRGBGamma(int32_t value, int32_t saturation) {
1215 float n_value = (static_cast<float>(value) / saturation);
1216 n_value = (n_value <= 0.0031308f)
1217 ? n_value * 12.92f
1218 : 1.055f * pow(n_value, 0.4166667f) - 0.055f;
1219 return n_value * saturation;
1220 }
1221
1222 } // namespace android
1223