/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCameraProviderHwlImpl"
#include "EmulatedCameraProviderHWLImpl.h"

#include <android-base/file.h>
#include <android-base/strings.h>
#include <cutils/properties.h>
#include <hardware/camera_common.h>
#include <log/log.h>

#include "EmulatedCameraDeviceHWLImpl.h"
#include "EmulatedCameraDeviceSessionHWLImpl.h"
#include "EmulatedLogicalRequestState.h"
#include "EmulatedSensor.h"
#include "EmulatedTorchState.h"
#include "utils/HWLUtils.h"
#include "vendor_tag_defs.h"

namespace android {

// Location of the camera configuration files.
const char* EmulatedCameraProviderHwlImpl::kConfigurationFileLocation[] = {
    "/vendor/etc/config/emu_camera_back.json",
    "/vendor/etc/config/emu_camera_front.json",
    "/vendor/etc/config/emu_camera_depth.json",
};

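// Reference stream sizes used by the mandatory concurrent stream checks below.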
constexpr StreamSize s240pStreamSize = std::pair(240, 180);
constexpr StreamSize s720pStreamSize = std::pair(1280, 720);
constexpr StreamSize s1440pStreamSize = std::pair(1920, 1440);

std::unique_ptr<EmulatedCameraProviderHwlImpl>
EmulatedCameraProviderHwlImpl::Create() {
  auto provider = std::unique_ptr<EmulatedCameraProviderHwlImpl>(
      new EmulatedCameraProviderHwlImpl());

  if (provider == nullptr) {
    ALOGE("%s: Creating EmulatedCameraProviderHwlImpl failed.", __FUNCTION__);
    return nullptr;
  }

  status_t res = provider->Initialize();
  if (res != OK) {
    ALOGE("%s: Initializing EmulatedCameraProviderHwlImpl failed: %s (%d).",
          __FUNCTION__, strerror(-res), res);
    return nullptr;
  }

  ALOGI("%s: Created EmulatedCameraProviderHwlImpl", __FUNCTION__);

  return provider;
}

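// Resolves a fully qualified metadata entry name (e.g. "android.lens.facing")
// to its numeric tag by matching the longest known section prefix and then the
// tag name within that section.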
status_t EmulatedCameraProviderHwlImpl::GetTagFromName(const char* name,
                                                       uint32_t* tag) {
  if (name == nullptr || tag == nullptr) {
    return BAD_VALUE;
  }

  size_t name_length = strlen(name);
  // First, find the section by the longest string match
  const char* section = NULL;
  size_t section_index = 0;
  size_t section_length = 0;
  for (size_t i = 0; i < ANDROID_SECTION_COUNT; ++i) {
    const char* str = camera_metadata_section_names[i];

    ALOGV("%s: Trying to match against section '%s'", __FUNCTION__, str);

    if (strstr(name, str) == name) {  // name begins with the section name
      size_t str_length = strlen(str);

      ALOGV("%s: Name begins with section name", __FUNCTION__);

      // section name is the longest we've found so far
      if (section == NULL || section_length < str_length) {
        section = str;
        section_index = i;
        section_length = str_length;

        ALOGV("%s: Found new best section (%s)", __FUNCTION__, section);
      }
    }
  }

  if (section == NULL) {
    return NAME_NOT_FOUND;
  } else {
    ALOGV("%s: Found matched section '%s' (%zu)", __FUNCTION__, section,
          section_index);
  }

  // Get the tag name component of the name
  const char* name_tag_name = name + section_length + 1;  // x.y.z -> z
  if (section_length + 1 >= name_length) {
    return BAD_VALUE;
  }

  // Match rest of name against the tag names in that section only
  uint32_t candidate_tag = 0;
  // Match built-in tags (typically android.*)
  uint32_t tag_begin, tag_end;  // [tag_begin, tag_end)
  tag_begin = camera_metadata_section_bounds[section_index][0];
  tag_end = camera_metadata_section_bounds[section_index][1];

  for (candidate_tag = tag_begin; candidate_tag < tag_end; ++candidate_tag) {
    const char* tag_name = get_camera_metadata_tag_name(candidate_tag);

    if (strcmp(name_tag_name, tag_name) == 0) {
      ALOGV("%s: Found matched tag '%s' (%d)", __FUNCTION__, tag_name,
            candidate_tag);
      break;
    }
  }

  if (candidate_tag == tag_end) {
    return NAME_NOT_FOUND;
  }

  *tag = candidate_tag;
  return OK;
}

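// Returns true if any size in |stream_sizes| covers at least as many pixels as
// |compare_size|.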
static bool IsMaxSupportedSizeGreaterThanOrEqual(
    const std::set<StreamSize>& stream_sizes, StreamSize compare_size) {
  for (const auto& stream_size : stream_sizes) {
    if (stream_size.first * stream_size.second >=
        compare_size.first * compare_size.second) {
      return true;
    }
  }
  return false;
}

static bool SupportsCapability(const uint32_t camera_id,
                               const HalCameraMetadata& static_metadata,
                               uint8_t cap) {
  camera_metadata_ro_entry_t entry;
  auto ret = static_metadata.Get(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
  if (ret != OK || (entry.count == 0)) {
    ALOGE("Error getting capabilities for camera id %u", camera_id);
    return false;
  }
  for (size_t i = 0; i < entry.count; i++) {
    if (entry.data.u8[i] == cap) {
      return true;
    }
  }
  return false;
}

bool EmulatedCameraProviderHwlImpl::SupportsMandatoryConcurrentStreams(
    uint32_t camera_id) {
  HalCameraMetadata& static_metadata = *(static_metadata_[camera_id]);
  auto map = std::make_unique<StreamConfigurationMap>(static_metadata);
  auto yuv_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_YCBCR_420_888);
  auto blob_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_BLOB);
  auto depth16_output_sizes = map->GetOutputSizes(HAL_PIXEL_FORMAT_Y16);
  auto priv_output_sizes =
      map->GetOutputSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);

  if (!SupportsCapability(
          camera_id, static_metadata,
          ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) &&
      IsMaxSupportedSizeGreaterThanOrEqual(depth16_output_sizes,
                                           s240pStreamSize)) {
    ALOGI("%s: Depth only output supported by camera id %u", __FUNCTION__,
          camera_id);
    return true;
  }
  if (yuv_output_sizes.empty()) {
    ALOGW("%s: No YUV output supported by camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  if (priv_output_sizes.empty()) {
    ALOGW("No PRIV output supported by camera id %u", camera_id);
    return false;
  }

  if (blob_output_sizes.empty()) {
    ALOGW("No BLOB output supported by camera id %u", camera_id);
    return false;
  }

  // According to the HAL spec, if a format's maximum supported size is at
  // least 1440p, the device must support both 1440p and 720p streams for the
  // PRIV, JPEG and YUV formats; if the maximum is at least 720p, a 720p stream
  // must be supported. Otherwise it must support 2 streams (YUV / PRIV + JPEG)
  // of the max supported size.

  // Check for YUV output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(yuv_output_sizes, s1440pStreamSize) &&
      (yuv_output_sizes.find(s1440pStreamSize) == yuv_output_sizes.end() ||
       yuv_output_sizes.find(s720pStreamSize) == yuv_output_sizes.end())) {
    ALOGW("%s: 1440p+720p YUV outputs not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(yuv_output_sizes,
                                                  s720pStreamSize) &&
             yuv_output_sizes.find(s720pStreamSize) == yuv_output_sizes.end()) {
    ALOGW("%s: 720p YUV output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  // Check for PRIV output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(priv_output_sizes, s1440pStreamSize) &&
      (priv_output_sizes.find(s1440pStreamSize) == priv_output_sizes.end() ||
       priv_output_sizes.find(s720pStreamSize) == priv_output_sizes.end())) {
    ALOGW("%s: 1440p + 720p PRIV outputs not found for camera id %u",
          __FUNCTION__, camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(priv_output_sizes,
                                                  s720pStreamSize) &&
             priv_output_sizes.find(s720pStreamSize) == priv_output_sizes.end()) {
    ALOGW("%s: 720p PRIV output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  // Check for BLOB output sizes
  if (IsMaxSupportedSizeGreaterThanOrEqual(blob_output_sizes, s1440pStreamSize) &&
      (blob_output_sizes.find(s1440pStreamSize) == blob_output_sizes.end() ||
       blob_output_sizes.find(s720pStreamSize) == blob_output_sizes.end())) {
    ALOGW("%s: 1440p + 720p BLOB outputs not found for camera id %u",
          __FUNCTION__, camera_id);
    return false;
  } else if (IsMaxSupportedSizeGreaterThanOrEqual(blob_output_sizes,
                                                  s720pStreamSize) &&
             blob_output_sizes.find(s720pStreamSize) == blob_output_sizes.end()) {
    ALOGW("%s: 720p BLOB output not found for camera id %u", __FUNCTION__,
          camera_id);
    return false;
  }

  return true;
}

status_t EmulatedCameraProviderHwlImpl::GetConcurrentStreamingCameraIds(
    std::vector<std::unordered_set<uint32_t>>* combinations) {
  if (combinations == nullptr) {
    return BAD_VALUE;
  }
  // Collect all camera ids that support the guaranteed stream combinations
  // (720p YUV and IMPLEMENTATION_DEFINED) and put them in one set. We don't
  // make all possible combinations since it should be possible to stream all
  // of them at once in the emulated camera.
  std::unordered_set<uint32_t> candidate_ids;
  for (auto& entry : camera_id_map_) {
    if (SupportsMandatoryConcurrentStreams(entry.first)) {
      candidate_ids.insert(entry.first);
    }
  }
  combinations->emplace_back(std::move(candidate_ids));
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::IsConcurrentStreamCombinationSupported(
    const std::vector<CameraIdAndStreamConfiguration>& configs,
    bool* is_supported) {
  *is_supported = false;

  // Go through the given camera ids, get their sensor characteristics, stream
  // config maps and call EmulatedSensor::IsStreamCombinationSupported()
  for (auto& config : configs) {
    // TODO: Consider caching sensor characteristics and StreamConfigurationMap
    if (camera_id_map_.find(config.camera_id) == camera_id_map_.end()) {
      ALOGE("%s: Camera id %u does not exist", __FUNCTION__, config.camera_id);
      return BAD_VALUE;
    }
    auto stream_configuration_map = std::make_unique<StreamConfigurationMap>(
        *(static_metadata_[config.camera_id]));
    SensorCharacteristics sensor_chars;
    status_t ret = GetSensorCharacteristics(
        (static_metadata_[config.camera_id]).get(), &sensor_chars);
    if (ret != OK) {
      ALOGE("%s: Unable to extract sensor chars for camera id %u", __FUNCTION__,
            config.camera_id);
      return UNKNOWN_ERROR;
    }
    if (!EmulatedSensor::IsStreamCombinationSupported(
            config.stream_configuration, *stream_configuration_map,
            sensor_chars)) {
      return OK;
    }
  }

  *is_supported = true;
  return OK;
}

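// Returns true if |value| consists only of digits and punctuation characters.
// Punctuation is accepted so that signed values such as "-1" still take the
// numeric parsing path instead of being treated as enum names.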
bool IsDigit(const std::string& value) {
  if (value.empty()) {
    return false;
  }

  for (const auto& c : value) {
    // Cast to unsigned char to avoid undefined behavior for negative chars.
    if (!std::isdigit(static_cast<unsigned char>(c)) &&
        !std::ispunct(static_cast<unsigned char>(c))) {
      return false;
    }
  }

  return true;
}

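// Converts an enum name from the JSON configuration (e.g. "FULL") into the
// corresponding numeric metadata value via camera_metadata_enum_value().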
template <typename T>
status_t GetEnumValue(uint32_t tag_id, const char* cstring, T* result /*out*/) {
  if ((result == nullptr) || (cstring == nullptr)) {
    return BAD_VALUE;
  }

  uint32_t enum_value;
  auto ret =
      camera_metadata_enum_value(tag_id, cstring, strlen(cstring), &enum_value);
  if (ret != OK) {
    ALOGE("%s: Failed to match tag id: 0x%x value: %s", __FUNCTION__, tag_id,
          cstring);
    return ret;
  }
  *result = enum_value;

  return OK;
}

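// The JSON configuration stores every scalar value as a string. The helpers
// below parse such strings as decimal numbers and, for byte and int32 tags,
// fall back to metadata enum names.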
status_t GetUInt8Value(const Json::Value& value, uint32_t tag_id,
                       uint8_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    if (IsDigit(value.asString())) {
      auto int_value = strtol(value.asCString(), nullptr, 10);
      if ((int_value >= 0) && (int_value <= UINT8_MAX) && (errno == 0)) {
        *result = int_value;
      } else {
        ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
        return BAD_VALUE;
      }
    } else {
      return GetEnumValue(tag_id, value.asCString(), result);
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetInt32Value(const Json::Value& value, uint32_t tag_id,
                       int32_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    if (IsDigit(value.asString())) {
      auto int_value = strtol(value.asCString(), nullptr, 10);
      if ((int_value >= INT32_MIN) && (int_value <= INT32_MAX) && (errno == 0)) {
        *result = int_value;
      } else {
        ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
        return BAD_VALUE;
      }
    } else {
      return GetEnumValue(tag_id, value.asCString(), result);
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetInt64Value(const Json::Value& value, uint32_t tag_id,
                       int64_t* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto int_value = strtoll(value.asCString(), nullptr, 10);
    if ((int_value >= INT64_MIN) && (int_value <= INT64_MAX) && (errno == 0)) {
      *result = int_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetFloatValue(const Json::Value& value, uint32_t tag_id,
                       float* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto float_value = strtof(value.asCString(), nullptr);
    if (errno == 0) {
      *result = float_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

status_t GetDoubleValue(const Json::Value& value, uint32_t tag_id,
                        double* result /*out*/) {
  if (result == nullptr) {
    return BAD_VALUE;
  }

  if (value.isString()) {
    errno = 0;
    auto double_value = strtod(value.asCString(), nullptr);
    if (errno == 0) {
      *result = double_value;
    } else {
      ALOGE("%s: Failed parsing tag id 0x%x", __func__, tag_id);
      return BAD_VALUE;
    }
  } else {
    ALOGE(
        "%s: Unexpected json type: %d! All value types are expected to be "
        "strings!",
        __FUNCTION__, value.type());
    return BAD_VALUE;
  }

  return OK;
}

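// Strips vendor-specific tags from the available-keys lists. The emulated
// provider registers no vendor tags (see GetVendorTags() below), so such
// entries in the configuration would be dangling.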
template <typename T>
void FilterVendorKeys(uint32_t tag_id, std::vector<T>* values) {
  if ((values == nullptr) || (values->empty())) {
    return;
  }

  switch (tag_id) {
    case ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS:
    case ANDROID_REQUEST_AVAILABLE_RESULT_KEYS:
    case ANDROID_REQUEST_AVAILABLE_SESSION_KEYS:
    case ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: {
      auto it = values->begin();
      while (it != values->end()) {
        // Per the spec, the tags handled here are of type "int32_t", in which
        // case all vendor-defined values will be negative.
        if (*it < 0) {
          it = values->erase(it);
        } else {
          it++;
        }
      }
    } break;
    default:
      // no-op
      break;
  }
}

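// Parses a JSON array of values for |tag_id| with |get_val_func| and, after
// filtering out vendor keys, writes the result into |meta|.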
template <typename T, typename func_type>
status_t InsertTag(const Json::Value& json_value, uint32_t tag_id,
                   func_type get_val_func, HalCameraMetadata* meta /*out*/) {
  if (meta == nullptr) {
    return BAD_VALUE;
  }

  std::vector<T> values;
  T result;
  status_t ret = -1;
  values.reserve(json_value.size());
  for (const auto& val : json_value) {
    ret = get_val_func(val, tag_id, &result);
    if (ret != OK) {
      break;
    }

    values.push_back(result);
  }

  if (ret == OK) {
    FilterVendorKeys(tag_id, &values);
    ret = meta->Set(tag_id, values.data(), values.size());
  }

  return ret;
}

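// Rational tags are stored in the configuration as a flat array of
// numerator/denominator pairs, hence the even array size requirement.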
status_t InsertRationalTag(const Json::Value& json_value, uint32_t tag_id,
                           HalCameraMetadata* meta /*out*/) {
  if (meta == nullptr) {
    return BAD_VALUE;
  }

  std::vector<camera_metadata_rational_t> values;
  status_t ret = OK;
  if (json_value.isArray() && ((json_value.size() % 2) == 0)) {
    values.reserve(json_value.size() / 2);
    auto it = json_value.begin();
    while (it != json_value.end()) {
      camera_metadata_rational_t result;
      ret = GetInt32Value((*it), tag_id, &result.numerator);
      it++;
      ret |= GetInt32Value((*it), tag_id, &result.denominator);
      it++;
      if (ret != OK) {
        break;
      }
      values.push_back(result);
    }
  } else {
    ALOGE("%s: json type: %d doesn't match with rational tag type",
          __FUNCTION__, json_value.type());
    return BAD_VALUE;
  }

  if (ret == OK) {
    ret = meta->Set(tag_id, values.data(), values.size());
  }

  return ret;
}

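// Builds the static metadata for a single camera from one JSON object and
// stores it in 'static_metadata_'. Returns the id (index) that was used; when
// 'id' is negative a new entry is appended. On failure a (negative) status_t
// is returned, cast to the uint32_t return type.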
uint32_t EmulatedCameraProviderHwlImpl::ParseCharacteristics(
    const Json::Value& value, ssize_t id) {
  if (!value.isObject()) {
    ALOGE("%s: Configuration root is not an object", __FUNCTION__);
    return BAD_VALUE;
  }

  auto static_meta = HalCameraMetadata::Create(1, 10);
  auto members = value.getMemberNames();
  for (const auto& member : members) {
    uint32_t tag_id;
    auto stat = GetTagFromName(member.c_str(), &tag_id);
    if (stat != OK) {
      ALOGE("%s: tag %s not supported, skipping!", __func__, member.c_str());
      continue;
    }

    auto tag_type = get_camera_metadata_tag_type(tag_id);
    auto tag_value = value[member.c_str()];
    switch (tag_type) {
      case TYPE_BYTE:
        InsertTag<uint8_t>(tag_value, tag_id, GetUInt8Value, static_meta.get());
        break;
      case TYPE_INT32:
        InsertTag<int32_t>(tag_value, tag_id, GetInt32Value, static_meta.get());
        break;
      case TYPE_INT64:
        InsertTag<int64_t>(tag_value, tag_id, GetInt64Value, static_meta.get());
        break;
      case TYPE_FLOAT:
        InsertTag<float>(tag_value, tag_id, GetFloatValue, static_meta.get());
        break;
      case TYPE_DOUBLE:
        InsertTag<double>(tag_value, tag_id, GetDoubleValue, static_meta.get());
        break;
      case TYPE_RATIONAL:
        InsertRationalTag(tag_value, tag_id, static_meta.get());
        break;
      default:
        ALOGE("%s: Unsupported tag type: %d!", __FUNCTION__, tag_type);
    }
  }

  SensorCharacteristics sensor_characteristics;
  auto ret =
      GetSensorCharacteristics(static_meta.get(), &sensor_characteristics);
  if (ret != OK) {
    ALOGE("%s: Unable to extract sensor characteristics!", __FUNCTION__);
    return ret;
  }

  if (!EmulatedSensor::AreCharacteristicsSupported(sensor_characteristics)) {
    ALOGE("%s: Sensor characteristics not supported!", __FUNCTION__);
    return BAD_VALUE;
  }

  // Although we don't support HdrPlus, this data is still required by HWL
  int32_t payload_frames = 0;
  static_meta->Set(google_camera_hal::kHdrplusPayloadFrames, &payload_frames, 1);

  if (id < 0) {
    static_metadata_.push_back(std::move(static_meta));
    id = static_metadata_.size() - 1;
  } else {
    static_metadata_[id] = std::move(static_meta);
  }

  return id;
}

status_t EmulatedCameraProviderHwlImpl::WaitForQemuSfFakeCameraPropertyAvailable() {
  // Camera service may start running before qemu-props sets
  // qemu.sf.fake_camera to any of the following four values:
  // "none,front,back,both"; so we need to wait.
  int num_attempts = 100;
  char prop[PROPERTY_VALUE_MAX];
  bool timeout = true;
  for (int i = 0; i < num_attempts; ++i) {
    if (property_get("qemu.sf.fake_camera", prop, nullptr) != 0) {
      timeout = false;
      break;
    }
    usleep(5000);
  }
  if (timeout) {
    ALOGE("timeout (%dms) waiting for property qemu.sf.fake_camera to be set\n",
          5 * num_attempts);
    return BAD_VALUE;
  }
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::Initialize() {
  // GCH expects all physical ids to be bigger than the logical ones.
  // Resize 'static_metadata_' to fit all logical devices and insert them
  // accordingly, push any remaining physical cameras in the back.
  std::string config;
  size_t logical_id = 0;
  std::vector<const char*> configurationFileLocation;
  char prop[PROPERTY_VALUE_MAX];
  if (!property_get_bool("ro.kernel.qemu", false)) {
    for (const auto& iter : kConfigurationFileLocation) {
      configurationFileLocation.emplace_back(iter);
    }
  } else {
    // Android Studio Emulator
    if (!property_get_bool("ro.kernel.qemu.legacy_fake_camera", false)) {
      if (WaitForQemuSfFakeCameraPropertyAvailable() == OK) {
        property_get("qemu.sf.fake_camera", prop, nullptr);
        if (strcmp(prop, "both") == 0) {
          configurationFileLocation.emplace_back(kConfigurationFileLocation[0]);
          configurationFileLocation.emplace_back(kConfigurationFileLocation[1]);
        } else if (strcmp(prop, "front") == 0) {
          configurationFileLocation.emplace_back(kConfigurationFileLocation[1]);
          logical_id = 1;
        } else if (strcmp(prop, "back") == 0) {
          configurationFileLocation.emplace_back(kConfigurationFileLocation[0]);
          logical_id = 1;
        }
      }
    }
  }
  // Reserve one logical slot per configuration file, starting at 'logical_id';
  // physical cameras get appended after the logical ones.
  static_metadata_.resize(logical_id + configurationFileLocation.size());

  for (const auto& config_path : configurationFileLocation) {
    if (!android::base::ReadFileToString(config_path, &config)) {
      ALOGW("%s: Could not open configuration file: %s", __FUNCTION__,
            config_path);
      continue;
    }

    Json::Reader config_reader;
    Json::Value root;
    if (!config_reader.parse(config, root)) {
      ALOGE("Could not parse configuration file: %s",
            config_reader.getFormattedErrorMessages().c_str());
      return BAD_VALUE;
    }

    if (root.isArray()) {
      auto device_iter = root.begin();
      auto result_id = ParseCharacteristics(*device_iter, logical_id);
      if (logical_id != result_id) {
        return result_id;
      }
      device_iter++;

      // The first device entry is always the logical camera, followed by the
      // physical devices; there must be at least two of them.
      camera_id_map_.emplace(
          logical_id, std::vector<std::pair<CameraDeviceStatus, uint32_t>>());
      if (root.size() >= 3) {
        camera_id_map_[logical_id].reserve(root.size() - 1);
        size_t current_physical_device = 0;
        while (device_iter != root.end()) {
          auto physical_id = ParseCharacteristics(*device_iter, /*id*/ -1);
          // ParseCharacteristics() returns a status_t cast to uint32_t on
          // failure, so check for errors via a signed cast.
          if (static_cast<int32_t>(physical_id) < 0) {
            return static_cast<int32_t>(physical_id);
          }
          // Only notify unavailable physical camera if there are more than 2
          // physical cameras backing the logical camera
          auto device_status = (current_physical_device < 2)
                                   ? CameraDeviceStatus::kPresent
                                   : CameraDeviceStatus::kNotPresent;
          camera_id_map_[logical_id].push_back(
              std::make_pair(device_status, physical_id));
          device_iter++;
          current_physical_device++;
        }

        auto physical_devices = std::make_unique<PhysicalDeviceMap>();
        for (const auto& physical_device : camera_id_map_[logical_id]) {
          physical_devices->emplace(
              physical_device.second,
              std::make_pair(physical_device.first,
                             HalCameraMetadata::Clone(
                                 static_metadata_[physical_device.second].get())));
        }
        auto updated_logical_chars =
            EmulatedLogicalRequestState::AdaptLogicalCharacteristics(
                HalCameraMetadata::Clone(static_metadata_[logical_id].get()),
                std::move(physical_devices));
        if (updated_logical_chars.get() != nullptr) {
          static_metadata_[logical_id].swap(updated_logical_chars);
        } else {
          ALOGE("%s: Failed to update the logical camera characteristics!",
                __FUNCTION__);
          return BAD_VALUE;
        }
      }
    } else {
      auto result_id = ParseCharacteristics(root, logical_id);
      if (result_id != logical_id) {
        return result_id;
      }
      camera_id_map_.emplace(
          logical_id, std::vector<std::pair<CameraDeviceStatus, uint32_t>>());
    }

    logical_id++;
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::SetCallback(
    const HwlCameraProviderCallback& callback) {
  torch_cb_ = callback.torch_mode_status_change;
  physical_camera_status_cb_ = callback.physical_camera_device_status_change;

  return OK;
}

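// Schedules NotifyPhysicalCameraUnavailable() on a separate thread. Only one
// future is kept, so repeated calls while it is still pending are no-ops.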
status_t EmulatedCameraProviderHwlImpl::TriggerDeferredCallbacks() {
  std::lock_guard<std::mutex> lock(status_callback_future_lock_);
  if (status_callback_future_.valid()) {
    return OK;
  }

  status_callback_future_ = std::async(
      std::launch::async,
      &EmulatedCameraProviderHwlImpl::NotifyPhysicalCameraUnavailable, this);
  return OK;
}

void EmulatedCameraProviderHwlImpl::WaitForStatusCallbackFuture() {
  {
    std::lock_guard<std::mutex> lock(status_callback_future_lock_);
    if (!status_callback_future_.valid()) {
      // If there is no future pending, construct a dummy one.
      status_callback_future_ = std::async([]() { return; });
    }
  }
  status_callback_future_.wait();
}

void EmulatedCameraProviderHwlImpl::NotifyPhysicalCameraUnavailable() {
  for (const auto& one_map : camera_id_map_) {
    for (const auto& physical_device : one_map.second) {
      if (physical_device.first != CameraDeviceStatus::kNotPresent) {
        continue;
      }

      uint32_t logical_camera_id = one_map.first;
      uint32_t physical_camera_id = physical_device.second;
      physical_camera_status_cb_(logical_camera_id, physical_camera_id,
                                 CameraDeviceStatus::kNotPresent);
    }
  }
}

status_t EmulatedCameraProviderHwlImpl::GetVendorTags(
    std::vector<VendorTagSection>* vendor_tag_sections) {
  if (vendor_tag_sections == nullptr) {
    ALOGE("%s: vendor_tag_sections is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  // No vendor specific tags as of now
  return OK;
}

status_t EmulatedCameraProviderHwlImpl::GetVisibleCameraIds(
    std::vector<std::uint32_t>* camera_ids) {
  if (camera_ids == nullptr) {
    ALOGE("%s: camera_ids is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  for (const auto& device : camera_id_map_) {
    camera_ids->push_back(device.first);
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::CreateCameraDeviceHwl(
    uint32_t camera_id, std::unique_ptr<CameraDeviceHwl>* camera_device_hwl) {
  if (camera_device_hwl == nullptr) {
    ALOGE("%s: camera_device_hwl is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  if (camera_id_map_.find(camera_id) == camera_id_map_.end()) {
    ALOGE("%s: Invalid camera id: %u", __func__, camera_id);
    return BAD_VALUE;
  }

  std::unique_ptr<HalCameraMetadata> meta =
      HalCameraMetadata::Clone(static_metadata_[camera_id].get());

  std::shared_ptr<EmulatedTorchState> torch_state;
  camera_metadata_ro_entry entry;
  bool flash_supported = false;
  auto ret = meta->Get(ANDROID_FLASH_INFO_AVAILABLE, &entry);
  if ((ret == OK) && (entry.count == 1)) {
    if (entry.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
      flash_supported = true;
    }
  }

  if (flash_supported) {
    torch_state = std::make_shared<EmulatedTorchState>(camera_id, torch_cb_);
  }

  auto physical_devices = std::make_unique<PhysicalDeviceMap>();
  for (const auto& physical_device : camera_id_map_[camera_id]) {
    physical_devices->emplace(
        physical_device.second,
        std::make_pair(physical_device.first,
                       HalCameraMetadata::Clone(
                           static_metadata_[physical_device.second].get())));
  }
  *camera_device_hwl = EmulatedCameraDeviceHwlImpl::Create(
      camera_id, std::move(meta), std::move(physical_devices), torch_state);
  if (*camera_device_hwl == nullptr) {
    ALOGE("%s: Cannot create EmulatedCameraDeviceHwlImpl.", __FUNCTION__);
    return BAD_VALUE;
  }

  return OK;
}

status_t EmulatedCameraProviderHwlImpl::CreateBufferAllocatorHwl(
    std::unique_ptr<CameraBufferAllocatorHwl>* camera_buffer_allocator_hwl) {
  if (camera_buffer_allocator_hwl == nullptr) {
    ALOGE("%s: camera_buffer_allocator_hwl is nullptr.", __FUNCTION__);
    return BAD_VALUE;
  }

  // Currently not supported
  return INVALID_OPERATION;
}
}  // namespace android