/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "EmulatedRequestProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA

#include "EmulatedRequestProcessor.h"

#include <HandleImporter.h>
#include <hardware/gralloc.h>
#include <log/log.h>
#include <sync/sync.h>
#include <utils/Timers.h>
#include <utils/Trace.h>

namespace android {

using android::hardware::camera::common::V1_0::helper::HandleImporter;
using google_camera_hal::ErrorCode;
using google_camera_hal::HwlPipelineResult;
using google_camera_hal::MessageType;
using google_camera_hal::NotifyMessage;
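
// Starts the request processing thread. Queued requests are picked up and
// forwarded to the sensor by RequestProcessorLoop().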
EmulatedRequestProcessor::EmulatedRequestProcessor(uint32_t camera_id,
                                                   sp<EmulatedSensor> sensor)
    : camera_id_(camera_id),
      sensor_(sensor),
      request_state_(std::make_unique<EmulatedLogicalRequestState>(camera_id)) {
  ATRACE_CALL();
  request_thread_ = std::thread([this] { this->RequestProcessorLoop(); });
}
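
// Signals the request thread to exit, waits for it to terminate and then
// shuts down the sensor.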
EmulatedRequestProcessor::~EmulatedRequestProcessor() {
  ATRACE_CALL();
  processor_done_ = true;
  request_thread_.join();

  auto ret = sensor_->ShutDown();
  if (ret != OK) {
    ALOGE("%s: Failed during sensor shutdown %s (%d)", __FUNCTION__,
          strerror(-ret), ret);
  }
}
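
// Wraps the incoming request buffers in SensorBuffer instances and appends
// them to the pending request queue. Blocks while the queue is deeper than
// EmulatedSensor::kPipelineDepth, waiting up to the maximum supported frame
// duration per free slot.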
status_t EmulatedRequestProcessor::ProcessPipelineRequests(
    uint32_t frame_number, const std::vector<HwlPipelineRequest>& requests,
    const std::vector<EmulatedPipeline>& pipelines) {
  ATRACE_CALL();

  std::unique_lock<std::mutex> lock(process_mutex_);

  for (const auto& request : requests) {
    if (request.pipeline_id >= pipelines.size()) {
      ALOGE("%s: Pipeline request with invalid pipeline id: %u", __FUNCTION__,
            request.pipeline_id);
      return BAD_VALUE;
    }

    while (pending_requests_.size() > EmulatedSensor::kPipelineDepth) {
      auto result = request_condition_.wait_for(
          lock, std::chrono::nanoseconds(
                    EmulatedSensor::kSupportedFrameDurationRange[1]));
      if (result == std::cv_status::timeout) {
        ALOGE("%s Timed out waiting for a pending request slot", __FUNCTION__);
        return TIMED_OUT;
      }
    }

    auto output_buffers = CreateSensorBuffers(
        frame_number, request.output_buffers,
        pipelines[request.pipeline_id].streams, request.pipeline_id,
        pipelines[request.pipeline_id].cb);
    auto input_buffers = CreateSensorBuffers(
        frame_number, request.input_buffers,
        pipelines[request.pipeline_id].streams, request.pipeline_id,
        pipelines[request.pipeline_id].cb);

    pending_requests_.push(
        {.settings = HalCameraMetadata::Clone(request.settings.get()),
         .input_buffers = std::move(input_buffers),
         .output_buffers = std::move(output_buffers)});
  }

  return OK;
}
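
// Converts a list of framework stream buffers into sensor buffers. Buffers
// that fail to import or lock are dropped from the returned list.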
std::unique_ptr<Buffers> EmulatedRequestProcessor::CreateSensorBuffers(
    uint32_t frame_number, const std::vector<StreamBuffer>& buffers,
    const std::unordered_map<uint32_t, EmulatedStream>& streams,
    uint32_t pipeline_id, HwlPipelineCallback cb) {
  if (buffers.empty()) {
    return nullptr;
  }

  auto sensor_buffers = std::make_unique<Buffers>();
  sensor_buffers->reserve(buffers.size());
  for (const auto& buffer : buffers) {
    auto sensor_buffer = CreateSensorBuffer(
        frame_number, streams.at(buffer.stream_id), pipeline_id, cb, buffer);
    if (sensor_buffer.get() != nullptr) {
      sensor_buffers->push_back(std::move(sensor_buffer));
    }
  }

  return sensor_buffers;
}
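
// Sends a single ERROR_REQUEST notification for a request that will not be
// processed and flags its output buffers so no per-buffer errors follow.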
void EmulatedRequestProcessor::NotifyFailedRequest(const PendingRequest& request) {
  if (request.output_buffers->at(0)->callback.notify != nullptr) {
    // Mark all output buffers for this request in order not to send
    // ERROR_BUFFER for them.
    for (auto& output_buffer : *(request.output_buffers)) {
      output_buffer->is_failed_request = true;
    }

    auto output_buffer = std::move(request.output_buffers->at(0));
    NotifyMessage msg = {
        .type = MessageType::kError,
        .message.error = {.frame_number = output_buffer->frame_number,
                          .error_stream_id = -1,
                          .error_code = ErrorCode::kErrorRequest}};
    output_buffer->callback.notify(output_buffer->pipeline_id, msg);
  }
}
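
// Flushes the in-flight sensor request and fails all remaining pending
// requests.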
status_t EmulatedRequestProcessor::Flush() {
  std::lock_guard<std::mutex> lock(process_mutex_);
  // First flush in-flight requests
  auto ret = sensor_->Flush();

  // Then the rest of the pending requests
  while (!pending_requests_.empty()) {
    const auto& request = pending_requests_.front();
    NotifyFailedRequest(request);
    pending_requests_.pop();
  }

  return ret;
}
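
// Computes the buffer size and stride in bytes for the supported non-YUV
// output formats; YCbCr_420_888 buffers are handled separately via
// lockYCbCr() in LockSensorBuffer().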
status_t EmulatedRequestProcessor::GetBufferSizeAndStride(
    const EmulatedStream& stream, uint32_t* size /*out*/,
    uint32_t* stride /*out*/) {
  if ((size == nullptr) || (stride == nullptr)) {
    return BAD_VALUE;
  }

  switch (stream.override_format) {
    case HAL_PIXEL_FORMAT_RGB_888:
      *stride = stream.width * 3;
      *size = (*stride) * stream.height;
      break;
    case HAL_PIXEL_FORMAT_RGBA_8888:
      *stride = stream.width * 4;
      *size = (*stride) * stream.height;
      break;
    case HAL_PIXEL_FORMAT_Y16:
      if (stream.override_data_space == HAL_DATASPACE_DEPTH) {
        *stride = AlignTo(AlignTo(stream.width, 2) * 2, 16);
        *size = (*stride) * AlignTo(stream.height, 2);
      } else {
        return BAD_VALUE;
      }
      break;
    case HAL_PIXEL_FORMAT_BLOB:
      if (stream.override_data_space == HAL_DATASPACE_V0_JFIF) {
        *size = stream.buffer_size;
        *stride = *size;
      } else {
        return BAD_VALUE;
      }
      break;
    case HAL_PIXEL_FORMAT_RAW16:
      *stride = stream.width * 2;
      *size = (*stride) * stream.height;
      break;
    default:
      return BAD_VALUE;
  }

  return OK;
}
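
// Maps the gralloc buffer for CPU access and stores the resulting plane
// pointers and strides in |sensor_buffer|. YCbCr_420_888 streams are locked
// through lockYCbCr(); all other supported formats use a flat mapping whose
// dimensions come from GetBufferSizeAndStride().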
status_t EmulatedRequestProcessor::LockSensorBuffer(
    const EmulatedStream& stream, HandleImporter& importer /*in*/,
    buffer_handle_t buffer, SensorBuffer* sensor_buffer /*out*/) {
  if (sensor_buffer == nullptr) {
    return BAD_VALUE;
  }

  auto width = static_cast<int32_t>(stream.width);
  auto height = static_cast<int32_t>(stream.height);
  auto usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN;
  if (stream.override_format == HAL_PIXEL_FORMAT_YCBCR_420_888) {
    IMapper::Rect map_rect = {0, 0, width, height};
    auto yuv_layout = importer.lockYCbCr(buffer, usage, map_rect);
    if ((yuv_layout.y != nullptr) && (yuv_layout.cb != nullptr) &&
        (yuv_layout.cr != nullptr)) {
      sensor_buffer->plane.img_y_crcb.img_y =
          static_cast<uint8_t*>(yuv_layout.y);
      sensor_buffer->plane.img_y_crcb.img_cb =
          static_cast<uint8_t*>(yuv_layout.cb);
      sensor_buffer->plane.img_y_crcb.img_cr =
          static_cast<uint8_t*>(yuv_layout.cr);
      sensor_buffer->plane.img_y_crcb.y_stride = yuv_layout.yStride;
      sensor_buffer->plane.img_y_crcb.cbcr_stride = yuv_layout.cStride;
      sensor_buffer->plane.img_y_crcb.cbcr_step = yuv_layout.chromaStep;
      if ((yuv_layout.chromaStep == 2) &&
          std::abs(sensor_buffer->plane.img_y_crcb.img_cb -
                   sensor_buffer->plane.img_y_crcb.img_cr) != 1) {
        ALOGE("%s: Unsupported YUV layout, chroma step: %u U/V plane delta: %u",
              __FUNCTION__, yuv_layout.chromaStep,
              static_cast<unsigned>(
                  std::abs(sensor_buffer->plane.img_y_crcb.img_cb -
                           sensor_buffer->plane.img_y_crcb.img_cr)));
        return BAD_VALUE;
      }
    } else {
      ALOGE("%s: Failed to lock output buffer!", __FUNCTION__);
      return BAD_VALUE;
    }
  } else {
    uint32_t buffer_size = 0, stride = 0;
    auto ret = GetBufferSizeAndStride(stream, &buffer_size, &stride);
    if (ret != OK) {
      ALOGE("%s: Unsupported pixel format: 0x%x", __FUNCTION__,
            stream.override_format);
      return BAD_VALUE;
    }
    if (stream.override_format == HAL_PIXEL_FORMAT_BLOB) {
      sensor_buffer->plane.img.img =
          static_cast<uint8_t*>(importer.lock(buffer, usage, buffer_size));
    } else {
      IMapper::Rect region{0, 0, width, height};
      sensor_buffer->plane.img.img =
          static_cast<uint8_t*>(importer.lock(buffer, usage, region));
    }
    if (sensor_buffer->plane.img.img == nullptr) {
      ALOGE("%s: Failed to lock output buffer!", __FUNCTION__);
      return BAD_VALUE;
    }
    sensor_buffer->plane.img.stride = stride;
    sensor_buffer->plane.img.buffer_size = buffer_size;
  }

  return OK;
}
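
// Builds a SensorBuffer for a single stream buffer: imports the buffer handle
// and acquire fence, locks the buffer for CPU access and fills in the stream
// metadata. Returns nullptr on any failure; the buffer status starts out as
// kError and is only flipped once processing succeeds.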
std::unique_ptr<SensorBuffer> EmulatedRequestProcessor::CreateSensorBuffer(
    uint32_t frame_number, const EmulatedStream& emulated_stream,
    uint32_t pipeline_id, HwlPipelineCallback callback,
    StreamBuffer stream_buffer) {
  auto buffer = std::make_unique<SensorBuffer>();

  auto stream = emulated_stream;
  // Make sure input stream formats are correctly mapped here
  if (stream.is_input) {
    stream.override_format =
        EmulatedSensor::OverrideFormat(stream.override_format);
  }
  buffer->width = stream.width;
  buffer->height = stream.height;
  buffer->format = stream.override_format;
  buffer->dataSpace = stream.override_data_space;
  buffer->stream_buffer = stream_buffer;
  buffer->pipeline_id = pipeline_id;
  buffer->callback = callback;
  buffer->frame_number = frame_number;
  buffer->camera_id = emulated_stream.is_physical_camera_stream
                          ? emulated_stream.physical_camera_id
                          : camera_id_;
  buffer->is_input = stream.is_input;
  // In case buffer processing is successful, flip this flag accordingly
  buffer->stream_buffer.status = BufferStatus::kError;

  if (!buffer->importer.importBuffer(buffer->stream_buffer.buffer)) {
    ALOGE("%s: Failed importing stream buffer!", __FUNCTION__);
    buffer = nullptr;
  }

  if (buffer.get() != nullptr) {
    auto ret = LockSensorBuffer(stream, buffer->importer,
                                buffer->stream_buffer.buffer, buffer.get());
    if (ret != OK) {
      buffer = nullptr;
    }
  }

  if ((buffer.get() != nullptr) && (stream_buffer.acquire_fence != nullptr)) {
    auto fence_status = buffer->importer.importFence(
        stream_buffer.acquire_fence, buffer->acquire_fence_fd);
    if (!fence_status) {
      ALOGE("%s: Failed importing acquire fence!", __FUNCTION__);
      buffer = nullptr;
    }
  }

  return buffer;
}
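
// Waits on the acquire fences of all buffers in |buffers| and returns the
// ones that became available. Buffers whose fences time out or fail are
// dropped; the input vector is emptied in the process.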
std::unique_ptr<Buffers> EmulatedRequestProcessor::AcquireBuffers(
    Buffers* buffers) {
  if ((buffers == nullptr) || (buffers->empty())) {
    return nullptr;
  }

  auto acquired_buffers = std::make_unique<Buffers>();
  acquired_buffers->reserve(buffers->size());
  auto output_buffer = buffers->begin();
  while (output_buffer != buffers->end()) {
    status_t ret = OK;
    if ((*output_buffer)->acquire_fence_fd >= 0) {
      ret = sync_wait((*output_buffer)->acquire_fence_fd,
                      ns2ms(EmulatedSensor::kSupportedFrameDurationRange[1]));
      if (ret != OK) {
        ALOGE("%s: Fence sync failed: %s, (%d)", __FUNCTION__, strerror(-ret),
              ret);
      }
    }

    if (ret == OK) {
      acquired_buffers->push_back(std::move(*output_buffer));
    }

    output_buffer = buffers->erase(output_buffer);
  }

  return acquired_buffers;
}
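
// Main loop of the request thread: each vertical sync period it pops at most
// one pending request, resolves the logical/physical capture settings and
// hands the acquired buffers over to the sensor for rendering.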
void EmulatedRequestProcessor::RequestProcessorLoop() {
  ATRACE_CALL();

  bool vsync_status = true;
  while (!processor_done_ && vsync_status) {
    {
      std::lock_guard<std::mutex> lock(process_mutex_);
      if (!pending_requests_.empty()) {
        status_t ret;
        const auto& request = pending_requests_.front();
        auto frame_number = request.output_buffers->at(0)->frame_number;
        auto notify_callback = request.output_buffers->at(0)->callback;
        auto pipeline_id = request.output_buffers->at(0)->pipeline_id;

        auto output_buffers = AcquireBuffers(request.output_buffers.get());
        auto input_buffers = AcquireBuffers(request.input_buffers.get());
        if (!output_buffers->empty()) {
          std::unique_ptr<EmulatedSensor::LogicalCameraSettings> logical_settings =
              std::make_unique<EmulatedSensor::LogicalCameraSettings>();

          std::unique_ptr<std::set<uint32_t>> physical_camera_output_ids =
              std::make_unique<std::set<uint32_t>>();
          for (const auto& it : *output_buffers) {
            if (it->camera_id != camera_id_) {
              physical_camera_output_ids->emplace(it->camera_id);
            }
          }

          // Repeating requests usually include valid settings only during the
          // initial call. Afterwards an invalid settings pointer means that
          // there are no changes in the parameters and Hal should re-use the
          // last valid values.
          // TODO: Add support for individual physical camera requests.
          if (request.settings.get() != nullptr) {
            ret = request_state_->InitializeLogicalSettings(
                HalCameraMetadata::Clone(request.settings.get()),
                std::move(physical_camera_output_ids), logical_settings.get());
            last_settings_ = HalCameraMetadata::Clone(request.settings.get());
          } else {
            ret = request_state_->InitializeLogicalSettings(
                HalCameraMetadata::Clone(last_settings_.get()),
                std::move(physical_camera_output_ids), logical_settings.get());
          }

          if (ret == OK) {
            auto result = request_state_->InitializeLogicalResult(pipeline_id,
                                                                  frame_number);
            sensor_->SetCurrentRequest(
                std::move(logical_settings), std::move(result),
                std::move(input_buffers), std::move(output_buffers));
          } else {
            NotifyMessage msg{.type = MessageType::kError,
                              .message.error = {
                                  .frame_number = frame_number,
                                  .error_stream_id = -1,
                                  .error_code = ErrorCode::kErrorResult,
                              }};

            notify_callback.notify(pipeline_id, msg);
          }
        } else {
          // No further processing is needed, just fail the result which will
          // complete this request.
          NotifyMessage msg{.type = MessageType::kError,
                            .message.error = {
                                .frame_number = frame_number,
                                .error_stream_id = -1,
                                .error_code = ErrorCode::kErrorResult,
                            }};

          notify_callback.notify(pipeline_id, msg);
        }

        pending_requests_.pop();
        request_condition_.notify_one();
      }
    }

    vsync_status =
        sensor_->WaitForVSync(EmulatedSensor::kSupportedFrameDurationRange[1]);
  }
}
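
// Forwards the static metadata and physical device map to the logical
// request state tracker.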
status_t EmulatedRequestProcessor::Initialize(
    std::unique_ptr<HalCameraMetadata> static_meta,
    PhysicalDeviceMapPtr physical_devices) {
  std::lock_guard<std::mutex> lock(process_mutex_);
  return request_state_->Initialize(std::move(static_meta),
                                    std::move(physical_devices));
}
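
// Returns the default request settings for the given template type.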
status_t EmulatedRequestProcessor::GetDefaultRequest(
    RequestTemplate type, std::unique_ptr<HalCameraMetadata>* default_settings) {
  std::lock_guard<std::mutex> lock(process_mutex_);
  return request_state_->GetDefaultRequest(type, default_settings);
}

}  // namespace android