Diffstat (limited to 'src')
-rw-r--r--  src/android/camera3_hal.cpp | 17
-rw-r--r--  src/android/camera_buffer.h | 61
-rw-r--r--  src/android/camera_capabilities.cpp | 1165
-rw-r--r--  src/android/camera_capabilities.h | 66
-rw-r--r--  src/android/camera_device.cpp | 2074
-rw-r--r--  src/android/camera_device.h | 126
-rw-r--r--  src/android/camera_hal_config.cpp | 407
-rw-r--r--  src/android/camera_hal_config.h | 39
-rw-r--r--  src/android/camera_hal_manager.cpp | 84
-rw-r--r--  src/android/camera_hal_manager.h | 20
-rw-r--r--  src/android/camera_metadata.cpp | 147
-rw-r--r--  src/android/camera_metadata.h | 70
-rw-r--r--  src/android/camera_ops.cpp | 8
-rw-r--r--  src/android/camera_stream.cpp | 146
-rw-r--r--  src/android/camera_stream.h | 147
-rw-r--r--  src/android/camera_worker.cpp | 129
-rw-r--r--  src/android/camera_worker.h | 71
-rw-r--r--  src/android/cros/camera3_hal.cpp | 24
-rw-r--r--  src/android/cros/meson.build | 13
-rw-r--r--  src/android/data/soraka/camera_hal.yaml | 8
-rw-r--r--  src/android/jpeg/encoder.h | 16
-rw-r--r--  src/android/jpeg/encoder_libjpeg.cpp | 36
-rw-r--r--  src/android/jpeg/encoder_libjpeg.h | 21
-rw-r--r--  src/android/jpeg/exif.cpp | 315
-rw-r--r--  src/android/jpeg/exif.h | 65
-rw-r--r--  src/android/jpeg/post_processor_jpeg.cpp | 196
-rw-r--r--  src/android/jpeg/post_processor_jpeg.h | 45
-rw-r--r--  src/android/jpeg/thumbnailer.cpp | 93
-rw-r--r--  src/android/jpeg/thumbnailer.h | 34
-rw-r--r--  src/android/meson.build | 47
-rw-r--r--  src/android/mm/cros_camera_buffer.cpp | 134
-rw-r--r--  src/android/mm/generic_camera_buffer.cpp | 91
-rw-r--r--  src/android/mm/meson.build | 9
-rw-r--r--  src/android/post_processor.h | 32
-rw-r--r--  src/android/yuv/post_processor_yuv.cpp | 143
-rw-r--r--  src/android/yuv/post_processor_yuv.h | 42
-rw-r--r--  src/cam/buffer_writer.h | 2
-rw-r--r--  src/cam/capture.cpp | 68
-rw-r--r--  src/cam/capture.h | 9
-rw-r--r--  src/cam/event_loop.cpp | 67
-rw-r--r--  src/cam/event_loop.h | 27
-rw-r--r--  src/cam/main.cpp | 62
-rw-r--r--  src/cam/main.h | 1
-rw-r--r--  src/cam/meson.build | 15
-rw-r--r--  src/cam/options.cpp | 17
-rw-r--r--  src/cam/options.h | 2
-rw-r--r--  src/cam/stream_options.cpp | 2
-rw-r--r--  src/gstreamer/gstlibcamera.cpp | 2
-rw-r--r--  src/gstreamer/gstlibcameraallocator.cpp | 8
-rw-r--r--  src/gstreamer/gstlibcameraallocator.h | 4
-rw-r--r--  src/gstreamer/gstlibcamerapad.cpp | 2
-rw-r--r--  src/gstreamer/gstlibcamerapool.cpp | 6
-rw-r--r--  src/gstreamer/gstlibcameraprovider.cpp | 10
-rw-r--r--  src/gstreamer/gstlibcamerasrc.cpp | 109
-rw-r--r--  src/gstreamer/meson.build | 54
-rw-r--r--  src/ipa/ipu3/ipu3.cpp | 349
-rw-r--r--  src/ipa/ipu3/ipu3_agc.cpp | 205
-rw-r--r--  src/ipa/ipu3/ipu3_agc.h | 71
-rw-r--r--  src/ipa/ipu3/ipu3_awb.cpp | 382
-rw-r--r--  src/ipa/ipu3/ipu3_awb.h | 91
-rw-r--r--  src/ipa/ipu3/meson.build | 27
-rw-r--r--  src/ipa/libipa/algorithm.cpp | 39
-rw-r--r--  src/ipa/libipa/algorithm.h | 24
-rw-r--r--  src/ipa/libipa/camera_sensor_helper.cpp | 322
-rw-r--r--  src/ipa/libipa/camera_sensor_helper.h | 89
-rw-r--r--  src/ipa/libipa/histogram.cpp | 153
-rw-r--r--  src/ipa/libipa/histogram.h | 40
-rw-r--r--  src/ipa/libipa/ipa_interface_wrapper.cpp | 285
-rw-r--r--  src/ipa/libipa/ipa_interface_wrapper.h | 61
-rw-r--r--  src/ipa/libipa/meson.build | 12
-rw-r--r--  src/ipa/meson.build | 31
-rw-r--r--  src/ipa/raspberrypi/cam_helper.cpp | 119
-rw-r--r--  src/ipa/raspberrypi/cam_helper.hpp | 51
-rw-r--r--  src/ipa/raspberrypi/cam_helper_imx219.cpp | 134
-rw-r--r--  src/ipa/raspberrypi/cam_helper_imx290.cpp | 67
-rw-r--r--  src/ipa/raspberrypi/cam_helper_imx477.cpp | 214
-rw-r--r--  src/ipa/raspberrypi/cam_helper_ov5647.cpp | 27
-rw-r--r--  src/ipa/raspberrypi/cam_helper_ov9281.cpp | 65
-rw-r--r--  src/ipa/raspberrypi/controller/agc_algorithm.hpp | 8
-rw-r--r--  src/ipa/raspberrypi/controller/agc_status.h | 12
-rw-r--r--  src/ipa/raspberrypi/controller/algorithm.hpp | 6
-rw-r--r--  src/ipa/raspberrypi/controller/awb_algorithm.hpp | 1
-rw-r--r--  src/ipa/raspberrypi/controller/camera_mode.h | 8
-rw-r--r--  src/ipa/raspberrypi/controller/controller.cpp | 19
-rw-r--r--  src/ipa/raspberrypi/controller/denoise_algorithm.hpp | 23
-rw-r--r--  src/ipa/raspberrypi/controller/denoise_status.h | 24
-rw-r--r--  src/ipa/raspberrypi/controller/device_status.cpp | 21
-rw-r--r--  src/ipa/raspberrypi/controller/device_status.h | 39
-rw-r--r--  src/ipa/raspberrypi/controller/logging.hpp | 30
-rw-r--r--  src/ipa/raspberrypi/controller/metadata.hpp | 70
-rw-r--r--  src/ipa/raspberrypi/controller/pwl.cpp | 30
-rw-r--r--  src/ipa/raspberrypi/controller/pwl.hpp | 3
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/agc.cpp | 526
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/agc.hpp | 50
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/alsc.cpp | 78
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/awb.cpp | 213
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/awb.hpp | 17
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/black_level.cpp | 11
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/ccm.cpp | 26
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/ccm.hpp | 3
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/contrast.cpp | 29
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/contrast.hpp | 5
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/dpc.cpp | 8
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/focus.cpp | 2
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/geq.cpp | 22
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/lux.cpp | 30
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/lux.hpp | 7
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/noise.cpp | 14
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/noise.hpp | 2
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/sdn.cpp | 38
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/sdn.hpp | 5
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/sharpen.cpp | 11
-rw-r--r--  src/ipa/raspberrypi/controller/sdn_status.h | 23
-rw-r--r--  src/ipa/raspberrypi/data/imx219.json | 17
-rw-r--r--  src/ipa/raspberrypi/data/imx290.json | 165
-rw-r--r--  src/ipa/raspberrypi/data/imx477.json | 17
-rw-r--r--  src/ipa/raspberrypi/data/meson.build | 5
-rw-r--r--  src/ipa/raspberrypi/data/ov5647.json | 17
-rw-r--r--  src/ipa/raspberrypi/data/ov9281.json | 92
-rw-r--r--  src/ipa/raspberrypi/data/se327m12.json | 341
-rw-r--r--  src/ipa/raspberrypi/md_parser.cpp | 101
-rw-r--r--  src/ipa/raspberrypi/md_parser.hpp | 178
-rw-r--r--  src/ipa/raspberrypi/md_parser_rpi.cpp | 37
-rw-r--r--  src/ipa/raspberrypi/md_parser_rpi.hpp | 32
-rw-r--r--  src/ipa/raspberrypi/md_parser_smia.cpp | 149
-rw-r--r--  src/ipa/raspberrypi/meson.build | 12
-rw-r--r--  src/ipa/raspberrypi/raspberrypi.cpp | 986
-rw-r--r--  src/ipa/rkisp1/meson.build | 6
-rw-r--r--  src/ipa/rkisp1/rkisp1.cpp | 114
-rw-r--r--  src/ipa/vimc/data/meson.build | 2
-rw-r--r--  src/ipa/vimc/meson.build | 6
-rw-r--r--  src/ipa/vimc/vimc.cpp | 42
-rw-r--r--  src/lc-compliance/capture_test.cpp | 128
-rw-r--r--  src/lc-compliance/environment.cpp | 20
-rw-r--r--  src/lc-compliance/environment.h | 31
-rw-r--r--  src/lc-compliance/main.cpp | 193
-rw-r--r--  src/lc-compliance/meson.build | 30
-rw-r--r--  src/lc-compliance/simple_capture.cpp | 191
-rw-r--r--  src/lc-compliance/simple_capture.h | 67
-rw-r--r--  src/libcamera/base/bound_method.cpp (renamed from src/libcamera/bound_method.cpp) | 19
-rw-r--r--  src/libcamera/base/class.cpp | 181
-rw-r--r--  src/libcamera/base/event_dispatcher.cpp (renamed from src/libcamera/event_dispatcher.cpp) | 7
-rw-r--r--  src/libcamera/base/event_dispatcher_poll.cpp (renamed from src/libcamera/event_dispatcher_poll.cpp) | 15
-rw-r--r--  src/libcamera/base/event_notifier.cpp (renamed from src/libcamera/event_notifier.cpp) | 10
-rw-r--r--  src/libcamera/base/file.cpp (renamed from src/libcamera/file.cpp) | 11
-rw-r--r--  src/libcamera/base/log.cpp (renamed from src/libcamera/log.cpp) | 129
-rw-r--r--  src/libcamera/base/meson.build | 49
-rw-r--r--  src/libcamera/base/message.cpp (renamed from src/libcamera/message.cpp) | 9
-rw-r--r--  src/libcamera/base/object.cpp (renamed from src/libcamera/object.cpp) | 25
-rw-r--r--  src/libcamera/base/semaphore.cpp (renamed from src/libcamera/semaphore.cpp) | 6
-rw-r--r--  src/libcamera/base/signal.cpp (renamed from src/libcamera/signal.cpp) | 6
-rw-r--r--  src/libcamera/base/thread.cpp (renamed from src/libcamera/thread.cpp) | 208
-rw-r--r--  src/libcamera/base/timer.cpp (renamed from src/libcamera/timer.cpp) | 16
-rw-r--r--  src/libcamera/base/utils.cpp (renamed from src/libcamera/utils.cpp) | 228
-rw-r--r--  src/libcamera/bayer_format.cpp | 146
-rw-r--r--  src/libcamera/byte_stream_buffer.cpp | 10
-rw-r--r--  src/libcamera/camera.cpp | 310
-rw-r--r--  src/libcamera/camera_manager.cpp | 124
-rw-r--r--  src/libcamera/camera_sensor.cpp | 568
-rw-r--r--  src/libcamera/camera_sensor_properties.cpp | 119
-rw-r--r--  src/libcamera/control_ids.cpp.in | 16
-rw-r--r--  src/libcamera/control_ids.yaml | 431
-rw-r--r--  src/libcamera/control_serializer.cpp | 31
-rw-r--r--  src/libcamera/controls.cpp | 82
-rw-r--r--  src/libcamera/delayed_controls.cpp | 293
-rw-r--r--  src/libcamera/device_enumerator.cpp | 9
-rw-r--r--  src/libcamera/device_enumerator_sysfs.cpp | 3
-rw-r--r--  src/libcamera/device_enumerator_udev.cpp | 4
-rw-r--r--  src/libcamera/file_descriptor.cpp | 4
-rw-r--r--  src/libcamera/formats.cpp | 14
-rw-r--r--  src/libcamera/formats.yaml | 3
-rw-r--r--  src/libcamera/framebuffer.cpp (renamed from src/libcamera/buffer.cpp) | 57
-rw-r--r--  src/libcamera/framebuffer_allocator.cpp | 5
-rw-r--r--  src/libcamera/geometry.cpp | 334
-rw-r--r--  src/libcamera/ipa/meson.build | 17
-rw-r--r--  src/libcamera/ipa_context_wrapper.cpp | 297
-rw-r--r--  src/libcamera/ipa_data_serializer.cpp | 615
-rw-r--r--  src/libcamera/ipa_interface.cpp | 637
-rw-r--r--  src/libcamera/ipa_manager.cpp | 56
-rw-r--r--  src/libcamera/ipa_module.cpp | 26
-rw-r--r--  src/libcamera/ipa_proxy.cpp | 120
-rw-r--r--  src/libcamera/ipc_pipe.cpp | 218
-rw-r--r--  src/libcamera/ipc_pipe_unixsocket.cpp | 145
-rw-r--r--  src/libcamera/ipc_unixsocket.cpp | 5
-rw-r--r--  src/libcamera/media_device.cpp | 20
-rw-r--r--  src/libcamera/media_object.cpp | 26
-rw-r--r--  src/libcamera/meson.build | 86
-rw-r--r--  src/libcamera/pipeline/ipu3/cio2.cpp | 49
-rw-r--r--  src/libcamera/pipeline/ipu3/cio2.h | 20
-rw-r--r--  src/libcamera/pipeline/ipu3/frames.cpp | 143
-rw-r--r--  src/libcamera/pipeline/ipu3/frames.h | 65
-rw-r--r--  src/libcamera/pipeline/ipu3/imgu.cpp | 176
-rw-r--r--  src/libcamera/pipeline/ipu3/imgu.h | 13
-rw-r--r--  src/libcamera/pipeline/ipu3/ipu3.cpp | 685
-rw-r--r--  src/libcamera/pipeline/ipu3/meson.build | 1
-rw-r--r--  src/libcamera/pipeline/meson.build | 2
-rw-r--r--  src/libcamera/pipeline/raspberrypi/dma_heaps.cpp | 2
-rw-r--r--  src/libcamera/pipeline/raspberrypi/meson.build | 1
-rw-r--r--  src/libcamera/pipeline/raspberrypi/raspberrypi.cpp | 1005
-rw-r--r--  src/libcamera/pipeline/raspberrypi/rpi_stream.cpp | 16
-rw-r--r--  src/libcamera/pipeline/raspberrypi/rpi_stream.h | 5
-rw-r--r--  src/libcamera/pipeline/raspberrypi/staggered_ctrl.cpp | 174
-rw-r--r--  src/libcamera/pipeline/raspberrypi/staggered_ctrl.h | 96
-rw-r--r--  src/libcamera/pipeline/rkisp1/meson.build | 1
-rw-r--r--  src/libcamera/pipeline/rkisp1/rkisp1.cpp | 410
-rw-r--r--  src/libcamera/pipeline/rkisp1/rkisp1_path.cpp | 23
-rw-r--r--  src/libcamera/pipeline/rkisp1/rkisp1_path.h | 11
-rw-r--r--  src/libcamera/pipeline/rkisp1/timeline.cpp | 227
-rw-r--r--  src/libcamera/pipeline/rkisp1/timeline.h | 72
-rw-r--r--  src/libcamera/pipeline/simple/converter.cpp | 376
-rw-r--r--  src/libcamera/pipeline/simple/converter.h | 72
-rw-r--r--  src/libcamera/pipeline/simple/simple.cpp | 780
-rw-r--r--  src/libcamera/pipeline/uvcvideo/uvcvideo.cpp | 83
-rw-r--r--  src/libcamera/pipeline/vimc/vimc.cpp | 83
-rw-r--r--  src/libcamera/pipeline_handler.cpp | 98
-rw-r--r--  src/libcamera/process.cpp | 53
-rw-r--r--  src/libcamera/property_ids.cpp.in | 15
-rw-r--r--  src/libcamera/property_ids.yaml | 71
-rw-r--r--  src/libcamera/proxy/ipa_proxy_linux.cpp | 103
-rw-r--r--  src/libcamera/proxy/ipa_proxy_thread.cpp | 172
-rw-r--r--  src/libcamera/proxy/meson.build | 21
-rw-r--r--  src/libcamera/proxy/worker/ipa_proxy_linux_worker.cpp | 90
-rw-r--r--  src/libcamera/proxy/worker/meson.build | 31
-rw-r--r--  src/libcamera/request.cpp | 141
-rw-r--r--  src/libcamera/source_paths.cpp | 139
-rw-r--r--  src/libcamera/stream.cpp | 5
-rw-r--r--  src/libcamera/sysfs.cpp | 9
-rw-r--r--  src/libcamera/tracepoints.cpp | 10
-rw-r--r--  src/libcamera/transform.cpp | 38
-rw-r--r--  src/libcamera/v4l2_controls.cpp | 151
-rw-r--r--  src/libcamera/v4l2_device.cpp | 338
-rw-r--r--  src/libcamera/v4l2_pixelformat.cpp | 5
-rw-r--r--  src/libcamera/v4l2_subdevice.cpp | 15
-rw-r--r--  src/libcamera/v4l2_videodevice.cpp | 140
-rw-r--r--  src/meson.build | 29
-rw-r--r--  src/qcam/assets/feathericons/feathericons.qrc | 12
-rw-r--r--  src/qcam/assets/shader/NV_2_planes_VU_f.glsl | 32
-rw-r--r--  src/qcam/assets/shader/RGB.frag | 22
-rw-r--r--  src/qcam/assets/shader/YUV_2_planes.frag (renamed from src/qcam/assets/shader/NV_2_planes_UV_f.glsl) | 9
-rw-r--r--  src/qcam/assets/shader/YUV_3_planes.frag (renamed from src/qcam/assets/shader/NV_3_planes_f.glsl) | 2
-rw-r--r--  src/qcam/assets/shader/YUV_packed.frag | 82
-rw-r--r--  src/qcam/assets/shader/bayer_1x_packed.frag | 216
-rw-r--r--  src/qcam/assets/shader/bayer_8.frag | 104
-rw-r--r--  src/qcam/assets/shader/bayer_8.vert | 51
-rw-r--r--  src/qcam/assets/shader/identity.vert (renamed from src/qcam/assets/shader/NV_vertex_shader.glsl) | 2
-rw-r--r--  src/qcam/assets/shader/shaders.qrc | 12
-rw-r--r--  src/qcam/dng_writer.cpp | 13
-rw-r--r--  src/qcam/dng_writer.h | 2
-rw-r--r--  src/qcam/main.cpp | 7
-rw-r--r--  src/qcam/main_window.cpp | 53
-rw-r--r--  src/qcam/main_window.h | 29
-rw-r--r--  src/qcam/meson.build | 105
-rw-r--r--  src/qcam/message_handler.cpp | 27
-rw-r--r--  src/qcam/message_handler.h | 26
-rw-r--r--  src/qcam/viewfinder.h | 4
-rw-r--r--  src/qcam/viewfinder_gl.cpp | 433
-rw-r--r--  src/qcam/viewfinder_gl.h | 41
-rw-r--r--  src/qcam/viewfinder_qt.h | 2
-rw-r--r--  src/v4l2/meson.build | 9
-rw-r--r--  src/v4l2/v4l2_camera.cpp | 48
-rw-r--r--  src/v4l2/v4l2_camera.h | 16
-rw-r--r--  src/v4l2/v4l2_camera_proxy.cpp | 13
-rw-r--r--  src/v4l2/v4l2_compat_manager.cpp | 15
263 files changed, 19032 insertions, 8229 deletions
diff --git a/src/android/camera3_hal.cpp b/src/android/camera3_hal.cpp
index d6e04af2..da836bae 100644
--- a/src/android/camera3_hal.cpp
+++ b/src/android/camera3_hal.cpp
@@ -7,7 +7,7 @@
#include <hardware/camera_common.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
#include "camera_device.h"
#include "camera_hal_manager.h"
@@ -16,25 +16,23 @@ using namespace libcamera;
LOG_DEFINE_CATEGORY(HAL)
-static CameraHalManager cameraManager;
-
/*------------------------------------------------------------------------------
* Android Camera HAL callbacks
*/
static int hal_get_number_of_cameras()
{
- return cameraManager.numCameras();
+ return CameraHalManager::instance()->numCameras();
}
static int hal_get_camera_info(int id, struct camera_info *info)
{
- return cameraManager.getCameraInfo(id, info);
+ return CameraHalManager::instance()->getCameraInfo(id, info);
}
static int hal_set_callbacks(const camera_module_callbacks_t *callbacks)
{
- cameraManager.setCallbacks(callbacks);
+ CameraHalManager::instance()->setCallbacks(callbacks);
return 0;
}
@@ -62,7 +60,7 @@ static int hal_init()
{
LOG(HAL, Info) << "Initialising Android camera HAL";
- cameraManager.init();
+ CameraHalManager::instance()->init();
return 0;
}
@@ -77,11 +75,12 @@ static int hal_dev_open(const hw_module_t *module, const char *name,
LOG(HAL, Debug) << "Open camera " << name;
int id = atoi(name);
- CameraDevice *camera = cameraManager.open(id, module);
+
+ auto [camera, ret] = CameraHalManager::instance()->open(id, module);
if (!camera) {
LOG(HAL, Error)
<< "Failed to open camera module '" << id << "'";
- return -ENODEV;
+ return ret == -EBUSY ? -EUSERS : ret;
}
*device = &camera->camera3Device()->common;
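The hunks above replace the file-scope CameraHalManager instance with a process-wide singleton, and open() now returns both the CameraDevice and an error code, so hal_dev_open() can map -EBUSY to -EUSERS, likely the code the Android framework expects when the camera is already in use. A minimal sketch of what the accessor side could look like, assuming a Meyers singleton; the real definition lives in camera_hal_manager.cpp and is not part of this excerpt:

#include <tuple>

class CameraDevice;

class CameraHalManager
{
public:
	static CameraHalManager *instance()
	{
		/* Function-local static: initialised once, thread-safe since C++11. */
		static CameraHalManager manager;
		return &manager;
	}

	/*
	 * Returning a {device, error} pair lets callers distinguish -EBUSY
	 * (camera already open) from other failures. The module type is
	 * stubbed as void * here; the real HAL passes hw_module_t.
	 */
	std::tuple<CameraDevice *, int> open(int id, const void *module);

private:
	CameraHalManager() = default;
};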
diff --git a/src/android/camera_buffer.h b/src/android/camera_buffer.h
new file mode 100644
index 00000000..21373fa2
--- /dev/null
+++ b/src/android/camera_buffer.h
@@ -0,0 +1,61 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_buffer.h - Frame buffer handling interface definition
+ */
+#ifndef __ANDROID_CAMERA_BUFFER_H__
+#define __ANDROID_CAMERA_BUFFER_H__
+
+#include <hardware/camera3.h>
+
+#include <libcamera/base/class.h>
+#include <libcamera/base/span.h>
+
+class CameraBuffer final : public libcamera::Extensible
+{
+ LIBCAMERA_DECLARE_PRIVATE()
+
+public:
+ CameraBuffer(buffer_handle_t camera3Buffer, int flags);
+ ~CameraBuffer();
+
+ bool isValid() const;
+
+ unsigned int numPlanes() const;
+
+ libcamera::Span<const uint8_t> plane(unsigned int plane) const;
+ libcamera::Span<uint8_t> plane(unsigned int plane);
+
+ size_t jpegBufferSize(size_t maxJpegBufferSize) const;
+};
+
+#define PUBLIC_CAMERA_BUFFER_IMPLEMENTATION \
+CameraBuffer::CameraBuffer(buffer_handle_t camera3Buffer, int flags) \
+ : Extensible(new Private(this, camera3Buffer, flags)) \
+{ \
+} \
+CameraBuffer::~CameraBuffer() \
+{ \
+} \
+bool CameraBuffer::isValid() const \
+{ \
+ return _d()->isValid(); \
+} \
+unsigned int CameraBuffer::numPlanes() const \
+{ \
+ return _d()->numPlanes(); \
+} \
+Span<const uint8_t> CameraBuffer::plane(unsigned int plane) const \
+{ \
+ return const_cast<Private *>(_d())->plane(plane); \
+} \
+Span<uint8_t> CameraBuffer::plane(unsigned int plane) \
+{ \
+ return _d()->plane(plane); \
+} \
+size_t CameraBuffer::jpegBufferSize(size_t maxJpegBufferSize) const \
+{ \
+ return _d()->jpegBufferSize(maxJpegBufferSize); \
+}
+#endif /* __ANDROID_CAMERA_BUFFER_H__ */
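camera_buffer.h keeps the public CameraBuffer interface in one place and pushes the mapping logic behind libcamera's Extensible (PIMPL) pattern: each memory backend defines its own CameraBuffer::Private and then expands PUBLIC_CAMERA_BUFFER_IMPLEMENTATION to emit the forwarding definitions of the public methods. A sketch of how a backend might plug in, assuming libcamera's LIBCAMERA_DECLARE_PUBLIC helper and with the member function bodies elided; the actual backends are src/android/mm/generic_camera_buffer.cpp and cros_camera_buffer.cpp, whose contents are not shown in this excerpt:

#include "camera_buffer.h"

using namespace libcamera;

class CameraBuffer::Private : public Extensible::Private
{
	LIBCAMERA_DECLARE_PUBLIC(CameraBuffer)

public:
	Private(CameraBuffer *cameraBuffer, buffer_handle_t camera3Buffer,
		int flags);
	~Private();

	bool isValid() const { return valid_; }
	unsigned int numPlanes() const;
	Span<uint8_t> plane(unsigned int plane);
	size_t jpegBufferSize(size_t maxJpegBufferSize) const;

private:
	bool valid_;
};

/* Emits the out-of-line definitions of the CameraBuffer public methods. */
PUBLIC_CAMERA_BUFFER_IMPLEMENTATION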
diff --git a/src/android/camera_capabilities.cpp b/src/android/camera_capabilities.cpp
new file mode 100644
index 00000000..6b5edb66
--- /dev/null
+++ b/src/android/camera_capabilities.cpp
@@ -0,0 +1,1165 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_capabilities.cpp - Camera static properties manager
+ */
+
+#include "camera_capabilities.h"
+
+#include <array>
+#include <cmath>
+
+#include <hardware/camera3.h>
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/control_ids.h>
+#include <libcamera/controls.h>
+#include <libcamera/property_ids.h>
+
+#include "libcamera/internal/formats.h"
+
+using namespace libcamera;
+
+LOG_DECLARE_CATEGORY(HAL)
+
+namespace {
+
+/*
+ * \var camera3Resolutions
+ * \brief The list of image resolutions defined as mandatory to be supported by
+ * the Android Camera3 specification
+ */
+const std::vector<Size> camera3Resolutions = {
+ { 320, 240 },
+ { 640, 480 },
+ { 1280, 720 },
+ { 1920, 1080 }
+};
+
+/*
+ * \struct Camera3Format
+ * \brief Data associated with an Android format identifier
+ * \var libcameraFormats List of libcamera pixel formats compatible with the
+ * Android format
+ * \var mandatory Whether support for the Android format is mandatory
+ * \var name The human-readable representation of the Android format code
+ */
+struct Camera3Format {
+ std::vector<PixelFormat> libcameraFormats;
+ bool mandatory;
+ const char *name;
+};
+
+/*
+ * \var camera3FormatsMap
+ * \brief Associate Android format code with ancillary data
+ */
+const std::map<int, const Camera3Format> camera3FormatsMap = {
+ {
+ HAL_PIXEL_FORMAT_BLOB, {
+ { formats::MJPEG },
+ true,
+ "BLOB"
+ }
+ }, {
+ HAL_PIXEL_FORMAT_YCbCr_420_888, {
+ { formats::NV12, formats::NV21 },
+ true,
+ "YCbCr_420_888"
+ }
+ }, {
+ /*
+ * \todo Translate IMPLEMENTATION_DEFINED inspecting the gralloc
+ * usage flag. For now, copy the YCbCr_420 configuration.
+ */
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, {
+ { formats::NV12, formats::NV21 },
+ true,
+ "IMPLEMENTATION_DEFINED"
+ }
+ }, {
+ HAL_PIXEL_FORMAT_RAW10, {
+ {
+ formats::SBGGR10_CSI2P,
+ formats::SGBRG10_CSI2P,
+ formats::SGRBG10_CSI2P,
+ formats::SRGGB10_CSI2P
+ },
+ false,
+ "RAW10"
+ }
+ }, {
+ HAL_PIXEL_FORMAT_RAW12, {
+ {
+ formats::SBGGR12_CSI2P,
+ formats::SGBRG12_CSI2P,
+ formats::SGRBG12_CSI2P,
+ formats::SRGGB12_CSI2P
+ },
+ false,
+ "RAW12"
+ }
+ }, {
+ HAL_PIXEL_FORMAT_RAW16, {
+ {
+ formats::SBGGR16,
+ formats::SGBRG16,
+ formats::SGRBG16,
+ formats::SRGGB16
+ },
+ false,
+ "RAW16"
+ }
+ },
+};
+
+} /* namespace */
+
+int CameraCapabilities::initialize(std::shared_ptr<libcamera::Camera> camera,
+ int orientation, int facing)
+{
+ camera_ = camera;
+ orientation_ = orientation;
+ facing_ = facing;
+
+ /* Acquire the camera and initialize available stream configurations. */
+ int ret = camera_->acquire();
+ if (ret) {
+ LOG(HAL, Error) << "Failed to temporarily acquire the camera";
+ return ret;
+ }
+
+ ret = initializeStreamConfigurations();
+ camera_->release();
+ if (ret)
+ return ret;
+
+ return initializeStaticMetadata();
+}
+
+std::vector<Size> CameraCapabilities::getYUVResolutions(CameraConfiguration *cameraConfig,
+ const PixelFormat &pixelFormat,
+ const std::vector<Size> &resolutions)
+{
+ std::vector<Size> supportedResolutions;
+
+ StreamConfiguration &cfg = cameraConfig->at(0);
+ for (const Size &res : resolutions) {
+ cfg.pixelFormat = pixelFormat;
+ cfg.size = res;
+
+ CameraConfiguration::Status status = cameraConfig->validate();
+ if (status != CameraConfiguration::Valid) {
+ LOG(HAL, Debug) << cfg.toString() << " not supported";
+ continue;
+ }
+
+ LOG(HAL, Debug) << cfg.toString() << " supported";
+
+ supportedResolutions.push_back(res);
+ }
+
+ return supportedResolutions;
+}
+
+std::vector<Size> CameraCapabilities::getRawResolutions(const libcamera::PixelFormat &pixelFormat)
+{
+ std::unique_ptr<CameraConfiguration> cameraConfig =
+ camera_->generateConfiguration({ StreamRole::Raw });
+ StreamConfiguration &cfg = cameraConfig->at(0);
+ const StreamFormats &formats = cfg.formats();
+ std::vector<Size> supportedResolutions = formats.sizes(pixelFormat);
+
+ return supportedResolutions;
+}
+
+/*
+ * Initialize the format conversion map to translate from Android format
+ * identifier to libcamera pixel formats and fill in the list of supported
+ * stream configurations to be reported to the Android camera framework through
+ * the camera static metadata.
+ */
+int CameraCapabilities::initializeStreamConfigurations()
+{
+ /*
+ * Get the maximum output resolutions
+ * \todo Get this from the camera properties once defined
+ */
+ std::unique_ptr<CameraConfiguration> cameraConfig =
+ camera_->generateConfiguration({ StillCapture });
+ if (!cameraConfig) {
+ LOG(HAL, Error) << "Failed to get maximum resolution";
+ return -EINVAL;
+ }
+ StreamConfiguration &cfg = cameraConfig->at(0);
+
+ /*
+ * \todo JPEG - Adjust the maximum available resolution by taking the
+ * JPEG encoder requirements into account (alignment and aspect ratio).
+ */
+ const Size maxRes = cfg.size;
+ LOG(HAL, Debug) << "Maximum supported resolution: " << maxRes.toString();
+
+ /*
+ * Build the list of supported image resolutions.
+ *
+ * The resolutions listed in camera3Resolutions are mandatory to be
+ * supported, up to the camera maximum resolution.
+ *
+ * Augment the list with resolutions derived from the camera maximum
+ * resolution.
+ */
+ std::vector<Size> cameraResolutions;
+ std::copy_if(camera3Resolutions.begin(), camera3Resolutions.end(),
+ std::back_inserter(cameraResolutions),
+ [&](const Size &res) { return res < maxRes; });
+
+ /*
+ * The Camera3 specification suggests adding 1/2 and 1/4 of the maximum
+ * resolution.
+ */
+ for (unsigned int divider = 2;; divider <<= 1) {
+ Size derivedSize{
+ maxRes.width / divider,
+ maxRes.height / divider,
+ };
+
+ if (derivedSize.width < 320 ||
+ derivedSize.height < 240)
+ break;
+
+ cameraResolutions.push_back(derivedSize);
+ }
+ cameraResolutions.push_back(maxRes);
+
+ /* Remove duplicated entries from the list of supported resolutions. */
+ std::sort(cameraResolutions.begin(), cameraResolutions.end());
+ auto last = std::unique(cameraResolutions.begin(), cameraResolutions.end());
+ cameraResolutions.erase(last, cameraResolutions.end());
+
+ /*
+ * Build the list of supported camera formats.
+ *
+ * To each Android format a list of compatible libcamera formats is
+ * associated. The first libcamera format that tests successful is added
+ * to the format translation map used when configuring the streams.
+ * It is then tested against the list of supported camera resolutions to
+ * build the stream configuration map reported through the camera static
+ * metadata.
+ */
+ Size maxJpegSize;
+ for (const auto &format : camera3FormatsMap) {
+ int androidFormat = format.first;
+ const Camera3Format &camera3Format = format.second;
+ const std::vector<PixelFormat> &libcameraFormats =
+ camera3Format.libcameraFormats;
+
+ LOG(HAL, Debug) << "Trying to map Android format "
+ << camera3Format.name;
+
+ /*
+ * JPEG is always supported, either produced directly by the
+ * camera, or encoded in the HAL.
+ */
+ if (androidFormat == HAL_PIXEL_FORMAT_BLOB) {
+ formatsMap_[androidFormat] = formats::MJPEG;
+ LOG(HAL, Debug) << "Mapped Android format "
+ << camera3Format.name << " to "
+ << formats::MJPEG.toString()
+ << " (fixed mapping)";
+ continue;
+ }
+
+ /*
+ * Test the libcamera formats that can produce images
+ * compatible with the format defined by Android.
+ */
+ PixelFormat mappedFormat;
+ for (const PixelFormat &pixelFormat : libcameraFormats) {
+
+ LOG(HAL, Debug) << "Testing " << pixelFormat.toString();
+
+ /*
+ * The stream configuration size can be adjusted,
+ * not the pixel format.
+ *
+ * \todo This could be simplified once all pipeline
+ * handlers report the StreamFormats list of
+ * supported formats.
+ */
+ cfg.pixelFormat = pixelFormat;
+
+ CameraConfiguration::Status status = cameraConfig->validate();
+ if (status != CameraConfiguration::Invalid &&
+ cfg.pixelFormat == pixelFormat) {
+ mappedFormat = pixelFormat;
+ break;
+ }
+ }
+
+ if (!mappedFormat.isValid()) {
+ /* If the format is not mandatory, skip it. */
+ if (!camera3Format.mandatory)
+ continue;
+
+ LOG(HAL, Error)
+ << "Failed to map mandatory Android format "
+ << camera3Format.name << " ("
+ << utils::hex(androidFormat) << "): aborting";
+ return -EINVAL;
+ }
+
+ /*
+ * Record the mapping and then proceed to generate the
+ * stream configurations map, by testing the image resolutions.
+ */
+ formatsMap_[androidFormat] = mappedFormat;
+ LOG(HAL, Debug) << "Mapped Android format "
+ << camera3Format.name << " to "
+ << mappedFormat.toString();
+
+ std::vector<Size> resolutions;
+ const PixelFormatInfo &info = PixelFormatInfo::info(mappedFormat);
+ if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)
+ resolutions = getRawResolutions(mappedFormat);
+ else
+ resolutions = getYUVResolutions(cameraConfig.get(),
+ mappedFormat,
+ cameraResolutions);
+
+ for (const Size &res : resolutions) {
+ streamConfigurations_.push_back({ res, androidFormat });
+
+ /*
+ * If the format is HAL_PIXEL_FORMAT_YCbCr_420_888
+ * from which JPEG is produced, add an entry for
+ * the JPEG stream.
+ *
+ * \todo Wire the JPEG encoder to query the supported
+ * sizes provided a list of formats it can encode.
+ *
+ * \todo Support JPEG streams produced by the camera
+ * natively.
+ */
+ if (androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ streamConfigurations_.push_back(
+ { res, HAL_PIXEL_FORMAT_BLOB });
+ maxJpegSize = std::max(maxJpegSize, res);
+ }
+ }
+
+ /*
+ * \todo Calculate the maximum JPEG buffer size by asking the
+ * encoder for the maximum frame size required.
+ */
+ maxJpegBufferSize_ = maxJpegSize.width * maxJpegSize.height * 1.5;
+ }
+
+ LOG(HAL, Debug) << "Collected stream configuration map: ";
+ for (const auto &entry : streamConfigurations_)
+ LOG(HAL, Debug) << "{ " << entry.resolution.toString() << " - "
+ << utils::hex(entry.androidFormat) << " }";
+
+ return 0;
+}
+
+int CameraCapabilities::initializeStaticMetadata()
+{
+ staticMetadata_ = std::make_unique<CameraMetadata>(64, 1024);
+ if (!staticMetadata_->isValid()) {
+ LOG(HAL, Error) << "Failed to allocate static metadata";
+ staticMetadata_.reset();
+ return -EINVAL;
+ }
+
+ const ControlInfoMap &controlsInfo = camera_->controls();
+ const ControlList &properties = camera_->properties();
+
+ /* Color correction static metadata. */
+ {
+ std::vector<uint8_t> data;
+ data.reserve(3);
+ const auto &infoMap = controlsInfo.find(&controls::draft::ColorCorrectionAberrationMode);
+ if (infoMap != controlsInfo.end()) {
+ for (const auto &value : infoMap->second.values())
+ data.push_back(value.get<int32_t>());
+ } else {
+ data.push_back(ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF);
+ }
+ staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+ data);
+ }
+
+ /* Control static metadata. */
+ std::vector<uint8_t> aeAvailableAntiBandingModes = {
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+ aeAvailableAntiBandingModes);
+
+ std::vector<uint8_t> aeAvailableModes = {
+ ANDROID_CONTROL_AE_MODE_ON,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,
+ aeAvailableModes);
+
+ int64_t minFrameDurationNsec = -1;
+ int64_t maxFrameDurationNsec = -1;
+ const auto frameDurationsInfo = controlsInfo.find(&controls::FrameDurationLimits);
+ if (frameDurationsInfo != controlsInfo.end()) {
+ minFrameDurationNsec = frameDurationsInfo->second.min().get<int64_t>() * 1000;
+ maxFrameDurationNsec = frameDurationsInfo->second.max().get<int64_t>() * 1000;
+
+ /*
+ * Adjust the minimum frame duration to comply with Android
+ * requirements. The camera service mandates all preview/record
+ * streams to have a minimum frame duration < 33.366 milliseconds
+ * (see MAX_PREVIEW_RECORD_DURATION_NS in the camera service
+ * implementation).
+ *
+ * If the camera minimum frame duration is close enough (within
+ * 500 microseconds above that value), round it down to an
+ * accepted value.
+ */
+ static constexpr int64_t MAX_PREVIEW_RECORD_DURATION_NS = 1e9 / 29.97;
+ if (minFrameDurationNsec > MAX_PREVIEW_RECORD_DURATION_NS &&
+ minFrameDurationNsec < MAX_PREVIEW_RECORD_DURATION_NS + 500000)
+ minFrameDurationNsec = MAX_PREVIEW_RECORD_DURATION_NS - 1000;
+
+ /*
+ * The AE routine frame rate limits are computed using the frame
+ * duration limits, as libcamera clips the AE routine to the
+ * frame durations.
+ */
+ int32_t maxFps = std::round(1e9 / minFrameDurationNsec);
+ int32_t minFps = std::round(1e9 / maxFrameDurationNsec);
+ minFps = std::max(1, minFps);
+
+ /*
+ * Recompute the frame durations from the rounded FPS values so
+ * that the values reused later stay consistent with them.
+ */
+ minFrameDurationNsec = 1e9 / maxFps;
+ maxFrameDurationNsec = 1e9 / minFps;
+
+ /*
+ * Register the {min, max} and {max, max} intervals with the
+ * camera service, as required by the metadata documentation.
+ */
+ int32_t availableAeFpsTarget[] = {
+ minFps, maxFps, maxFps, maxFps
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+ availableAeFpsTarget);
+ }
+
+ std::vector<int32_t> aeCompensationRange = {
+ 0, 0,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
+ aeCompensationRange);
+
+ const camera_metadata_rational_t aeCompensationStep[] = {
+ { 0, 1 }
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,
+ aeCompensationStep);
+
+ std::vector<uint8_t> availableAfModes = {
+ ANDROID_CONTROL_AF_MODE_OFF,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ availableAfModes);
+
+ std::vector<uint8_t> availableEffects = {
+ ANDROID_CONTROL_EFFECT_MODE_OFF,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,
+ availableEffects);
+
+ std::vector<uint8_t> availableSceneModes = {
+ ANDROID_CONTROL_SCENE_MODE_DISABLED,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+ availableSceneModes);
+
+ std::vector<uint8_t> availableStabilizationModes = {
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+ availableStabilizationModes);
+
+ /*
+ * \todo Inspect the camera capabilities to report the available
+ * AWB modes. Default to AUTO as CTS tests require it.
+ */
+ std::vector<uint8_t> availableAwbModes = {
+ ANDROID_CONTROL_AWB_MODE_AUTO,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+ availableAwbModes);
+
+ std::vector<int32_t> availableMaxRegions = {
+ 0, 0, 0,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,
+ availableMaxRegions);
+
+ std::vector<uint8_t> sceneModesOverride = {
+ ANDROID_CONTROL_AE_MODE_ON,
+ ANDROID_CONTROL_AWB_MODE_AUTO,
+ ANDROID_CONTROL_AF_MODE_OFF,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
+ sceneModesOverride);
+
+ uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
+ aeLockAvailable);
+
+ uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
+ staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
+ awbLockAvailable);
+
+ char availableControlModes = ANDROID_CONTROL_MODE_AUTO;
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,
+ availableControlModes);
+
+ /* JPEG static metadata. */
+
+ /*
+ * Create the list of supported thumbnail sizes by inspecting the
+ * available JPEG resolutions collected in streamConfigurations_,
+ * generating one entry for each aspect ratio.
+ *
+ * The JPEG thumbnailer can freely scale, so pick an arbitrary
+ * (160, 160) size as the bounding rectangle, which is then cropped to
+ * the different supported aspect ratios.
+ */
+ constexpr Size maxJpegThumbnail(160, 160);
+ std::vector<Size> thumbnailSizes;
+ thumbnailSizes.push_back({ 0, 0 });
+ for (const auto &entry : streamConfigurations_) {
+ if (entry.androidFormat != HAL_PIXEL_FORMAT_BLOB)
+ continue;
+
+ Size thumbnailSize = maxJpegThumbnail
+ .boundedToAspectRatio({ entry.resolution.width,
+ entry.resolution.height });
+ thumbnailSizes.push_back(thumbnailSize);
+ }
+
+ std::sort(thumbnailSizes.begin(), thumbnailSizes.end());
+ auto last = std::unique(thumbnailSizes.begin(), thumbnailSizes.end());
+ thumbnailSizes.erase(last, thumbnailSizes.end());
+
+ /* Transform the sizes into a list of integers that can be consumed. */
+ std::vector<int32_t> thumbnailEntries;
+ thumbnailEntries.reserve(thumbnailSizes.size() * 2);
+ for (const auto &size : thumbnailSizes) {
+ thumbnailEntries.push_back(size.width);
+ thumbnailEntries.push_back(size.height);
+ }
+ staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+ thumbnailEntries);
+
+ staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, maxJpegBufferSize_);
+
+ /* Sensor static metadata. */
+ std::array<int32_t, 2> pixelArraySize;
+ {
+ const Size &size = properties.get(properties::PixelArraySize);
+ pixelArraySize[0] = size.width;
+ pixelArraySize[1] = size.height;
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+ pixelArraySize);
+ }
+
+ if (properties.contains(properties::UnitCellSize)) {
+ const Size &cellSize = properties.get<Size>(properties::UnitCellSize);
+ std::array<float, 2> physicalSize{
+ cellSize.width * pixelArraySize[0] / 1e6f,
+ cellSize.height * pixelArraySize[1] / 1e6f
+ };
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
+ physicalSize);
+ }
+
+ {
+ const Span<const Rectangle> &rects =
+ properties.get(properties::PixelArrayActiveAreas);
+ std::vector<int32_t> data{
+ static_cast<int32_t>(rects[0].x),
+ static_cast<int32_t>(rects[0].y),
+ static_cast<int32_t>(rects[0].width),
+ static_cast<int32_t>(rects[0].height),
+ };
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+ data);
+ }
+
+ int32_t sensitivityRange[] = {
+ 32, 2400,
+ };
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
+ sensitivityRange);
+
+ /* Report the color filter arrangement if the camera reports it. */
+ if (properties.contains(properties::draft::ColorFilterArrangement)) {
+ uint8_t filterArr = properties.get(properties::draft::ColorFilterArrangement);
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+ filterArr);
+ }
+
+ const auto &exposureInfo = controlsInfo.find(&controls::ExposureTime);
+ if (exposureInfo != controlsInfo.end()) {
+ int64_t exposureTimeRange[2] = {
+ exposureInfo->second.min().get<int32_t>() * 1000LL,
+ exposureInfo->second.max().get<int32_t>() * 1000LL,
+ };
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
+ exposureTimeRange, 2);
+ }
+
+ staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, orientation_);
+
+ std::vector<int32_t> testPatternModes = {
+ ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
+ };
+ const auto &testPatternsInfo =
+ controlsInfo.find(&controls::draft::TestPatternMode);
+ if (testPatternsInfo != controlsInfo.end()) {
+ const auto &values = testPatternsInfo->second.values();
+ ASSERT(!values.empty());
+ for (const auto &value : values) {
+ switch (value.get<int32_t>()) {
+ case controls::draft::TestPatternModeOff:
+ /*
+ * ANDROID_SENSOR_TEST_PATTERN_MODE_OFF is
+ * already in testPatternModes.
+ */
+ break;
+
+ case controls::draft::TestPatternModeSolidColor:
+ testPatternModes.push_back(
+ ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
+ break;
+
+ case controls::draft::TestPatternModeColorBars:
+ testPatternModes.push_back(
+ ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS);
+ break;
+
+ case controls::draft::TestPatternModeColorBarsFadeToGray:
+ testPatternModes.push_back(
+ ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY);
+ break;
+
+ case controls::draft::TestPatternModePn9:
+ testPatternModes.push_back(
+ ANDROID_SENSOR_TEST_PATTERN_MODE_PN9);
+ break;
+
+ case controls::draft::TestPatternModeCustom1:
+ /* We don't support this yet. */
+ break;
+
+ default:
+ LOG(HAL, Error) << "Unknown test pattern mode: "
+ << value.get<int32_t>();
+ continue;
+ }
+ }
+ }
+ staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+ testPatternModes);
+
+ uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
+ timestampSource);
+
+ if (maxFrameDurationNsec > 0)
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+ maxFrameDurationNsec);
+
+ /* Statistics static metadata. */
+ uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+ staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+ faceDetectMode);
+
+ int32_t maxFaceCount = 0;
+ staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
+ maxFaceCount);
+
+ {
+ std::vector<uint8_t> data;
+ data.reserve(2);
+ const auto &infoMap = controlsInfo.find(&controls::draft::LensShadingMapMode);
+ if (infoMap != controlsInfo.end()) {
+ for (const auto &value : infoMap->second.values())
+ data.push_back(value.get<int32_t>());
+ } else {
+ data.push_back(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF);
+ }
+ staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
+ data);
+ }
+
+ /* Sync static metadata. */
+ int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
+ staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, maxLatency);
+
+ /* Flash static metadata. */
+ char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
+ staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,
+ flashAvailable);
+
+ /* Lens static metadata. */
+ std::vector<float> lensApertures = {
+ 2.53 / 100,
+ };
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
+ lensApertures);
+
+ uint8_t lensFacing;
+ switch (facing_) {
+ default:
+ case CAMERA_FACING_FRONT:
+ lensFacing = ANDROID_LENS_FACING_FRONT;
+ break;
+ case CAMERA_FACING_BACK:
+ lensFacing = ANDROID_LENS_FACING_BACK;
+ break;
+ case CAMERA_FACING_EXTERNAL:
+ lensFacing = ANDROID_LENS_FACING_EXTERNAL;
+ break;
+ }
+ staticMetadata_->addEntry(ANDROID_LENS_FACING, lensFacing);
+
+ std::vector<float> lensFocalLengths = {
+ 1,
+ };
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+ lensFocalLengths);
+
+ std::vector<uint8_t> opticalStabilizations = {
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
+ };
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+ opticalStabilizations);
+
+ float hyperfocalDistance = 0;
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
+ hyperfocalDistance);
+
+ float minFocusDistance = 0;
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
+ minFocusDistance);
+
+ /* Noise reduction modes. */
+ {
+ std::vector<uint8_t> data;
+ data.reserve(5);
+ const auto &infoMap = controlsInfo.find(&controls::draft::NoiseReductionMode);
+ if (infoMap != controlsInfo.end()) {
+ for (const auto &value : infoMap->second.values())
+ data.push_back(value.get<int32_t>());
+ } else {
+ data.push_back(ANDROID_NOISE_REDUCTION_MODE_OFF);
+ }
+ staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+ data);
+ }
+
+ /* Scaler static metadata. */
+
+ /*
+ * \todo The digital zoom factor is a property that depends on the
+ * desired output configuration and the sensor frame size input to the
+ * ISP. This information is not available to the Android HAL, not at
+ * initialization time at least.
+ *
+ * As a workaround rely on pipeline handlers initializing the
+ * ScalerCrop control with the camera default configuration and use the
+ * maximum and minimum crop rectangles to calculate the digital zoom
+ * factor.
+ */
+ float maxZoom = 1.0f;
+ const auto scalerCrop = controlsInfo.find(&controls::ScalerCrop);
+ if (scalerCrop != controlsInfo.end()) {
+ Rectangle min = scalerCrop->second.min().get<Rectangle>();
+ Rectangle max = scalerCrop->second.max().get<Rectangle>();
+ maxZoom = std::min(1.0f * max.width / min.width,
+ 1.0f * max.height / min.height);
+ }
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+ maxZoom);
+
+ std::vector<uint32_t> availableStreamConfigurations;
+ availableStreamConfigurations.reserve(streamConfigurations_.size() * 4);
+ for (const auto &entry : streamConfigurations_) {
+ availableStreamConfigurations.push_back(entry.androidFormat);
+ availableStreamConfigurations.push_back(entry.resolution.width);
+ availableStreamConfigurations.push_back(entry.resolution.height);
+ availableStreamConfigurations.push_back(
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+ }
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+ availableStreamConfigurations);
+
+ std::vector<int64_t> availableStallDurations = {
+ ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
+ };
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
+ availableStallDurations);
+
+ /* Use the minimum frame duration for all the YUV/RGB formats. */
+ if (minFrameDurationNsec > 0) {
+ std::vector<int64_t> minFrameDurations;
+ minFrameDurations.reserve(streamConfigurations_.size() * 4);
+ for (const auto &entry : streamConfigurations_) {
+ minFrameDurations.push_back(entry.androidFormat);
+ minFrameDurations.push_back(entry.resolution.width);
+ minFrameDurations.push_back(entry.resolution.height);
+ minFrameDurations.push_back(minFrameDurationNsec);
+ }
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+ minFrameDurations);
+ }
+
+ uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
+ staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, croppingType);
+
+ /* Info static metadata. */
+ uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
+ staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
+ supportedHWLevel);
+
+ /* Request static metadata. */
+ int32_t partialResultCount = 1;
+ staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+ partialResultCount);
+
+ {
+ /* Default the value to 2 if not reported by the camera. */
+ uint8_t maxPipelineDepth = 2;
+ const auto &infoMap = controlsInfo.find(&controls::draft::PipelineDepth);
+ if (infoMap != controlsInfo.end())
+ maxPipelineDepth = infoMap->second.max().get<int32_t>();
+ staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
+ maxPipelineDepth);
+ }
+
+ /* LIMITED does not support reprocessing. */
+ uint32_t maxNumInputStreams = 0;
+ staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
+ maxNumInputStreams);
+
+ std::vector<uint8_t> availableCapabilities = {
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
+ };
+
+ /* Report if camera supports RAW. */
+ bool rawStreamAvailable = false;
+ std::unique_ptr<CameraConfiguration> cameraConfig =
+ camera_->generateConfiguration({ StreamRole::Raw });
+ if (cameraConfig && !cameraConfig->empty()) {
+ const PixelFormatInfo &info =
+ PixelFormatInfo::info(cameraConfig->at(0).pixelFormat);
+ /* Only advertise RAW support if RAW16 is possible. */
+ if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW &&
+ info.bitsPerPixel == 16) {
+ rawStreamAvailable = true;
+ availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
+ }
+ }
+
+ /* Number of { RAW, YUV, JPEG } supported output streams */
+ int32_t numOutStreams[] = { rawStreamAvailable, 2, 1 };
+ staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
+ numOutStreams);
+
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+ availableCapabilities);
+
+ std::vector<int32_t> availableCharacteristicsKeys = {
+ ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+ ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+ ANDROID_CONTROL_AE_AVAILABLE_MODES,
+ ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+ ANDROID_CONTROL_AE_COMPENSATION_RANGE,
+ ANDROID_CONTROL_AE_COMPENSATION_STEP,
+ ANDROID_CONTROL_AE_LOCK_AVAILABLE,
+ ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ ANDROID_CONTROL_AVAILABLE_EFFECTS,
+ ANDROID_CONTROL_AVAILABLE_MODES,
+ ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+ ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+ ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+ ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
+ ANDROID_CONTROL_MAX_REGIONS,
+ ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
+ ANDROID_FLASH_INFO_AVAILABLE,
+ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
+ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+ ANDROID_JPEG_MAX_SIZE,
+ ANDROID_LENS_FACING,
+ ANDROID_LENS_INFO_AVAILABLE_APERTURES,
+ ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+ ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+ ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
+ ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
+ ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+ ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
+ ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
+ ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+ ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
+ ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+ ANDROID_SCALER_CROPPING_TYPE,
+ ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+ ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+ ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
+ ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+ ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
+ ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+ ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
+ ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
+ ANDROID_SENSOR_ORIENTATION,
+ ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+ ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
+ ANDROID_SYNC_MAX_LATENCY,
+ };
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+ availableCharacteristicsKeys);
+
+ std::vector<int32_t> availableRequestKeys = {
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ ANDROID_CONTROL_AE_LOCK,
+ ANDROID_CONTROL_AE_MODE,
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ ANDROID_CONTROL_AF_MODE,
+ ANDROID_CONTROL_AF_TRIGGER,
+ ANDROID_CONTROL_AWB_LOCK,
+ ANDROID_CONTROL_AWB_MODE,
+ ANDROID_CONTROL_CAPTURE_INTENT,
+ ANDROID_CONTROL_EFFECT_MODE,
+ ANDROID_CONTROL_MODE,
+ ANDROID_CONTROL_SCENE_MODE,
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ ANDROID_FLASH_MODE,
+ ANDROID_JPEG_ORIENTATION,
+ ANDROID_JPEG_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_SIZE,
+ ANDROID_LENS_APERTURE,
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+ ANDROID_NOISE_REDUCTION_MODE,
+ ANDROID_SCALER_CROP_REGION,
+ ANDROID_STATISTICS_FACE_DETECT_MODE
+ };
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
+ availableRequestKeys);
+
+ std::vector<int32_t> availableResultKeys = {
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ ANDROID_CONTROL_AE_LOCK,
+ ANDROID_CONTROL_AE_MODE,
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ ANDROID_CONTROL_AE_STATE,
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ ANDROID_CONTROL_AF_MODE,
+ ANDROID_CONTROL_AF_STATE,
+ ANDROID_CONTROL_AF_TRIGGER,
+ ANDROID_CONTROL_AWB_LOCK,
+ ANDROID_CONTROL_AWB_MODE,
+ ANDROID_CONTROL_AWB_STATE,
+ ANDROID_CONTROL_CAPTURE_INTENT,
+ ANDROID_CONTROL_EFFECT_MODE,
+ ANDROID_CONTROL_MODE,
+ ANDROID_CONTROL_SCENE_MODE,
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ ANDROID_FLASH_MODE,
+ ANDROID_FLASH_STATE,
+ ANDROID_JPEG_GPS_COORDINATES,
+ ANDROID_JPEG_GPS_PROCESSING_METHOD,
+ ANDROID_JPEG_GPS_TIMESTAMP,
+ ANDROID_JPEG_ORIENTATION,
+ ANDROID_JPEG_QUALITY,
+ ANDROID_JPEG_SIZE,
+ ANDROID_JPEG_THUMBNAIL_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_SIZE,
+ ANDROID_LENS_APERTURE,
+ ANDROID_LENS_FOCAL_LENGTH,
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+ ANDROID_LENS_STATE,
+ ANDROID_NOISE_REDUCTION_MODE,
+ ANDROID_REQUEST_PIPELINE_DEPTH,
+ ANDROID_SCALER_CROP_REGION,
+ ANDROID_SENSOR_EXPOSURE_TIME,
+ ANDROID_SENSOR_FRAME_DURATION,
+ ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
+ ANDROID_SENSOR_TEST_PATTERN_MODE,
+ ANDROID_SENSOR_TIMESTAMP,
+ ANDROID_STATISTICS_FACE_DETECT_MODE,
+ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
+ ANDROID_STATISTICS_SCENE_FLICKER,
+ };
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
+ availableResultKeys);
+
+ if (!staticMetadata_->isValid()) {
+ LOG(HAL, Error) << "Failed to construct static metadata";
+ staticMetadata_.reset();
+ return -EINVAL;
+ }
+
+ if (staticMetadata_->resized()) {
+ auto [entryCount, dataCount] = staticMetadata_->usage();
+ LOG(HAL, Info)
+ << "Static metadata resized: " << entryCount
+ << " entries and " << dataCount << " bytes used";
+ }
+
+ return 0;
+}
+
+/* Translate Android format code to libcamera pixel format. */
+PixelFormat CameraCapabilities::toPixelFormat(int format) const
+{
+ auto it = formatsMap_.find(format);
+ if (it == formatsMap_.end()) {
+ LOG(HAL, Error) << "Requested format " << utils::hex(format)
+ << " not supported";
+ return PixelFormat();
+ }
+
+ return it->second;
+}
+
+std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplatePreview() const
+{
+ /*
+ * \todo Keep this in sync with the actual number of entries.
+ * Currently: 20 entries, 35 bytes
+ */
+ auto requestTemplate = std::make_unique<CameraMetadata>(21, 36);
+ if (!requestTemplate->isValid()) {
+ return nullptr;
+ }
+
+ /* Get the FPS range registered in the static metadata. */
+ camera_metadata_ro_entry_t entry;
+ bool found = staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+ &entry);
+ if (!found) {
+ LOG(HAL, Error) << "Cannot create capture template without FPS range";
+ return nullptr;
+ }
+
+ /*
+ * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata
+ * has been assembled as {{min, max} {max, max}}.
+ */
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ entry.data.i32, 2);
+
+ uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE, aeMode);
+
+ int32_t aeExposureCompensation = 0;
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ aeExposureCompensation);
+
+ uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ aePrecaptureTrigger);
+
+ uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK, aeLock);
+
+ uint8_t aeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ aeAntibandingMode);
+
+ uint8_t afMode = ANDROID_CONTROL_AF_MODE_OFF;
+ requestTemplate->addEntry(ANDROID_CONTROL_AF_MODE, afMode);
+
+ uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
+ requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER, afTrigger);
+
+ uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
+ requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE, awbMode);
+
+ uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
+ requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK, awbLock);
+
+ uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
+ requestTemplate->addEntry(ANDROID_FLASH_MODE, flashMode);
+
+ uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+ requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,
+ faceDetectMode);
+
+ uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF;
+ requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE,
+ noiseReduction);
+
+ uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
+ requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ aberrationMode);
+
+ uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
+ requestTemplate->addEntry(ANDROID_CONTROL_MODE, controlMode);
+
+ float lensAperture = 2.53 / 100;
+ requestTemplate->addEntry(ANDROID_LENS_APERTURE, lensAperture);
+
+ uint8_t opticalStabilization = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+ requestTemplate->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+ opticalStabilization);
+
+ uint8_t captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+ requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,
+ captureIntent);
+
+ return requestTemplate;
+}
+
+std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplateVideo() const
+{
+ std::unique_ptr<CameraMetadata> previewTemplate = requestTemplatePreview();
+ if (!previewTemplate)
+ return nullptr;
+
+ /*
+ * The video template requires a fixed FPS range. Everything else
+ * stays the same as the preview template.
+ */
+ camera_metadata_ro_entry_t entry;
+ staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+ &entry);
+
+ /*
+ * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata
+ * has been assembled as {{min, max} {max, max}}.
+ */
+ previewTemplate->updateEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ entry.data.i32 + 2, 2);
+
+ return previewTemplate;
+}
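The FPS clamping in initializeStaticMetadata() above is subtle enough to deserve a worked example. The standalone sketch below replays the rounding path with a hypothetical sensor whose minimum frame duration is 33.5 ms; the constant mirrors MAX_PREVIEW_RECORD_DURATION_NS from the patch, and the expected output is 30 fps with a 33,333,333 ns duration:

#include <cmath>
#include <cstdint>
#include <iostream>

int main()
{
	/* Mirrors MAX_PREVIEW_RECORD_DURATION_NS above: ~33,366,700 ns. */
	constexpr int64_t maxPreviewRecordDurationNs = 1e9 / 29.97;

	/* Hypothetical sensor limit: 33.5 ms, ~133 us above the threshold. */
	int64_t minFrameDurationNsec = 33500000;

	/* Within 500 us above the limit: snap below it, as the HAL does. */
	if (minFrameDurationNsec > maxPreviewRecordDurationNs &&
	    minFrameDurationNsec < maxPreviewRecordDurationNs + 500000)
		minFrameDurationNsec = maxPreviewRecordDurationNs - 1000;

	int32_t maxFps = std::round(1e9 / minFrameDurationNsec);

	/* Recompute the duration from the rounded FPS for consistency. */
	minFrameDurationNsec = 1e9 / maxFps;

	/* Prints "30 fps, 33333333 ns". */
	std::cout << maxFps << " fps, " << minFrameDurationNsec << " ns\n";
	return 0;
}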
diff --git a/src/android/camera_capabilities.h b/src/android/camera_capabilities.h
new file mode 100644
index 00000000..4f5be825
--- /dev/null
+++ b/src/android/camera_capabilities.h
@@ -0,0 +1,66 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_capabilities.h - Camera static properties manager
+ */
+#ifndef __ANDROID_CAMERA_CAPABILITIES_H__
+#define __ANDROID_CAMERA_CAPABILITIES_H__
+
+#include <map>
+#include <memory>
+#include <vector>
+
+#include <libcamera/base/class.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/formats.h>
+#include <libcamera/geometry.h>
+
+#include "camera_metadata.h"
+
+class CameraCapabilities
+{
+public:
+ CameraCapabilities() = default;
+
+ int initialize(std::shared_ptr<libcamera::Camera> camera,
+ int orientation, int facing);
+
+ CameraMetadata *staticMetadata() const { return staticMetadata_.get(); }
+ libcamera::PixelFormat toPixelFormat(int format) const;
+ unsigned int maxJpegBufferSize() const { return maxJpegBufferSize_; }
+
+ std::unique_ptr<CameraMetadata> requestTemplatePreview() const;
+ std::unique_ptr<CameraMetadata> requestTemplateVideo() const;
+
+private:
+ LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraCapabilities)
+
+ struct Camera3StreamConfiguration {
+ libcamera::Size resolution;
+ int androidFormat;
+ };
+
+ std::vector<libcamera::Size>
+ getYUVResolutions(libcamera::CameraConfiguration *cameraConfig,
+ const libcamera::PixelFormat &pixelFormat,
+ const std::vector<libcamera::Size> &resolutions);
+ std::vector<libcamera::Size>
+ getRawResolutions(const libcamera::PixelFormat &pixelFormat);
+ int initializeStreamConfigurations();
+
+ int initializeStaticMetadata();
+
+ std::shared_ptr<libcamera::Camera> camera_;
+
+ int facing_;
+ int orientation_;
+
+ std::vector<Camera3StreamConfiguration> streamConfigurations_;
+ std::map<int, libcamera::PixelFormat> formatsMap_;
+ std::unique_ptr<CameraMetadata> staticMetadata_;
+ unsigned int maxJpegBufferSize_;
+};
+
+#endif /* __ANDROID_CAMERA_CAPABILITIES_H__ */
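The header gives CameraDevice a single object to query for everything computed at initialization time. A usage sketch, assuming the caller already holds a std::shared_ptr<libcamera::Camera>; the orientation value and the function name are illustrative, not part of the patch:

#include <cerrno>
#include <memory>

#include <hardware/camera3.h>

#include <libcamera/camera.h>

#include "camera_capabilities.h"

int initHalCapabilities(std::shared_ptr<libcamera::Camera> camera)
{
	CameraCapabilities caps;

	/* Probes the stream configurations and builds the static metadata. */
	int ret = caps.initialize(camera, /* orientation */ 0,
				  CAMERA_FACING_BACK);
	if (ret)
		return ret;

	/* All later queries are served from the cached state. */
	const CameraMetadata *staticMetadata = caps.staticMetadata();
	std::unique_ptr<CameraMetadata> preview = caps.requestTemplatePreview();
	libcamera::PixelFormat format =
		caps.toPixelFormat(HAL_PIXEL_FORMAT_YCbCr_420_888);

	return staticMetadata && preview && format.isValid() ? 0 : -EINVAL;
}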
diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
index 751699cd..678cde23 100644
--- a/src/android/camera_device.cpp
+++ b/src/android/camera_device.cpp
@@ -6,174 +6,212 @@
*/
#include "camera_device.h"
+#include "camera_hal_config.h"
#include "camera_ops.h"
+#include "post_processor.h"
+#include <algorithm>
+#include <fstream>
#include <sys/mman.h>
-#include <tuple>
+#include <unistd.h>
#include <vector>
+#include <libcamera/base/log.h>
+#include <libcamera/base/thread.h>
+#include <libcamera/base/utils.h>
+
+#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/formats.h>
#include <libcamera/property_ids.h>
-#include "libcamera/internal/formats.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/utils.h"
-
-#include "camera_metadata.h"
#include "system/graphics.h"
-#include "jpeg/encoder_libjpeg.h"
-#include "jpeg/exif.h"
-
using namespace libcamera;
+LOG_DECLARE_CATEGORY(HAL)
+
namespace {
/*
- * \var camera3Resolutions
- * \brief The list of image resolutions defined as mandatory to be supported by
- * the Android Camera3 specification
+ * \struct Camera3StreamConfig
+ * \brief Data to store a StreamConfiguration associated with camera3_stream(s)
+ * \var streams List of pairs of a stream requested by the Android HAL client
+ * and the CameraStream::Type associated with it
+ * \var config The StreamConfiguration for the streams
*/
-const std::vector<Size> camera3Resolutions = {
- { 320, 240 },
- { 640, 480 },
- { 1280, 720 },
- { 1920, 1080 }
-};
+struct Camera3StreamConfig {
+ struct Camera3Stream {
+ camera3_stream_t *stream;
+ CameraStream::Type type;
+ };
-/*
- * \struct Camera3Format
- * \brief Data associated with an Android format identifier
- * \var libcameraFormats List of libcamera pixel formats compatible with the
- * Android format
- * \var name The human-readable representation of the Android format code
- */
-struct Camera3Format {
- std::vector<PixelFormat> libcameraFormats;
- bool mandatory;
- const char *name;
+ std::vector<Camera3Stream> streams;
+ StreamConfiguration config;
};
/*
- * \var camera3FormatsMap
- * \brief Associate Android format code with ancillary data
+ * Reorder the configurations to maximize the chance that
+ * libcamera::Camera accepts them. The sort rule is as follows:
+ * 1. The NV12 configuration with the largest resolution.
+ * 2. The configuration the JPEG stream is produced from.
+ * 3. The others, with larger resolutions and different formats first.
*/
-const std::map<int, const Camera3Format> camera3FormatsMap = {
- {
- HAL_PIXEL_FORMAT_BLOB, {
- { formats::MJPEG },
- true,
- "BLOB"
- }
- }, {
- HAL_PIXEL_FORMAT_YCbCr_420_888, {
- { formats::NV12, formats::NV21 },
- true,
- "YCbCr_420_888"
+void sortCamera3StreamConfigs(std::vector<Camera3StreamConfig> &unsortedConfigs,
+ const camera3_stream_t *jpegStream)
+{
+ const Camera3StreamConfig *jpegConfig = nullptr;
+
+ std::map<PixelFormat, std::vector<const Camera3StreamConfig *>> formatToConfigs;
+ for (const auto &streamConfig : unsortedConfigs) {
+ if (jpegStream && !jpegConfig) {
+ const auto &streams = streamConfig.streams;
+ if (std::find_if(streams.begin(), streams.end(),
+ [jpegStream](const auto &stream) {
+ return stream.stream == jpegStream;
+ }) != streams.end()) {
+ jpegConfig = &streamConfig;
+ continue;
+ }
}
- }, {
+ formatToConfigs[streamConfig.config.pixelFormat].push_back(&streamConfig);
+ }
+
+ if (jpegStream && !jpegConfig)
+ LOG(HAL, Fatal) << "No Camera3StreamConfig is found for JPEG";
+
+ for (auto &fmt : formatToConfigs) {
+ auto &streamConfigs = fmt.second;
+
+ /* Sorted by resolution. Smaller is put first. */
+ std::sort(streamConfigs.begin(), streamConfigs.end(),
+ [](const auto *streamConfigA, const auto *streamConfigB) {
+ const Size &sizeA = streamConfigA->config.size;
+ const Size &sizeB = streamConfigB->config.size;
+ return sizeA < sizeB;
+ });
+ }
+
+ std::vector<Camera3StreamConfig> sortedConfigs;
+ sortedConfigs.reserve(unsortedConfigs.size());
+
+ /*
+ * NV12 is the most prioritized format. Put the configuration with NV12
+ * and the largest resolution first.
+ */
+ const auto nv12It = formatToConfigs.find(formats::NV12);
+ if (nv12It != formatToConfigs.end()) {
+ auto &nv12Configs = nv12It->second;
+ const Camera3StreamConfig *nv12Largest = nv12Configs.back();
+
/*
- * \todo Translate IMPLEMENTATION_DEFINED inspecting the gralloc
- * usage flag. For now, copy the YCbCr_420 configuration.
+	 * If JPEG will be created from NV12 and its size is larger than
+	 * that of the largest NV12 configuration, put the NV12
+	 * configuration for JPEG first.
*/
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, {
- { formats::NV12, formats::NV21 },
- true,
- "IMPLEMENTATION_DEFINED"
- }
- }, {
- HAL_PIXEL_FORMAT_RAW10, {
- {
- formats::SBGGR10_CSI2P,
- formats::SGBRG10_CSI2P,
- formats::SGRBG10_CSI2P,
- formats::SRGGB10_CSI2P
- },
- false,
- "RAW10"
- }
- }, {
- HAL_PIXEL_FORMAT_RAW12, {
- {
- formats::SBGGR12_CSI2P,
- formats::SGBRG12_CSI2P,
- formats::SGRBG12_CSI2P,
- formats::SRGGB12_CSI2P
- },
- false,
- "RAW12"
- }
- }, {
- HAL_PIXEL_FORMAT_RAW16, {
- {
- formats::SBGGR16,
- formats::SGBRG16,
- formats::SGRBG16,
- formats::SRGGB16
- },
- false,
- "RAW16"
+ if (jpegConfig && jpegConfig->config.pixelFormat == formats::NV12) {
+ const Size &nv12SizeForJpeg = jpegConfig->config.size;
+ const Size &nv12LargestSize = nv12Largest->config.size;
+
+ if (nv12LargestSize < nv12SizeForJpeg) {
+ LOG(HAL, Debug) << "Insert " << jpegConfig->config.toString();
+ sortedConfigs.push_back(std::move(*jpegConfig));
+ jpegConfig = nullptr;
+ }
}
- }, {
- HAL_PIXEL_FORMAT_RAW_OPAQUE, {
- {
- formats::SBGGR10_IPU3,
- formats::SGBRG10_IPU3,
- formats::SGRBG10_IPU3,
- formats::SRGGB10_IPU3
- },
- false,
- "RAW_OPAQUE"
+
+ LOG(HAL, Debug) << "Insert " << nv12Largest->config.toString();
+ sortedConfigs.push_back(*nv12Largest);
+ nv12Configs.pop_back();
+
+ if (nv12Configs.empty())
+ formatToConfigs.erase(nv12It);
+ }
+
+	/* If the configuration for JPEG has not been inserted yet, insert it now. */
+ if (jpegConfig) {
+ LOG(HAL, Debug) << "Insert " << jpegConfig->config.toString();
+ sortedConfigs.push_back(std::move(*jpegConfig));
+ jpegConfig = nullptr;
+ }
+
+ /*
+	 * Insert the remaining configurations, alternating between formats
+	 * and putting larger resolutions first.
+ */
+ while (!formatToConfigs.empty()) {
+ for (auto it = formatToConfigs.begin(); it != formatToConfigs.end();) {
+ auto &configs = it->second;
+ LOG(HAL, Debug) << "Insert " << configs.back()->config.toString();
+ sortedConfigs.push_back(*configs.back());
+ configs.pop_back();
+
+ if (configs.empty())
+ it = formatToConfigs.erase(it);
+ else
+ it++;
}
- },
-};
+ }
-} /* namespace */
+ ASSERT(sortedConfigs.size() == unsortedConfigs.size());
-LOG_DECLARE_CATEGORY(HAL);
+ unsortedConfigs = sortedConfigs;
+}
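
A worked example of the sort rule may help; the stream set is hypothetical, and the ordering described follows from the code above:

/*
 * Hypothetical input, with JPEG produced from an NV12 1920x1080 stream:
 *
 *   NV12 640x480, NV12 1280x720, NV12 1920x1080 (JPEG source),
 *   RAW16 2592x1944
 *
 * Order after sortCamera3StreamConfigs():
 *
 *   1. NV12 1920x1080 - the JPEG source is NV12 and larger than every
 *      other NV12 configuration, so it is inserted first
 *   2. NV12 1280x720  - the largest remaining NV12 configuration
 *   3./4. NV12 640x480 and RAW16 2592x1944 - one entry per format per
 *      round, larger sizes first; their relative order follows the
 *      format map's key ordering
 */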
-class MappedCamera3Buffer : public MappedBuffer
+const char *rotationToString(int rotation)
{
-public:
- MappedCamera3Buffer(const buffer_handle_t camera3buffer, int flags);
-};
+ switch (rotation) {
+ case CAMERA3_STREAM_ROTATION_0:
+ return "0";
+ case CAMERA3_STREAM_ROTATION_90:
+ return "90";
+ case CAMERA3_STREAM_ROTATION_180:
+ return "180";
+ case CAMERA3_STREAM_ROTATION_270:
+ return "270";
+ }
+ return "INVALID";
+}
-MappedCamera3Buffer::MappedCamera3Buffer(const buffer_handle_t camera3buffer,
- int flags)
+#if defined(OS_CHROMEOS)
+/*
+ * Check whether the crop_rotate_scale_degrees values for all streams in
+ * the list are valid according to the Chrome OS camera HAL API.
+ */
+bool validateCropRotate(const camera3_stream_configuration_t &streamList)
{
- maps_.reserve(camera3buffer->numFds);
- error_ = 0;
-
- for (int i = 0; i < camera3buffer->numFds; i++) {
- if (camera3buffer->data[i] == -1)
- continue;
-
- off_t length = lseek(camera3buffer->data[i], 0, SEEK_END);
- if (length < 0) {
- error_ = -errno;
- LOG(HAL, Error) << "Failed to query plane length";
+ ASSERT(streamList.num_streams > 0);
+ const int cropRotateScaleDegrees =
+ streamList.streams[0]->crop_rotate_scale_degrees;
+ for (unsigned int i = 0; i < streamList.num_streams; ++i) {
+ const camera3_stream_t &stream = *streamList.streams[i];
+
+ switch (stream.crop_rotate_scale_degrees) {
+ case CAMERA3_STREAM_ROTATION_0:
+ case CAMERA3_STREAM_ROTATION_90:
+ case CAMERA3_STREAM_ROTATION_270:
break;
- }
- void *address = mmap(nullptr, length, flags, MAP_SHARED,
- camera3buffer->data[i], 0);
- if (address == MAP_FAILED) {
- error_ = -errno;
- LOG(HAL, Error) << "Failed to mmap plane";
- break;
+ /* 180° rotation is specified by Chrome OS as invalid. */
+ case CAMERA3_STREAM_ROTATION_180:
+ default:
+ LOG(HAL, Error) << "Invalid crop_rotate_scale_degrees: "
+ << stream.crop_rotate_scale_degrees;
+ return false;
}
- maps_.emplace_back(static_cast<uint8_t *>(address),
- static_cast<size_t>(length));
+ if (cropRotateScaleDegrees != stream.crop_rotate_scale_degrees) {
+ LOG(HAL, Error) << "crop_rotate_scale_degrees in all "
+ << "streams are not identical";
+ return false;
+ }
}
-}
-CameraStream::CameraStream(PixelFormat format, Size size,
- unsigned int index, Encoder *encoder)
- : format_(format), size_(size), index_(index), encoder_(encoder)
-{
+ return true;
}
+#endif
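
The rule enforced by validateCropRotate() can be illustrated briefly; the values below are hypothetical:

/*
 * Accepted: every stream requests the same value, chosen among the
 * 0, 90 and 270 degree rotations:
 *
 *   streams[0]->crop_rotate_scale_degrees = CAMERA3_STREAM_ROTATION_90;
 *   streams[1]->crop_rotate_scale_degrees = CAMERA3_STREAM_ROTATION_90;
 *
 * Rejected: any stream requesting CAMERA3_STREAM_ROTATION_180, or any
 * mix of values across streams.
 */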
+
+} /* namespace */
/*
* \struct Camera3RequestDescriptor
@@ -183,16 +221,30 @@ CameraStream::CameraStream(PixelFormat format, Size size,
*/
CameraDevice::Camera3RequestDescriptor::Camera3RequestDescriptor(
- unsigned int frameNumber, unsigned int numBuffers)
- : frameNumber(frameNumber), numBuffers(numBuffers)
+ Camera *camera, const camera3_capture_request_t *camera3Request)
{
- buffers = new camera3_stream_buffer_t[numBuffers];
- frameBuffers.reserve(numBuffers);
-}
+ frameNumber_ = camera3Request->frame_number;
-CameraDevice::Camera3RequestDescriptor::~Camera3RequestDescriptor()
-{
- delete[] buffers;
+ /* Copy the camera3 request stream information for later access. */
+ const uint32_t numBuffers = camera3Request->num_output_buffers;
+ buffers_.resize(numBuffers);
+ for (uint32_t i = 0; i < numBuffers; i++)
+ buffers_[i] = camera3Request->output_buffers[i];
+
+ /*
+	 * FrameBuffer instances created by wrapping a camera3-provided dmabuf
+ * are emplaced in this vector of unique_ptr<> for lifetime management.
+ */
+ frameBuffers_.reserve(numBuffers);
+
+ /* Clone the controls associated with the camera3 request. */
+ settings_ = CameraMetadata(camera3Request->settings);
+
+ /*
+ * Create the CaptureRequest, stored as a unique_ptr<> to tie its
+ * lifetime to the descriptor.
+ */
+ request_ = std::make_unique<CaptureRequest>(camera);
}
/*
@@ -208,42 +260,64 @@ CameraDevice::Camera3RequestDescriptor::~Camera3RequestDescriptor()
* back to the framework using the designated callbacks.
*/
-CameraDevice::CameraDevice(unsigned int id, const std::shared_ptr<Camera> &camera)
- : id_(id), running_(false), camera_(camera), staticMetadata_(nullptr),
+CameraDevice::CameraDevice(unsigned int id, std::shared_ptr<Camera> camera)
+ : id_(id), state_(State::Stopped), camera_(std::move(camera)),
facing_(CAMERA_FACING_FRONT), orientation_(0)
{
camera_->requestCompleted.connect(this, &CameraDevice::requestComplete);
- /*
- * \todo Determine a more accurate value for this during
- * streamConfiguration.
- */
- maxJpegBufferSize_ = 13 << 20; /* 13631488 from USB HAL */
-}
+ maker_ = "libcamera";
+ model_ = "cameraModel";
-CameraDevice::~CameraDevice()
-{
- if (staticMetadata_)
- delete staticMetadata_;
+ /* \todo Support getting properties on Android */
+ std::ifstream fstream("/var/cache/camera/camera.prop");
+ if (!fstream.is_open())
+ return;
+
+ std::string line;
+ while (std::getline(fstream, line)) {
+ std::string::size_type delimPos = line.find("=");
+ if (delimPos == std::string::npos)
+ continue;
+ std::string key = line.substr(0, delimPos);
+ std::string val = line.substr(delimPos + 1);
- for (auto &it : requestTemplates_)
- delete it.second;
+ if (!key.compare("ro.product.model"))
+ model_ = val;
+ else if (!key.compare("ro.product.manufacturer"))
+ maker_ = val;
+ }
}
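
For reference, a minimal /var/cache/camera/camera.prop that the parser above would accept; the values are purely illustrative:

# Unrecognized keys and malformed lines are ignored
ro.product.manufacturer=ExampleVendor
ro.product.model=ExampleModel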
-std::shared_ptr<CameraDevice> CameraDevice::create(unsigned int id,
- const std::shared_ptr<Camera> &cam)
+CameraDevice::~CameraDevice() = default;
+
+std::unique_ptr<CameraDevice> CameraDevice::create(unsigned int id,
+ std::shared_ptr<Camera> cam)
{
- CameraDevice *camera = new CameraDevice(id, cam);
- return std::shared_ptr<CameraDevice>(camera);
+ return std::unique_ptr<CameraDevice>(
+ new CameraDevice(id, std::move(cam)));
}
/*
- * Initialize the camera static information.
+ * Initialize the camera static information retrieved from the
+ * Camera::properties or from the cameraConfigData.
+ *
+ * cameraConfigData is optional for external camera devices and can be
+ * nullptr.
+ *
* This method is called before the camera device is opened.
*/
-int CameraDevice::initialize()
+int CameraDevice::initialize(const CameraConfigData *cameraConfigData)
{
- /* Initialize orientation and facing side of the camera. */
+ /*
+ * Initialize orientation and facing side of the camera.
+ *
+	 * If the libcamera::Camera provides this information, as retrieved
+	 * from the firmware, use it; otherwise fall back to the values parsed
+	 * from the configuration file. If the configuration file is not
+	 * available either, the camera is external, so its location and
+	 * rotation can safely be defaulted.
+ */
const ControlList &properties = camera_->properties();
if (properties.contains(properties::Location)) {
@@ -259,6 +333,22 @@ int CameraDevice::initialize()
facing_ = CAMERA_FACING_EXTERNAL;
break;
}
+
+ if (cameraConfigData && cameraConfigData->facing != -1 &&
+ facing_ != cameraConfigData->facing) {
+ LOG(HAL, Warning)
+ << "Camera location does not match"
+ << " configuration file. Using " << facing_;
+ }
+ } else if (cameraConfigData) {
+ if (cameraConfigData->facing == -1) {
+ LOG(HAL, Error)
+ << "Camera facing not in configuration file";
+ return -EINVAL;
+ }
+ facing_ = cameraConfigData->facing;
+ } else {
+ facing_ = CAMERA_FACING_EXTERNAL;
}
/*
@@ -272,234 +362,24 @@ int CameraDevice::initialize()
if (properties.contains(properties::Rotation)) {
int rotation = properties.get(properties::Rotation);
orientation_ = (360 - rotation) % 360;
- }
-
- int ret = camera_->acquire();
- if (ret) {
- LOG(HAL, Error) << "Failed to temporarily acquire the camera";
- return ret;
- }
-
- ret = initializeStreamConfigurations();
- camera_->release();
- return ret;
-}
-
-std::vector<Size> CameraDevice::getYUVResolutions(CameraConfiguration *cameraConfig,
- const PixelFormat &pixelFormat,
- const std::vector<Size> &resolutions)
-{
- std::vector<Size> supportedResolutions;
-
- StreamConfiguration &cfg = cameraConfig->at(0);
- for (const Size &res : resolutions) {
- cfg.pixelFormat = pixelFormat;
- cfg.size = res;
-
- CameraConfiguration::Status status = cameraConfig->validate();
- if (status != CameraConfiguration::Valid) {
- LOG(HAL, Debug) << cfg.toString() << " not supported";
- continue;
+ if (cameraConfigData && cameraConfigData->rotation != -1 &&
+ orientation_ != cameraConfigData->rotation) {
+ LOG(HAL, Warning)
+ << "Camera orientation does not match"
+ << " configuration file. Using " << orientation_;
}
-
- LOG(HAL, Debug) << cfg.toString() << " supported";
-
- supportedResolutions.push_back(res);
- }
-
- return supportedResolutions;
-}
-
-std::vector<Size> CameraDevice::getRawResolutions(const libcamera::PixelFormat &pixelFormat)
-{
- std::unique_ptr<CameraConfiguration> cameraConfig =
- camera_->generateConfiguration({ StreamRole::Raw });
- StreamConfiguration &cfg = cameraConfig->at(0);
- const StreamFormats &formats = cfg.formats();
- std::vector<Size> supportedResolutions = formats.sizes(pixelFormat);
-
- return supportedResolutions;
-}
-
-/*
- * Initialize the format conversion map to translate from Android format
- * identifier to libcamera pixel formats and fill in the list of supported
- * stream configurations to be reported to the Android camera framework through
- * the static stream configuration metadata.
- */
-int CameraDevice::initializeStreamConfigurations()
-{
- /*
- * Get the maximum output resolutions
- * \todo Get this from the camera properties once defined
- */
- std::unique_ptr<CameraConfiguration> cameraConfig =
- camera_->generateConfiguration({ StillCapture });
- if (!cameraConfig) {
- LOG(HAL, Error) << "Failed to get maximum resolution";
- return -EINVAL;
- }
- StreamConfiguration &cfg = cameraConfig->at(0);
-
- /*
- * \todo JPEG - Adjust the maximum available resolution by taking the
- * JPEG encoder requirements into account (alignment and aspect ratio).
- */
- const Size maxRes = cfg.size;
- LOG(HAL, Debug) << "Maximum supported resolution: " << maxRes.toString();
-
- /*
- * Build the list of supported image resolutions.
- *
- * The resolutions listed in camera3Resolution are mandatory to be
- * supported, up to the camera maximum resolution.
- *
- * Augment the list by adding resolutions calculated from the camera
- * maximum one.
- */
- std::vector<Size> cameraResolutions;
- std::copy_if(camera3Resolutions.begin(), camera3Resolutions.end(),
- std::back_inserter(cameraResolutions),
- [&](const Size &res) { return res < maxRes; });
-
- /*
- * The Camera3 specification suggests adding 1/2 and 1/4 of the maximum
- * resolution.
- */
- for (unsigned int divider = 2;; divider <<= 1) {
- Size derivedSize{
- maxRes.width / divider,
- maxRes.height / divider,
- };
-
- if (derivedSize.width < 320 ||
- derivedSize.height < 240)
- break;
-
- cameraResolutions.push_back(derivedSize);
- }
- cameraResolutions.push_back(maxRes);
-
- /* Remove duplicated entries from the list of supported resolutions. */
- std::sort(cameraResolutions.begin(), cameraResolutions.end());
- auto last = std::unique(cameraResolutions.begin(), cameraResolutions.end());
- cameraResolutions.erase(last, cameraResolutions.end());
-
- /*
- * Build the list of supported camera formats.
- *
- * To each Android format a list of compatible libcamera formats is
- * associated. The first libcamera format that tests successful is added
- * to the format translation map used when configuring the streams.
- * It is then tested against the list of supported camera resolutions to
- * build the stream configuration map reported through the camera static
- * metadata.
- */
- for (const auto &format : camera3FormatsMap) {
- int androidFormat = format.first;
- const Camera3Format &camera3Format = format.second;
- const std::vector<PixelFormat> &libcameraFormats =
- camera3Format.libcameraFormats;
-
- LOG(HAL, Debug) << "Trying to map Android format "
- << camera3Format.name;
-
- /*
- * JPEG is always supported, either produced directly by the
- * camera, or encoded in the HAL.
- */
- if (androidFormat == HAL_PIXEL_FORMAT_BLOB) {
- formatsMap_[androidFormat] = formats::MJPEG;
- LOG(HAL, Debug) << "Mapped Android format "
- << camera3Format.name << " to "
- << formats::MJPEG.toString()
- << " (fixed mapping)";
- continue;
- }
-
- /*
- * Test the libcamera formats that can produce images
- * compatible with the format defined by Android.
- */
- PixelFormat mappedFormat;
- for (const PixelFormat &pixelFormat : libcameraFormats) {
-
- LOG(HAL, Debug) << "Testing " << pixelFormat.toString();
-
- /*
- * The stream configuration size can be adjusted,
- * not the pixel format.
- *
- * \todo This could be simplified once all pipeline
- * handlers will report the StreamFormats list of
- * supported formats.
- */
- cfg.pixelFormat = pixelFormat;
-
- CameraConfiguration::Status status = cameraConfig->validate();
- if (status != CameraConfiguration::Invalid &&
- cfg.pixelFormat == pixelFormat) {
- mappedFormat = pixelFormat;
- break;
- }
- }
-
- if (!mappedFormat.isValid()) {
- /* If the format is not mandatory, skip it. */
- if (!camera3Format.mandatory)
- continue;
-
+ } else if (cameraConfigData) {
+ if (cameraConfigData->rotation == -1) {
LOG(HAL, Error)
- << "Failed to map mandatory Android format "
- << camera3Format.name << " ("
- << utils::hex(androidFormat) << "): aborting";
+ << "Camera rotation not in configuration file";
return -EINVAL;
}
-
- /*
- * Record the mapping and then proceed to generate the
- * stream configurations map, by testing the image resolutions.
- */
- formatsMap_[androidFormat] = mappedFormat;
- LOG(HAL, Debug) << "Mapped Android format "
- << camera3Format.name << " to "
- << mappedFormat.toString();
-
- std::vector<Size> resolutions;
- const PixelFormatInfo &info = PixelFormatInfo::info(mappedFormat);
- if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)
- resolutions = getRawResolutions(mappedFormat);
- else
- resolutions = getYUVResolutions(cameraConfig.get(),
- mappedFormat,
- cameraResolutions);
-
- for (const Size &res : resolutions) {
- streamConfigurations_.push_back({ res, androidFormat });
-
- /*
- * If the format is HAL_PIXEL_FORMAT_YCbCr_420_888
- * from which JPEG is produced, add an entry for
- * the JPEG stream.
- *
- * \todo Wire the JPEG encoder to query the supported
- * sizes provided a list of formats it can encode.
- *
- * \todo Support JPEG streams produced by the Camera
- * natively.
- */
- if (androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888)
- streamConfigurations_.push_back(
- { res, HAL_PIXEL_FORMAT_BLOB });
- }
+ orientation_ = cameraConfigData->rotation;
+ } else {
+ orientation_ = 0;
}
- LOG(HAL, Debug) << "Collected stream configuration map: ";
- for (const auto &entry : streamConfigurations_)
- LOG(HAL, Debug) << "{ " << entry.resolution.toString() << " - "
- << utils::hex(entry.androidFormat) << " }";
-
- return 0;
+ return capabilities_.initialize(camera_, orientation_, facing_);
}
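
The precedence implemented above can be summarised as follows:

/*
 * Source of the facing and rotation values, per the logic above:
 *
 *   camera properties  config file  ->  value used
 *   -----------------  -----------  -----------------------------------
 *   available          any          properties (warn if the config file
 *                                   disagrees)
 *   missing            present      config file (error if the entry is
 *                                   missing there too)
 *   missing            absent       external-camera defaults: facing
 *                                   EXTERNAL, rotation 0
 */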
/*
@@ -532,571 +412,56 @@ int CameraDevice::open(const hw_module_t *hardwareModule)
void CameraDevice::close()
{
- camera_->stop();
- camera_->release();
-
- running_ = false;
-}
-
-void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)
-{
- callbacks_ = callbacks;
-}
-
-std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize()
-{
- /*
- * \todo Keep this in sync with the actual number of entries.
- * Currently: 51 entries, 687 bytes of static metadata
- */
- uint32_t numEntries = 51;
- uint32_t byteSize = 687;
+ streams_.clear();
- /*
- * Calculate space occupation in bytes for dynamically built metadata
- * entries.
- *
- * Each stream configuration entry requires 52 bytes:
- * 4 32bits integers for ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
- * 4 64bits integers for ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS
- */
- byteSize += streamConfigurations_.size() * 48;
+ stop();
- return std::make_tuple(numEntries, byteSize);
+ camera_->release();
}
-/*
- * Return static information for the camera.
- */
-const camera_metadata_t *CameraDevice::getStaticMetadata()
+void CameraDevice::flush()
{
- if (staticMetadata_)
- return staticMetadata_->get();
-
- /*
- * The here reported metadata are enough to implement a basic capture
- * example application, but a real camera implementation will require
- * more.
- */
- uint32_t numEntries;
- uint32_t byteSize;
- std::tie(numEntries, byteSize) = calculateStaticMetadataSize();
- staticMetadata_ = new CameraMetadata(numEntries, byteSize);
- if (!staticMetadata_->isValid()) {
- LOG(HAL, Error) << "Failed to allocate static metadata";
- delete staticMetadata_;
- staticMetadata_ = nullptr;
- return nullptr;
- }
-
- /* Color correction static metadata. */
- std::vector<uint8_t> aberrationModes = {
- ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
- aberrationModes.data(),
- aberrationModes.size());
-
- /* Control static metadata. */
- std::vector<uint8_t> aeAvailableAntiBandingModes = {
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
- aeAvailableAntiBandingModes.data(),
- aeAvailableAntiBandingModes.size());
-
- std::vector<uint8_t> aeAvailableModes = {
- ANDROID_CONTROL_AE_MODE_ON,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,
- aeAvailableModes.data(),
- aeAvailableModes.size());
-
- std::vector<int32_t> availableAeFpsTarget = {
- 15, 30,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
- availableAeFpsTarget.data(),
- availableAeFpsTarget.size());
-
- std::vector<int32_t> aeCompensationRange = {
- 0, 0,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
- aeCompensationRange.data(),
- aeCompensationRange.size());
-
- const camera_metadata_rational_t aeCompensationStep[] = {
- { 0, 1 }
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,
- aeCompensationStep, 1);
-
- std::vector<uint8_t> availableAfModes = {
- ANDROID_CONTROL_AF_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,
- availableAfModes.data(),
- availableAfModes.size());
-
- std::vector<uint8_t> availableEffects = {
- ANDROID_CONTROL_EFFECT_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,
- availableEffects.data(),
- availableEffects.size());
-
- std::vector<uint8_t> availableSceneModes = {
- ANDROID_CONTROL_SCENE_MODE_DISABLED,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
- availableSceneModes.data(),
- availableSceneModes.size());
-
- std::vector<uint8_t> availableStabilizationModes = {
- ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
- availableStabilizationModes.data(),
- availableStabilizationModes.size());
-
- std::vector<uint8_t> availableAwbModes = {
- ANDROID_CONTROL_AWB_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
- availableAwbModes.data(),
- availableAwbModes.size());
-
- std::vector<int32_t> availableMaxRegions = {
- 0, 0, 0,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,
- availableMaxRegions.data(),
- availableMaxRegions.size());
-
- std::vector<uint8_t> sceneModesOverride = {
- ANDROID_CONTROL_AE_MODE_ON,
- ANDROID_CONTROL_AWB_MODE_AUTO,
- ANDROID_CONTROL_AF_MODE_AUTO,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
- sceneModesOverride.data(),
- sceneModesOverride.size());
-
- uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
- &aeLockAvailable, 1);
-
- uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
- staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
- &awbLockAvailable, 1);
-
- char availableControlModes = ANDROID_CONTROL_MODE_AUTO;
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,
- &availableControlModes, 1);
-
- /* JPEG static metadata. */
- std::vector<int32_t> availableThumbnailSizes = {
- 0, 0,
- };
- staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
- availableThumbnailSizes.data(),
- availableThumbnailSizes.size());
-
- /*
- * \todo Calculate the maximum JPEG buffer size by asking the encoder
- * giving the maximum frame size required.
- */
- staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, &maxJpegBufferSize_, 1);
-
- /* Sensor static metadata. */
- int32_t pixelArraySize[] = {
- 2592, 1944,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
- &pixelArraySize, 2);
-
- int32_t sensorSizes[] = {
- 0, 0, 2560, 1920,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
- &sensorSizes, 4);
-
- int32_t sensitivityRange[] = {
- 32, 2400,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
- &sensitivityRange, 2);
-
- uint16_t filterArr = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG;
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
- &filterArr, 1);
-
- int64_t exposureTimeRange[] = {
- 100000, 200000000,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
- &exposureTimeRange, 2);
-
- staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, &orientation_, 1);
-
- std::vector<int32_t> testPatterModes = {
- ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
- testPatterModes.data(),
- testPatterModes.size());
-
- std::vector<float> physicalSize = {
- 2592, 1944,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
- physicalSize.data(),
- physicalSize.size());
-
- uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
- &timestampSource, 1);
-
- /* Statistics static metadata. */
- uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
- staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
- &faceDetectMode, 1);
-
- int32_t maxFaceCount = 0;
- staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
- &maxFaceCount, 1);
-
- /* Sync static metadata. */
- int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
- staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
-
- /* Flash static metadata. */
- char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
- staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,
- &flashAvailable, 1);
-
- /* Lens static metadata. */
- std::vector<float> lensApertures = {
- 2.53 / 100,
- };
- staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
- lensApertures.data(),
- lensApertures.size());
-
- uint8_t lensFacing;
- switch (facing_) {
- default:
- case CAMERA_FACING_FRONT:
- lensFacing = ANDROID_LENS_FACING_FRONT;
- break;
- case CAMERA_FACING_BACK:
- lensFacing = ANDROID_LENS_FACING_BACK;
- break;
- case CAMERA_FACING_EXTERNAL:
- lensFacing = ANDROID_LENS_FACING_EXTERNAL;
- break;
- }
- staticMetadata_->addEntry(ANDROID_LENS_FACING, &lensFacing, 1);
-
- std::vector<float> lensFocalLenghts = {
- 1,
- };
- staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
- lensFocalLenghts.data(),
- lensFocalLenghts.size());
-
- std::vector<uint8_t> opticalStabilizations = {
- ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
- opticalStabilizations.data(),
- opticalStabilizations.size());
-
- float hypeFocalDistance = 0;
- staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
- &hypeFocalDistance, 1);
-
- float minFocusDistance = 0;
- staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
- &minFocusDistance, 1);
-
- /* Noise reduction modes. */
- uint8_t noiseReductionModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
- staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
- &noiseReductionModes, 1);
-
- /* Scaler static metadata. */
- float maxDigitalZoom = 1;
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
- &maxDigitalZoom, 1);
-
- std::vector<uint32_t> availableStreamConfigurations;
- availableStreamConfigurations.reserve(streamConfigurations_.size() * 4);
- for (const auto &entry : streamConfigurations_) {
- availableStreamConfigurations.push_back(entry.androidFormat);
- availableStreamConfigurations.push_back(entry.resolution.width);
- availableStreamConfigurations.push_back(entry.resolution.height);
- availableStreamConfigurations.push_back(
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
- }
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
- availableStreamConfigurations.data(),
- availableStreamConfigurations.size());
-
- std::vector<int64_t> availableStallDurations = {
- ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
- };
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
- availableStallDurations.data(),
- availableStallDurations.size());
-
- /* \todo Collect the minimum frame duration from the camera. */
- std::vector<int64_t> minFrameDurations;
- minFrameDurations.reserve(streamConfigurations_.size() * 4);
- for (const auto &entry : streamConfigurations_) {
- minFrameDurations.push_back(entry.androidFormat);
- minFrameDurations.push_back(entry.resolution.width);
- minFrameDurations.push_back(entry.resolution.height);
- minFrameDurations.push_back(33333333);
- }
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
- minFrameDurations.data(),
- minFrameDurations.size());
-
- uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
- staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
-
- /* Info static metadata. */
- uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
- staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
- &supportedHWLevel, 1);
-
- /* Request static metadata. */
- int32_t partialResultCount = 1;
- staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
- &partialResultCount, 1);
-
- uint8_t maxPipelineDepth = 2;
- staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
- &maxPipelineDepth, 1);
-
- /* LIMITED does not support reprocessing. */
- uint32_t maxNumInputStreams = 0;
- staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
- &maxNumInputStreams, 1);
-
- std::vector<uint8_t> availableCapabilities = {
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
- };
+ {
+ MutexLocker stateLock(stateMutex_);
+ if (state_ != State::Running)
+ return;
- /* Report if camera supports RAW. */
- std::unique_ptr<CameraConfiguration> cameraConfig =
- camera_->generateConfiguration({ StreamRole::Raw });
- if (cameraConfig && !cameraConfig->empty()) {
- const PixelFormatInfo &info =
- PixelFormatInfo::info(cameraConfig->at(0).pixelFormat);
- if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)
- availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
+ state_ = State::Flushing;
}
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
- availableCapabilities.data(),
- availableCapabilities.size());
-
- std::vector<int32_t> availableCharacteristicsKeys = {
- ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
- ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
- ANDROID_CONTROL_AE_AVAILABLE_MODES,
- ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
- ANDROID_CONTROL_AE_COMPENSATION_RANGE,
- ANDROID_CONTROL_AE_COMPENSATION_STEP,
- ANDROID_CONTROL_AF_AVAILABLE_MODES,
- ANDROID_CONTROL_AVAILABLE_EFFECTS,
- ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
- ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
- ANDROID_CONTROL_AWB_AVAILABLE_MODES,
- ANDROID_CONTROL_MAX_REGIONS,
- ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
- ANDROID_CONTROL_AE_LOCK_AVAILABLE,
- ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
- ANDROID_CONTROL_AVAILABLE_MODES,
- ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
- ANDROID_JPEG_MAX_SIZE,
- ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
- ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
- ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
- ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
- ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
- ANDROID_SENSOR_ORIENTATION,
- ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
- ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
- ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
- ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
- ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
- ANDROID_SYNC_MAX_LATENCY,
- ANDROID_FLASH_INFO_AVAILABLE,
- ANDROID_LENS_INFO_AVAILABLE_APERTURES,
- ANDROID_LENS_FACING,
- ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
- ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
- ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
- ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
- ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
- ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
- ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
- ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
- ANDROID_SCALER_CROPPING_TYPE,
- ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
- ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
- ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
- ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
- };
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
- availableCharacteristicsKeys.data(),
- availableCharacteristicsKeys.size());
-
- std::vector<int32_t> availableRequestKeys = {
- ANDROID_CONTROL_AE_MODE,
- ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
- ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
- ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
- ANDROID_CONTROL_AE_ANTIBANDING_MODE,
- ANDROID_CONTROL_AE_LOCK,
- ANDROID_CONTROL_AF_TRIGGER,
- ANDROID_CONTROL_AWB_MODE,
- ANDROID_CONTROL_AWB_LOCK,
- ANDROID_FLASH_MODE,
- ANDROID_STATISTICS_FACE_DETECT_MODE,
- ANDROID_NOISE_REDUCTION_MODE,
- ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
- ANDROID_LENS_APERTURE,
- ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
- ANDROID_CONTROL_MODE,
- ANDROID_CONTROL_CAPTURE_INTENT,
- };
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
- availableRequestKeys.data(),
- availableRequestKeys.size());
-
- std::vector<int32_t> availableResultKeys = {
- ANDROID_CONTROL_AE_STATE,
- ANDROID_CONTROL_AE_LOCK,
- ANDROID_CONTROL_AF_STATE,
- ANDROID_CONTROL_AWB_STATE,
- ANDROID_CONTROL_AWB_LOCK,
- ANDROID_LENS_STATE,
- ANDROID_SCALER_CROP_REGION,
- ANDROID_SENSOR_TIMESTAMP,
- ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
- ANDROID_SENSOR_EXPOSURE_TIME,
- ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
- ANDROID_STATISTICS_SCENE_FLICKER,
- ANDROID_JPEG_SIZE,
- ANDROID_JPEG_QUALITY,
- ANDROID_JPEG_ORIENTATION,
- };
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
- availableResultKeys.data(),
- availableResultKeys.size());
-
- if (!staticMetadata_->isValid()) {
- LOG(HAL, Error) << "Failed to construct static metadata";
- delete staticMetadata_;
- staticMetadata_ = nullptr;
- return nullptr;
- }
+ worker_.stop();
+ camera_->stop();
- return staticMetadata_->get();
+ MutexLocker stateLock(stateMutex_);
+ state_ = State::Stopped;
}
-CameraMetadata *CameraDevice::requestTemplatePreview()
+void CameraDevice::stop()
{
- /*
- * \todo Keep this in sync with the actual number of entries.
- * Currently: 20 entries, 35 bytes
- */
- CameraMetadata *requestTemplate = new CameraMetadata(20, 35);
- if (!requestTemplate->isValid()) {
- delete requestTemplate;
- return nullptr;
- }
-
- uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
- requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE,
- &aeMode, 1);
-
- int32_t aeExposureCompensation = 0;
- requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
- &aeExposureCompensation, 1);
+ MutexLocker stateLock(stateMutex_);
+ if (state_ == State::Stopped)
+ return;
- uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
- requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
- &aePrecaptureTrigger, 1);
-
- uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
- requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK,
- &aeLock, 1);
-
- std::vector<int32_t> aeFpsTarget = {
- 15, 30,
- };
- requestTemplate->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
- aeFpsTarget.data(),
- aeFpsTarget.size());
-
- uint8_t aeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
- requestTemplate->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
- &aeAntibandingMode, 1);
-
- uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
- requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER,
- &afTrigger, 1);
-
- uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
- requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE,
- &awbMode, 1);
-
- uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
- requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK,
- &awbLock, 1);
-
- uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
- requestTemplate->addEntry(ANDROID_FLASH_MODE,
- &flashMode, 1);
-
- uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
- requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,
- &faceDetectMode, 1);
-
- uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF;
- requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE,
- &noiseReduction, 1);
-
- uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
- requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
- &aberrationMode, 1);
-
- uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
- requestTemplate->addEntry(ANDROID_CONTROL_MODE, &controlMode, 1);
+ worker_.stop();
+ camera_->stop();
- float lensAperture = 2.53 / 100;
- requestTemplate->addEntry(ANDROID_LENS_APERTURE, &lensAperture, 1);
+ descriptors_.clear();
+ state_ = State::Stopped;
+}
- uint8_t opticalStabilization = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
- requestTemplate->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
- &opticalStabilization, 1);
+unsigned int CameraDevice::maxJpegBufferSize() const
+{
+ return capabilities_.maxJpegBufferSize();
+}
- uint8_t captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
- requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,
- &captureIntent, 1);
+void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)
+{
+ callbacks_ = callbacks;
+}
- return requestTemplate;
+const camera_metadata_t *CameraDevice::getStaticMetadata()
+{
+ return capabilities_.staticMetadata()->get();
}
/*
@@ -1109,57 +474,47 @@ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)
return it->second->get();
/* Use the capture intent matching the requested template type. */
- CameraMetadata *requestTemplate;
+ std::unique_ptr<CameraMetadata> requestTemplate;
uint8_t captureIntent;
switch (type) {
case CAMERA3_TEMPLATE_PREVIEW:
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+ requestTemplate = capabilities_.requestTemplatePreview();
break;
case CAMERA3_TEMPLATE_STILL_CAPTURE:
+ /*
+		 * Use the preview template for still capture; the two only
+		 * differ in the torch mode, which we do not currently support.
+ */
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
+ requestTemplate = capabilities_.requestTemplatePreview();
break;
case CAMERA3_TEMPLATE_VIDEO_RECORD:
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
+ requestTemplate = capabilities_.requestTemplateVideo();
break;
case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
+ requestTemplate = capabilities_.requestTemplateVideo();
break;
+	/* \todo Implement template generation for the remaining use cases. */
case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
- captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
- break;
case CAMERA3_TEMPLATE_MANUAL:
- captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
- break;
default:
- LOG(HAL, Error) << "Invalid template request type: " << type;
+ LOG(HAL, Error) << "Unsupported template request type: " << type;
return nullptr;
}
- requestTemplate = requestTemplatePreview();
if (!requestTemplate || !requestTemplate->isValid()) {
LOG(HAL, Error) << "Failed to construct request template";
- delete requestTemplate;
return nullptr;
}
requestTemplate->updateEntry(ANDROID_CONTROL_CAPTURE_INTENT,
- &captureIntent, 1);
-
- requestTemplates_[type] = requestTemplate;
- return requestTemplate->get();
-}
+ captureIntent);
-PixelFormat CameraDevice::toPixelFormat(int format)
-{
- /* Translate Android format code to libcamera pixel format. */
- auto it = formatsMap_.find(format);
- if (it == formatsMap_.end()) {
- LOG(HAL, Error) << "Requested format " << utils::hex(format)
- << " not supported";
- return PixelFormat();
- }
-
- return it->second;
+ requestTemplates_[type] = std::move(requestTemplate);
+ return requestTemplates_[type]->get();
}
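
A short usage sketch of the template cache above; the device pointer is hypothetical:

/*
 * The first call for a given type builds and caches the template;
 * later calls return the cached metadata.
 */
const camera_metadata_t *preview =
	device->constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
const camera_metadata_t *cached =
	device->constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
/* preview == cached */

/* Unsupported template types are reported with a nullptr return. */
const camera_metadata_t *manual =
	device->constructDefaultRequestSettings(CAMERA3_TEMPLATE_MANUAL);
/* manual == nullptr */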
/*
@@ -1168,12 +523,25 @@ PixelFormat CameraDevice::toPixelFormat(int format)
*/
int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
{
+ /* Before any configuration attempt, stop the camera. */
+ stop();
+
+ if (stream_list->num_streams == 0) {
+ LOG(HAL, Error) << "No streams in configuration";
+ return -EINVAL;
+ }
+
+#if defined(OS_CHROMEOS)
+ if (!validateCropRotate(*stream_list))
+ return -EINVAL;
+#endif
+
/*
* Generate an empty configuration, and construct a StreamConfiguration
* for each camera3_stream to add to it.
*/
- config_ = camera_->generateConfiguration();
- if (!config_) {
+ std::unique_ptr<CameraConfiguration> config = camera_->generateConfiguration();
+ if (!config) {
LOG(HAL, Error) << "Failed to generate camera configuration";
return -EINVAL;
}
@@ -1186,24 +554,44 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
streams_.clear();
streams_.reserve(stream_list->num_streams);
+ std::vector<Camera3StreamConfig> streamConfigs;
+ streamConfigs.reserve(stream_list->num_streams);
+
/* First handle all non-MJPEG streams. */
camera3_stream_t *jpegStream = nullptr;
for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
camera3_stream_t *stream = stream_list->streams[i];
Size size(stream->width, stream->height);
- PixelFormat format = toPixelFormat(stream->format);
+ PixelFormat format = capabilities_.toPixelFormat(stream->format);
LOG(HAL, Info) << "Stream #" << i
<< ", direction: " << stream->stream_type
<< ", width: " << stream->width
<< ", height: " << stream->height
<< ", format: " << utils::hex(stream->format)
+ << ", rotation: " << rotationToString(stream->rotation)
+#if defined(OS_CHROMEOS)
+ << ", crop_rotate_scale_degrees: "
+ << rotationToString(stream->crop_rotate_scale_degrees)
+#endif
<< " (" << format.toString() << ")";
if (!format.isValid())
return -EINVAL;
+ /* \todo Support rotation. */
+ if (stream->rotation != CAMERA3_STREAM_ROTATION_0) {
+ LOG(HAL, Error) << "Rotation is not supported";
+ return -EINVAL;
+ }
+#if defined(OS_CHROMEOS)
+ if (stream->crop_rotate_scale_degrees != CAMERA3_STREAM_ROTATION_0) {
+ LOG(HAL, Error) << "Rotation is not supported";
+ return -EINVAL;
+ }
+#endif
+
/* Defer handling of MJPEG streams until all others are known. */
if (stream->format == HAL_PIXEL_FORMAT_BLOB) {
if (jpegStream) {
@@ -1216,23 +604,25 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
continue;
}
- StreamConfiguration streamConfiguration;
- streamConfiguration.size = size;
- streamConfiguration.pixelFormat = format;
+ Camera3StreamConfig streamConfig;
+ streamConfig.streams = { { stream, CameraStream::Type::Direct } };
+ streamConfig.config.size = size;
+ streamConfig.config.pixelFormat = format;
+ streamConfigs.push_back(std::move(streamConfig));
- config_->addConfiguration(streamConfiguration);
- unsigned int index = config_->size() - 1;
- streams_.emplace_back(format, size, index);
- stream->priv = static_cast<void *>(&streams_.back());
+ /* This stream will be produced by hardware. */
+ stream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
}
/* Now handle the MJPEG streams, adding a new stream if required. */
if (jpegStream) {
+ CameraStream::Type type;
int index = -1;
/* Search for a compatible stream in the non-JPEG ones. */
- for (unsigned int i = 0; i < config_->size(); i++) {
- StreamConfiguration &cfg = config_->at(i);
+ for (size_t i = 0; i < streamConfigs.size(); ++i) {
+ Camera3StreamConfig &streamConfig = streamConfigs[i];
+ const auto &cfg = streamConfig.config;
/*
* \todo The PixelFormat must also be compatible with
@@ -1245,7 +635,15 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
LOG(HAL, Info)
<< "Android JPEG stream mapped to libcamera stream " << i;
+ type = CameraStream::Type::Mapped;
index = i;
+
+ /*
+ * The source stream will be read by software to
+ * produce the JPEG stream.
+ */
+ camera3_stream_t *stream = streamConfig.streams[0].stream;
+ stream->usage |= GRALLOC_USAGE_SW_READ_OFTEN;
break;
}
@@ -1254,80 +652,82 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
* introduce a new stream to satisfy the request requirements.
*/
if (index < 0) {
- StreamConfiguration streamConfiguration;
-
/*
* \todo The pixelFormat should be a 'best-fit' choice
* and may require a validation cycle. This is not yet
* handled, and should be considered as part of any
* stream configuration reworks.
*/
- streamConfiguration.size.width = jpegStream->width;
- streamConfiguration.size.height = jpegStream->height;
- streamConfiguration.pixelFormat = formats::NV12;
+ Camera3StreamConfig streamConfig;
+ streamConfig.config.size.width = jpegStream->width;
+ streamConfig.config.size.height = jpegStream->height;
+ streamConfig.config.pixelFormat = formats::NV12;
+ streamConfigs.push_back(std::move(streamConfig));
- LOG(HAL, Info) << "Adding " << streamConfiguration.toString()
+ LOG(HAL, Info) << "Adding " << streamConfig.config.toString()
<< " for MJPEG support";
- config_->addConfiguration(streamConfiguration);
- index = config_->size() - 1;
+ type = CameraStream::Type::Internal;
+ index = streamConfigs.size() - 1;
}
- StreamConfiguration &cfg = config_->at(index);
+ /* The JPEG stream will be produced by software. */
+ jpegStream->usage |= GRALLOC_USAGE_SW_WRITE_OFTEN;
- /*
- * Construct a software encoder for the MJPEG streams from the
- * chosen libcamera source stream.
- */
- Encoder *encoder = new EncoderLibJpeg();
- int ret = encoder->configure(cfg);
- if (ret) {
- LOG(HAL, Error) << "Failed to configure encoder";
- delete encoder;
- return ret;
- }
+ streamConfigs[index].streams.push_back({ jpegStream, type });
+ }
+
+ sortCamera3StreamConfigs(streamConfigs, jpegStream);
+ for (const auto &streamConfig : streamConfigs) {
+ config->addConfiguration(streamConfig.config);
- streams_.emplace_back(formats::MJPEG, cfg.size, index, encoder);
- jpegStream->priv = static_cast<void *>(&streams_.back());
+ for (auto &stream : streamConfig.streams) {
+ streams_.emplace_back(this, config.get(), stream.type,
+ stream.stream, config->size() - 1);
+ stream.stream->priv = static_cast<void *>(&streams_.back());
+ }
}
- switch (config_->validate()) {
+ switch (config->validate()) {
case CameraConfiguration::Valid:
break;
case CameraConfiguration::Adjusted:
LOG(HAL, Info) << "Camera configuration adjusted";
- for (const StreamConfiguration &cfg : *config_)
+ for (const StreamConfiguration &cfg : *config)
LOG(HAL, Info) << " - " << cfg.toString();
- config_.reset();
return -EINVAL;
case CameraConfiguration::Invalid:
LOG(HAL, Info) << "Camera configuration invalid";
- config_.reset();
return -EINVAL;
}
- for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
- camera3_stream_t *stream = stream_list->streams[i];
- CameraStream *cameraStream = static_cast<CameraStream *>(stream->priv);
- StreamConfiguration &cfg = config_->at(cameraStream->index());
-
- /* Use the bufferCount confirmed by the validation process. */
- stream->max_buffers = cfg.bufferCount;
- }
-
/*
* Once the CameraConfiguration has been adjusted/validated
* it can be applied to the camera.
*/
- int ret = camera_->configure(config_.get());
+ int ret = camera_->configure(config.get());
if (ret) {
LOG(HAL, Error) << "Failed to configure camera '"
<< camera_->id() << "'";
return ret;
}
+ /*
+ * Configure the HAL CameraStream instances using the associated
+ * StreamConfiguration and set the number of required buffers in
+ * the Android camera3_stream_t.
+ */
+ for (CameraStream &cameraStream : streams_) {
+ ret = cameraStream.configure();
+ if (ret) {
+ LOG(HAL, Error) << "Failed to configure camera stream";
+ return ret;
+ }
+ }
+
+ config_ = std::move(config);
return 0;
}
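
To summarise how configureStreams() maps Android streams onto CameraStream types, consider a hypothetical request (sizes are illustrative):

/*
 *   YCbCr_420_888 1920x1080 -> Type::Direct: filled by hardware, the
 *                              stream gets GRALLOC_USAGE_HW_CAMERA_WRITE
 *
 *   BLOB 1920x1080, matching the stream above
 *                           -> Type::Mapped: JPEG is encoded in software
 *                              from the matching stream, which gets
 *                              GRALLOC_USAGE_SW_READ_OFTEN
 *
 *   BLOB 4000x3000, with no matching stream
 *                           -> Type::Internal: an NV12 configuration of
 *                              the same size is added and consumed only
 *                              inside the HAL
 */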
@@ -1361,85 +761,234 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer
return new FrameBuffer(std::move(planes));
}
-int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Request)
+int CameraDevice::processControls(Camera3RequestDescriptor *descriptor)
+{
+ const CameraMetadata &settings = descriptor->settings_;
+ if (!settings.isValid())
+ return 0;
+
+ /* Translate the Android request settings to libcamera controls. */
+ camera_metadata_ro_entry_t entry;
+ if (settings.getEntry(ANDROID_SCALER_CROP_REGION, &entry)) {
+ const int32_t *data = entry.data.i32;
+ Rectangle cropRegion{ data[0], data[1],
+ static_cast<unsigned int>(data[2]),
+ static_cast<unsigned int>(data[3]) };
+ ControlList &controls = descriptor->request_->controls();
+ controls.set(controls::ScalerCrop, cropRegion);
+ }
+
+ return 0;
+}
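
As an illustration of the translation above, assume a client requesting a centred crop (hypothetical values):

/*
 * Android capture request settings:
 *   ANDROID_SCALER_CROP_REGION = { 640, 360, 1280, 720 }
 *                                  x    y    width  height
 *
 * become the libcamera control:
 *   controls::ScalerCrop = Rectangle{ 640, 360, 1280, 720 }
 */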
+
+void CameraDevice::abortRequest(camera3_capture_request_t *request)
+{
+ notifyError(request->frame_number, nullptr, CAMERA3_MSG_ERROR_REQUEST);
+
+ camera3_capture_result_t result = {};
+ result.num_output_buffers = request->num_output_buffers;
+ result.frame_number = request->frame_number;
+ result.partial_result = 0;
+
+ std::vector<camera3_stream_buffer_t> resultBuffers(result.num_output_buffers);
+ for (auto [i, buffer] : utils::enumerate(resultBuffers)) {
+ buffer = request->output_buffers[i];
+ buffer.release_fence = request->output_buffers[i].acquire_fence;
+ buffer.acquire_fence = -1;
+ buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ }
+ result.output_buffers = resultBuffers.data();
+
+ callbacks_->process_capture_result(callbacks_, &result);
+}
+
+bool CameraDevice::isValidRequest(camera3_capture_request_t *camera3Request) const
{
- if (!camera3Request->num_output_buffers) {
+ if (!camera3Request) {
+ LOG(HAL, Error) << "No capture request provided";
+ return false;
+ }
+
+ if (!camera3Request->num_output_buffers ||
+ !camera3Request->output_buffers) {
LOG(HAL, Error) << "No output buffers provided";
- return -EINVAL;
+ return false;
}
- /* Start the camera if that's the first request we handle. */
- if (!running_) {
- int ret = camera_->start();
- if (ret) {
- LOG(HAL, Error) << "Failed to start camera";
- return ret;
+ /* configureStreams() has not been called or has failed. */
+ if (streams_.empty() || !config_) {
+ LOG(HAL, Error) << "No stream is configured";
+ return false;
+ }
+
+ for (uint32_t i = 0; i < camera3Request->num_output_buffers; i++) {
+ const camera3_stream_buffer_t &outputBuffer =
+ camera3Request->output_buffers[i];
+ if (!outputBuffer.buffer || !(*outputBuffer.buffer)) {
+ LOG(HAL, Error) << "Invalid native handle";
+ return false;
+ }
+
+ const native_handle_t *handle = *outputBuffer.buffer;
+ constexpr int kNativeHandleMaxFds = 1024;
+ if (handle->numFds < 0 || handle->numFds > kNativeHandleMaxFds) {
+ LOG(HAL, Error)
+ << "Invalid number of fds (" << handle->numFds
+ << ") in buffer " << i;
+ return false;
+ }
+
+ constexpr int kNativeHandleMaxInts = 1024;
+ if (handle->numInts < 0 || handle->numInts > kNativeHandleMaxInts) {
+ LOG(HAL, Error)
+ << "Invalid number of ints (" << handle->numInts
+ << ") in buffer " << i;
+ return false;
}
- running_ = true;
+ const camera3_stream *camera3Stream = outputBuffer.stream;
+ if (!camera3Stream)
+ return false;
+
+ const CameraStream *cameraStream =
+ static_cast<CameraStream *>(camera3Stream->priv);
+
+ auto found = std::find_if(streams_.begin(), streams_.end(),
+ [cameraStream](const CameraStream &stream) {
+ return &stream == cameraStream;
+ });
+ if (found == streams_.end()) {
+ LOG(HAL, Error)
+ << "No corresponding configured stream found";
+ return false;
+ }
}
- /*
- * Queue a request for the Camera with the provided dmabuf file
- * descriptors.
- */
- const camera3_stream_buffer_t *camera3Buffers =
- camera3Request->output_buffers;
+ return true;
+}
+
+int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Request)
+{
+ if (!isValidRequest(camera3Request))
+ return -EINVAL;
/*
* Save the request descriptors for use at completion time.
* The descriptor and the associated memory reserved here are freed
* at request complete time.
*/
- Camera3RequestDescriptor *descriptor =
- new Camera3RequestDescriptor(camera3Request->frame_number,
- camera3Request->num_output_buffers);
+ Camera3RequestDescriptor descriptor(camera_.get(), camera3Request);
- Request *request =
- camera_->createRequest(reinterpret_cast<uint64_t>(descriptor));
-
- for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
- CameraStream *cameraStream =
- static_cast<CameraStream *>(camera3Buffers[i].stream->priv);
+ /*
+	 * \todo The Android request model is incremental: settings passed in
+	 * previous requests remain in effect until explicitly overridden in
+	 * a new request. Do we need to cache settings incrementally here, or
+	 * is this handled by the Android camera service?
+ */
+ if (camera3Request->settings)
+ lastSettings_ = camera3Request->settings;
+ else
+ descriptor.settings_ = lastSettings_;
+
+ LOG(HAL, Debug) << "Queueing request " << descriptor.request_->cookie()
+ << " with " << descriptor.buffers_.size() << " streams";
+ for (unsigned int i = 0; i < descriptor.buffers_.size(); ++i) {
+ const camera3_stream_buffer_t &camera3Buffer = descriptor.buffers_[i];
+ camera3_stream *camera3Stream = camera3Buffer.stream;
+ CameraStream *cameraStream = static_cast<CameraStream *>(camera3Stream->priv);
+
+ std::stringstream ss;
+ ss << i << " - (" << camera3Stream->width << "x"
+ << camera3Stream->height << ")"
+ << "[" << utils::hex(camera3Stream->format) << "] -> "
+ << "(" << cameraStream->configuration().size.toString() << ")["
+ << cameraStream->configuration().pixelFormat.toString() << "]";
/*
- * Keep track of which stream the request belongs to and store
- * the native buffer handles.
+		 * Inspect the camera stream type, create buffers as
+		 * appropriate and add them to the Request if required.
*/
- descriptor->buffers[i].stream = camera3Buffers[i].stream;
- descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
-
- /* Software streams are handled after hardware streams complete. */
- if (cameraStream->format() == formats::MJPEG)
+ FrameBuffer *buffer = nullptr;
+ switch (cameraStream->type()) {
+ case CameraStream::Type::Mapped:
+ /*
+ * Mapped streams don't need buffers added to the
+ * Request.
+ */
+ LOG(HAL, Debug) << ss.str() << " (mapped)";
continue;
- /*
- * Create a libcamera buffer using the dmabuf descriptors of
- * the camera3Buffer for each stream. The FrameBuffer is
- * directly associated with the Camera3RequestDescriptor for
- * lifetime management only.
- */
- FrameBuffer *buffer = createFrameBuffer(*camera3Buffers[i].buffer);
+ case CameraStream::Type::Direct:
+ /*
+ * Create a libcamera buffer using the dmabuf
+ * descriptors of the camera3Buffer for each stream and
+ * associate it with the Camera3RequestDescriptor for
+ * lifetime management only.
+ */
+ buffer = createFrameBuffer(*camera3Buffer.buffer);
+ descriptor.frameBuffers_.emplace_back(buffer);
+ LOG(HAL, Debug) << ss.str() << " (direct)";
+ break;
+
+ case CameraStream::Type::Internal:
+ /*
+ * Get the frame buffer from the CameraStream internal
+ * buffer pool.
+ *
+ * The buffer has to be returned to the CameraStream
+ * once it has been processed.
+ */
+ buffer = cameraStream->getBuffer();
+ LOG(HAL, Debug) << ss.str() << " (internal)";
+ break;
+ }
+
if (!buffer) {
LOG(HAL, Error) << "Failed to create buffer";
- delete request;
- delete descriptor;
return -ENOMEM;
}
- descriptor->frameBuffers.emplace_back(buffer);
-
- StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index());
- Stream *stream = streamConfiguration->stream();
- request->addBuffer(stream, buffer);
+ descriptor.request_->addBuffer(cameraStream->stream(), buffer,
+ camera3Buffer.acquire_fence);
}
- int ret = camera_->queueRequest(request);
- if (ret) {
- LOG(HAL, Error) << "Failed to queue request";
- delete request;
- delete descriptor;
+ /*
+ * Translate controls from Android to libcamera and queue the request
+ * to the CameraWorker thread.
+ */
+ int ret = processControls(&descriptor);
+ if (ret)
return ret;
+
+ /*
+	 * If a flush is in progress, abort the request. If the camera has
+	 * been stopped, we have to restart it to be able to process the
+	 * request.
+ */
+ MutexLocker stateLock(stateMutex_);
+
+ if (state_ == State::Flushing) {
+ abortRequest(camera3Request);
+ return 0;
+ }
+
+ if (state_ == State::Stopped) {
+ worker_.start();
+
+ ret = camera_->start();
+ if (ret) {
+ LOG(HAL, Error) << "Failed to start camera";
+ worker_.stop();
+ return ret;
+ }
+
+ state_ = State::Running;
+ }
+
+ worker_.queueRequest(descriptor.request_.get());
+
+ {
+ MutexLocker descriptorsLock(descriptorsMutex_);
+ descriptors_[descriptor.request_->cookie()] = std::move(descriptor);
}
return 0;
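
The abortRequest() helper used in the flush path above is declared in the camera_device.h hunk below, but its body falls outside the context shown here. Under the camera3 contract, aborting a request means notifying ERROR_REQUEST and returning every buffer in error state, with no shutter event and no metadata. A minimal sketch of that contract, not the actual helper body from this series:

void CameraDevice::abortRequest(camera3_capture_request_t *request)
{
	notifyError(request->frame_number, nullptr, CAMERA3_MSG_ERROR_REQUEST);

	camera3_capture_result_t result = {};
	result.frame_number = request->frame_number;
	result.num_output_buffers = request->num_output_buffers;
	result.partial_result = 0;

	/* Return all buffers in error state, without touching their content. */
	std::vector<camera3_stream_buffer_t> buffers(request->num_output_buffers);
	for (unsigned int i = 0; i < request->num_output_buffers; ++i) {
		buffers[i] = request->output_buffers[i];
		buffers[i].release_fence = buffers[i].acquire_fence;
		buffers[i].acquire_fence = -1;
		buffers[i].status = CAMERA3_BUFFER_STATUS_ERROR;
	}
	result.output_buffers = buffers.data();

	callbacks_->process_capture_result(callbacks_, &result);
}
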
@@ -1447,152 +996,127 @@ int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Reques
void CameraDevice::requestComplete(Request *request)
{
- const Request::BufferMap &buffers = request->buffers();
- camera3_buffer_status status = CAMERA3_BUFFER_STATUS_OK;
- std::unique_ptr<CameraMetadata> resultMetadata;
- Camera3RequestDescriptor *descriptor =
- reinterpret_cast<Camera3RequestDescriptor *>(request->cookie());
+ decltype(descriptors_)::node_type node;
+ {
+ MutexLocker descriptorsLock(descriptorsMutex_);
+ auto it = descriptors_.find(request->cookie());
+ if (it == descriptors_.end()) {
+ /*
+ * \todo Clarify if the Camera has to be closed on
+ * ERROR_DEVICE and possibly demote the Fatal to simple
+ * Error.
+ */
+ notifyError(0, nullptr, CAMERA3_MSG_ERROR_DEVICE);
+ LOG(HAL, Fatal)
+ << "Unknown request: " << request->cookie();
+
+ return;
+ }
+
+ node = descriptors_.extract(it);
+ }
+ Camera3RequestDescriptor &descriptor = node.mapped();
+
+ /*
+ * Prepare the capture result for the Android camera stack.
+ *
+ * The buffer status is set to OK and later changed to ERROR if
+ * post-processing/compression fails.
+ */
+ camera3_capture_result_t captureResult = {};
+ captureResult.frame_number = descriptor.frameNumber_;
+ captureResult.num_output_buffers = descriptor.buffers_.size();
+ for (camera3_stream_buffer_t &buffer : descriptor.buffers_) {
+ buffer.acquire_fence = -1;
+ buffer.release_fence = -1;
+ buffer.status = CAMERA3_BUFFER_STATUS_OK;
+ }
+ captureResult.output_buffers = descriptor.buffers_.data();
+ captureResult.partial_result = 1;
+ /*
+	 * If the Request has failed, abort it by notifying the error and
+	 * completing it with all buffers in error state.
+ */
if (request->status() != Request::RequestComplete) {
- LOG(HAL, Error) << "Request not successfully completed: "
+ LOG(HAL, Error) << "Request " << request->cookie()
+ << " not successfully completed: "
<< request->status();
- status = CAMERA3_BUFFER_STATUS_ERROR;
+
+ notifyError(descriptor.frameNumber_, nullptr,
+ CAMERA3_MSG_ERROR_REQUEST);
+
+ captureResult.partial_result = 0;
+ for (camera3_stream_buffer_t &buffer : descriptor.buffers_)
+ buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ callbacks_->process_capture_result(callbacks_, &captureResult);
+
+ return;
}
/*
- * \todo The timestamp used for the metadata is currently always taken
- * from the first buffer (which may be the first stream) in the Request.
- * It might be appropriate to return a 'correct' (as determined by
- * pipeline handlers) timestamp in the Request itself.
+ * Notify shutter as soon as we have verified we have a valid request.
+ *
+ * \todo The shutter event notification should be sent to the framework
+ * as soon as possible, earlier than request completion time.
+ */
+ uint64_t sensorTimestamp = static_cast<uint64_t>(request->metadata()
+ .get(controls::SensorTimestamp));
+ notifyShutter(descriptor.frameNumber_, sensorTimestamp);
+
+ LOG(HAL, Debug) << "Request " << request->cookie() << " completed with "
+ << descriptor.buffers_.size() << " streams";
+
+ /*
+ * Generate the metadata associated with the captured buffers.
+ *
+ * Notify if the metadata generation has failed, but continue processing
+ * buffers and return an empty metadata pack.
*/
- FrameBuffer *buffer = buffers.begin()->second;
- resultMetadata = getResultMetadata(descriptor->frameNumber,
- buffer->metadata().timestamp);
+ std::unique_ptr<CameraMetadata> resultMetadata = getResultMetadata(descriptor);
+ if (!resultMetadata) {
+ notifyError(descriptor.frameNumber_, nullptr, CAMERA3_MSG_ERROR_RESULT);
+
+		/* The camera framework expects an empty metadata pack on error. */
+ resultMetadata = std::make_unique<CameraMetadata>(0, 0);
+ }
/* Handle any JPEG compression. */
- for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
+ for (camera3_stream_buffer_t &buffer : descriptor.buffers_) {
CameraStream *cameraStream =
- static_cast<CameraStream *>(descriptor->buffers[i].stream->priv);
-
- if (cameraStream->format() != formats::MJPEG)
- continue;
+ static_cast<CameraStream *>(buffer.stream->priv);
- Encoder *encoder = cameraStream->encoder();
- if (!encoder) {
- LOG(HAL, Error) << "Failed to identify encoder";
+ if (cameraStream->camera3Stream().format != HAL_PIXEL_FORMAT_BLOB)
continue;
- }
- StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index());
- Stream *stream = streamConfiguration->stream();
- FrameBuffer *buffer = request->findBuffer(stream);
- if (!buffer) {
+ FrameBuffer *src = request->findBuffer(cameraStream->stream());
+ if (!src) {
LOG(HAL, Error) << "Failed to find a source stream buffer";
+ buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ notifyError(descriptor.frameNumber_, buffer.stream,
+ CAMERA3_MSG_ERROR_BUFFER);
continue;
}
+ int ret = cameraStream->process(*src, *buffer.buffer,
+ descriptor.settings_,
+ resultMetadata.get());
/*
- * \todo Buffer mapping and compression should be moved to a
- * separate thread.
+ * Return the FrameBuffer to the CameraStream now that we're
+ * done processing it.
*/
+ if (cameraStream->type() == CameraStream::Type::Internal)
+ cameraStream->putBuffer(src);
- MappedCamera3Buffer mapped(*descriptor->buffers[i].buffer,
- PROT_READ | PROT_WRITE);
- if (!mapped.isValid()) {
- LOG(HAL, Error) << "Failed to mmap android blob buffer";
- continue;
- }
-
- /* Set EXIF metadata for various tags. */
- Exif exif;
- /* \todo Set Make and Model from external vendor tags. */
- exif.setMake("libcamera");
- exif.setModel("cameraModel");
- exif.setOrientation(orientation_);
- exif.setSize(cameraStream->size());
- /*
- * We set the frame's EXIF timestamp as the time of encode.
- * Since the precision we need for EXIF timestamp is only one
- * second, it is good enough.
- */
- exif.setTimestamp(std::time(nullptr));
- if (exif.generate() != 0)
- LOG(HAL, Error) << "Failed to generate valid EXIF data";
-
- int jpeg_size = encoder->encode(buffer, mapped.maps()[0], exif.data());
- if (jpeg_size < 0) {
- LOG(HAL, Error) << "Failed to encode stream image";
- status = CAMERA3_BUFFER_STATUS_ERROR;
- continue;
+ if (ret) {
+ buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ notifyError(descriptor.frameNumber_, buffer.stream,
+ CAMERA3_MSG_ERROR_BUFFER);
}
-
- /*
- * Fill in the JPEG blob header.
- *
- * The mapped size of the buffer is being returned as
- * substantially larger than the requested JPEG_MAX_SIZE
- * (which is referenced from maxJpegBufferSize_). Utilise
- * this static size to ensure the correct offset of the blob is
- * determined.
- *
- * \todo Investigate if the buffer size mismatch is an issue or
- * expected behaviour.
- */
- uint8_t *resultPtr = mapped.maps()[0].data() +
- maxJpegBufferSize_ -
- sizeof(struct camera3_jpeg_blob);
- auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
- blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
- blob->jpeg_size = jpeg_size;
-
- /* Update the JPEG result Metadata. */
- resultMetadata->addEntry(ANDROID_JPEG_SIZE,
- &jpeg_size, 1);
-
- const uint32_t jpeg_quality = 95;
- resultMetadata->addEntry(ANDROID_JPEG_QUALITY,
- &jpeg_quality, 1);
-
- const uint32_t jpeg_orientation = 0;
- resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION,
- &jpeg_orientation, 1);
- }
-
- /* Prepare to call back the Android camera stack. */
- camera3_capture_result_t captureResult = {};
- captureResult.frame_number = descriptor->frameNumber;
- captureResult.num_output_buffers = descriptor->numBuffers;
- for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
- descriptor->buffers[i].acquire_fence = -1;
- descriptor->buffers[i].release_fence = -1;
- descriptor->buffers[i].status = status;
- }
- captureResult.output_buffers =
- const_cast<const camera3_stream_buffer_t *>(descriptor->buffers);
-
-
- if (status == CAMERA3_BUFFER_STATUS_OK) {
- notifyShutter(descriptor->frameNumber,
- buffer->metadata().timestamp);
-
- captureResult.partial_result = 1;
- captureResult.result = resultMetadata->get();
- }
-
- if (status == CAMERA3_BUFFER_STATUS_ERROR || !captureResult.result) {
- /* \todo Improve error handling. In case we notify an error
- * because the metadata generation fails, a shutter event has
- * already been notified for this frame number before the error
- * is here signalled. Make sure the error path plays well with
- * the camera stack state machine.
- */
- notifyError(descriptor->frameNumber,
- descriptor->buffers[0].stream);
}
+ captureResult.result = resultMetadata->get();
callbacks_->process_capture_result(callbacks_, &captureResult);
-
- delete descriptor;
}
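
To summarize the completion path above: for a successful request the HAL issues exactly two callbacks, a notify() carrying the shutter timestamp followed by a single process_capture_result() with all buffers and the full metadata pack. A condensed sketch using the camera3 structures referenced in this file (error handling omitted):

/* 1. Shutter event, timestamp in nanoseconds from SensorTimestamp. */
camera3_notify_msg_t shutter = {};
shutter.type = CAMERA3_MSG_SHUTTER;
shutter.message.shutter.frame_number = descriptor.frameNumber_;
shutter.message.shutter.timestamp = sensorTimestamp;
callbacks_->notify(callbacks_, &shutter);

/* 2. One final capture result with every buffer and the metadata. */
camera3_capture_result_t result = {};
result.frame_number = descriptor.frameNumber_;
result.num_output_buffers = descriptor.buffers_.size();
result.output_buffers = descriptor.buffers_.data();
result.partial_result = 1;
result.result = resultMetadata->get();
callbacks_->process_capture_result(callbacks_, &result);
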
std::string CameraDevice::logPrefix() const
@@ -1611,21 +1135,15 @@ void CameraDevice::notifyShutter(uint32_t frameNumber, uint64_t timestamp)
callbacks_->notify(callbacks_, &notify);
}
-void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream)
+void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream,
+ camera3_error_msg_code code)
{
camera3_notify_msg_t notify = {};
- /*
- * \todo Report and identify the stream number or configuration to
- * clarify the stream that failed.
- */
- LOG(HAL, Error) << "Error occurred on frame " << frameNumber << " ("
- << toPixelFormat(stream->format).toString() << ")";
-
notify.type = CAMERA3_MSG_ERROR;
notify.message.error.error_stream = stream;
notify.message.error.frame_number = frameNumber;
- notify.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+ notify.message.error.error_code = code;
callbacks_->notify(callbacks_, &notify);
}
@@ -1634,63 +1152,182 @@ void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream)
* Produce a set of fixed result metadata.
*/
std::unique_ptr<CameraMetadata>
-CameraDevice::getResultMetadata([[maybe_unused]] int frame_number,
- int64_t timestamp)
+CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) const
{
+ const ControlList &metadata = descriptor.request_->metadata();
+ const CameraMetadata &settings = descriptor.settings_;
+ camera_metadata_ro_entry_t entry;
+ bool found;
+
/*
* \todo Keep this in sync with the actual number of entries.
- * Currently: 18 entries, 62 bytes
+ * Currently: 40 entries, 156 bytes
+ *
+ * Reserve more space for the JPEG metadata set by the post-processor.
+ * Currently:
+ * ANDROID_JPEG_GPS_COORDINATES (double x 3) = 24 bytes
+ * ANDROID_JPEG_GPS_PROCESSING_METHOD (byte x 32) = 32 bytes
+ * ANDROID_JPEG_GPS_TIMESTAMP (int64) = 8 bytes
+ * ANDROID_JPEG_SIZE (int32_t) = 4 bytes
+ * ANDROID_JPEG_QUALITY (byte) = 1 byte
+ * ANDROID_JPEG_ORIENTATION (int32_t) = 4 bytes
+ * ANDROID_JPEG_THUMBNAIL_QUALITY (byte) = 1 byte
+ * ANDROID_JPEG_THUMBNAIL_SIZE (int32 x 2) = 8 bytes
+ * Total bytes for JPEG metadata: 82
*/
std::unique_ptr<CameraMetadata> resultMetadata =
- std::make_unique<CameraMetadata>(18, 62);
+ std::make_unique<CameraMetadata>(44, 166);
if (!resultMetadata->isValid()) {
- LOG(HAL, Error) << "Failed to allocate static metadata";
+ LOG(HAL, Error) << "Failed to allocate result metadata";
return nullptr;
}
- const uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
- resultMetadata->addEntry(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
+ /*
+ * \todo The value of the results metadata copied from the settings
+ * will have to be passed to the libcamera::Camera and extracted
+ * from libcamera::Request::metadata.
+ */
+
+ uint8_t value = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ value);
+
+ value = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE, value);
+
+ int32_t value32 = 0;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ value32);
- const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
- resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK, &ae_lock, 1);
+ value = ANDROID_CONTROL_AE_LOCK_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK, value);
- uint8_t af_state = ANDROID_CONTROL_AF_STATE_INACTIVE;
- resultMetadata->addEntry(ANDROID_CONTROL_AF_STATE, &af_state, 1);
+ value = ANDROID_CONTROL_AE_MODE_ON;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_MODE, value);
- const uint8_t awb_state = ANDROID_CONTROL_AWB_STATE_CONVERGED;
- resultMetadata->addEntry(ANDROID_CONTROL_AWB_STATE, &awb_state, 1);
+ if (settings.getEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry))
+ /*
+ * \todo Retrieve the AE FPS range from the libcamera metadata.
+		 * As libcamera does not support that control, return what
+		 * the framework asked for as a temporary workaround.
+ */
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ entry.data.i32, 2);
- const uint8_t awb_lock = ANDROID_CONTROL_AWB_LOCK_OFF;
- resultMetadata->addEntry(ANDROID_CONTROL_AWB_LOCK, &awb_lock, 1);
+ found = settings.getEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &entry);
+ value = found ? *entry.data.u8 :
+ (uint8_t)ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, value);
- const uint8_t lens_state = ANDROID_LENS_STATE_STATIONARY;
- resultMetadata->addEntry(ANDROID_LENS_STATE, &lens_state, 1);
+ value = ANDROID_CONTROL_AE_STATE_CONVERGED;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_STATE, value);
- int32_t sensorSizes[] = {
- 0, 0, 2560, 1920,
- };
- resultMetadata->addEntry(ANDROID_SCALER_CROP_REGION, sensorSizes, 4);
+ value = ANDROID_CONTROL_AF_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_AF_MODE, value);
+
+ value = ANDROID_CONTROL_AF_STATE_INACTIVE;
+ resultMetadata->addEntry(ANDROID_CONTROL_AF_STATE, value);
- resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
+ value = ANDROID_CONTROL_AF_TRIGGER_IDLE;
+ resultMetadata->addEntry(ANDROID_CONTROL_AF_TRIGGER, value);
+
+ value = ANDROID_CONTROL_AWB_MODE_AUTO;
+ resultMetadata->addEntry(ANDROID_CONTROL_AWB_MODE, value);
+
+ value = ANDROID_CONTROL_AWB_LOCK_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_AWB_LOCK, value);
+
+ value = ANDROID_CONTROL_AWB_STATE_CONVERGED;
+ resultMetadata->addEntry(ANDROID_CONTROL_AWB_STATE, value);
+
+ value = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+ resultMetadata->addEntry(ANDROID_CONTROL_CAPTURE_INTENT, value);
+
+ value = ANDROID_CONTROL_EFFECT_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_EFFECT_MODE, value);
+
+ value = ANDROID_CONTROL_MODE_AUTO;
+ resultMetadata->addEntry(ANDROID_CONTROL_MODE, value);
+
+ value = ANDROID_CONTROL_SCENE_MODE_DISABLED;
+ resultMetadata->addEntry(ANDROID_CONTROL_SCENE_MODE, value);
+
+ value = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, value);
+
+ value = ANDROID_FLASH_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_FLASH_MODE, value);
+
+ value = ANDROID_FLASH_STATE_UNAVAILABLE;
+ resultMetadata->addEntry(ANDROID_FLASH_STATE, value);
+
+ if (settings.getEntry(ANDROID_LENS_APERTURE, &entry))
+ resultMetadata->addEntry(ANDROID_LENS_APERTURE, entry.data.f, 1);
+
+ float focal_length = 1.0;
+ resultMetadata->addEntry(ANDROID_LENS_FOCAL_LENGTH, focal_length);
+
+ value = ANDROID_LENS_STATE_STATIONARY;
+ resultMetadata->addEntry(ANDROID_LENS_STATE, value);
+
+ value = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+ value);
+
+ value32 = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_SENSOR_TEST_PATTERN_MODE, value32);
+
+ value = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE, value);
+
+ value = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+ value);
+
+ value = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, value);
+
+ value = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
+ resultMetadata->addEntry(ANDROID_STATISTICS_SCENE_FLICKER, value);
+
+ value = ANDROID_NOISE_REDUCTION_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_NOISE_REDUCTION_MODE, value);
/* 33.3 msec */
const int64_t rolling_shutter_skew = 33300000;
resultMetadata->addEntry(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
- &rolling_shutter_skew, 1);
+ rolling_shutter_skew);
- /* 16.6 msec */
- const int64_t exposure_time = 16600000;
- resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME,
- &exposure_time, 1);
+ /* Add metadata tags reported by libcamera. */
+ const int64_t timestamp = metadata.get(controls::SensorTimestamp);
+ resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, timestamp);
- const uint8_t lens_shading_map_mode =
- ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
- resultMetadata->addEntry(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
- &lens_shading_map_mode, 1);
+ if (metadata.contains(controls::draft::PipelineDepth)) {
+ uint8_t pipeline_depth =
+ metadata.get<int32_t>(controls::draft::PipelineDepth);
+ resultMetadata->addEntry(ANDROID_REQUEST_PIPELINE_DEPTH,
+ pipeline_depth);
+ }
- const uint8_t scene_flicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
- resultMetadata->addEntry(ANDROID_STATISTICS_SCENE_FLICKER,
- &scene_flicker, 1);
+ if (metadata.contains(controls::ExposureTime)) {
+ int64_t exposure = metadata.get(controls::ExposureTime) * 1000ULL;
+ resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME, exposure);
+ }
+
+ if (metadata.contains(controls::FrameDuration)) {
+ int64_t duration = metadata.get(controls::FrameDuration) * 1000;
+ resultMetadata->addEntry(ANDROID_SENSOR_FRAME_DURATION,
+ duration);
+ }
+
+ if (metadata.contains(controls::ScalerCrop)) {
+ Rectangle crop = metadata.get(controls::ScalerCrop);
+ int32_t cropRect[] = {
+ crop.x, crop.y, static_cast<int32_t>(crop.width),
+ static_cast<int32_t>(crop.height),
+ };
+ resultMetadata->addEntry(ANDROID_SCALER_CROP_REGION, cropRect);
+ }
/*
	 * Return the result metadata pack even if it is not valid: get() will return
@@ -1700,5 +1337,12 @@ CameraDevice::getResultMetadata([[maybe_unused]] int frame_number,
LOG(HAL, Error) << "Failed to construct result metadata";
}
+ if (resultMetadata->resized()) {
+ auto [entryCount, dataCount] = resultMetadata->usage();
+ LOG(HAL, Info)
+ << "Result metadata resized: " << entryCount
+ << " entries and " << dataCount << " bytes used";
+ }
+
return resultMetadata;
}
diff --git a/src/android/camera_device.h b/src/android/camera_device.h
index 1837748d..089a6204 100644
--- a/src/android/camera_device.h
+++ b/src/android/camera_device.h
@@ -9,65 +9,52 @@
#include <map>
#include <memory>
-#include <tuple>
+#include <mutex>
#include <vector>
#include <hardware/camera3.h>
-#include <libcamera/buffer.h>
+#include <libcamera/base/class.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/message.h>
+#include <libcamera/base/thread.h>
+
#include <libcamera/camera.h>
-#include <libcamera/geometry.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/message.h"
+#include "libcamera/internal/framebuffer.h"
+#include "camera_capabilities.h"
+#include "camera_metadata.h"
+#include "camera_stream.h"
+#include "camera_worker.h"
#include "jpeg/encoder.h"
-class CameraMetadata;
-
-class CameraStream
-{
-public:
- CameraStream(libcamera::PixelFormat format, libcamera::Size size,
- unsigned int index, Encoder *encoder = nullptr);
-
- const libcamera::PixelFormat &format() const { return format_; }
- const libcamera::Size &size() const { return size_; }
- unsigned int index() const { return index_; }
- Encoder *encoder() const { return encoder_.get(); }
-
-private:
- libcamera::PixelFormat format_;
- libcamera::Size size_;
- /*
- * The index of the libcamera StreamConfiguration as added during
- * configureStreams(). A single libcamera Stream may be used to deliver
- * one or more streams to the Android framework.
- */
- unsigned int index_;
- std::unique_ptr<Encoder> encoder_;
-};
-
+struct CameraConfigData;
class CameraDevice : protected libcamera::Loggable
{
public:
- static std::shared_ptr<CameraDevice> create(unsigned int id,
- const std::shared_ptr<libcamera::Camera> &cam);
+ static std::unique_ptr<CameraDevice> create(unsigned int id,
+ std::shared_ptr<libcamera::Camera> cam);
~CameraDevice();
- int initialize();
+ int initialize(const CameraConfigData *cameraConfigData);
int open(const hw_module_t *hardwareModule);
void close();
+ void flush();
unsigned int id() const { return id_; }
camera3_device_t *camera3Device() { return &camera3Device_; }
- const libcamera::Camera *camera() const { return camera_.get(); }
+ const std::shared_ptr<libcamera::Camera> &camera() const { return camera_; }
+ const std::string &maker() const { return maker_; }
+ const std::string &model() const { return model_; }
int facing() const { return facing_; }
int orientation() const { return orientation_; }
+ unsigned int maxJpegBufferSize() const;
void setCallbacks(const camera3_callback_ops_t *callbacks);
const camera_metadata_t *getStaticMetadata();
@@ -80,60 +67,69 @@ protected:
std::string logPrefix() const override;
private:
- CameraDevice(unsigned int id, const std::shared_ptr<libcamera::Camera> &camera);
+ LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraDevice)
+
+ CameraDevice(unsigned int id, std::shared_ptr<libcamera::Camera> camera);
struct Camera3RequestDescriptor {
- Camera3RequestDescriptor(unsigned int frameNumber,
- unsigned int numBuffers);
- ~Camera3RequestDescriptor();
-
- uint32_t frameNumber;
- uint32_t numBuffers;
- camera3_stream_buffer_t *buffers;
- std::vector<std::unique_ptr<libcamera::FrameBuffer>> frameBuffers;
+ Camera3RequestDescriptor() = default;
+ ~Camera3RequestDescriptor() = default;
+ Camera3RequestDescriptor(libcamera::Camera *camera,
+ const camera3_capture_request_t *camera3Request);
+ Camera3RequestDescriptor &operator=(Camera3RequestDescriptor &&) = default;
+
+ uint32_t frameNumber_ = 0;
+ std::vector<camera3_stream_buffer_t> buffers_;
+ std::vector<std::unique_ptr<libcamera::FrameBuffer>> frameBuffers_;
+ CameraMetadata settings_;
+ std::unique_ptr<CaptureRequest> request_;
};
- struct Camera3StreamConfiguration {
- libcamera::Size resolution;
- int androidFormat;
+ enum class State {
+ Stopped,
+ Flushing,
+ Running,
};
- int initializeStreamConfigurations();
- std::vector<libcamera::Size>
- getYUVResolutions(libcamera::CameraConfiguration *cameraConfig,
- const libcamera::PixelFormat &pixelFormat,
- const std::vector<libcamera::Size> &resolutions);
- std::vector<libcamera::Size>
- getRawResolutions(const libcamera::PixelFormat &pixelFormat);
+ void stop();
- std::tuple<uint32_t, uint32_t> calculateStaticMetadataSize();
libcamera::FrameBuffer *createFrameBuffer(const buffer_handle_t camera3buffer);
+ void abortRequest(camera3_capture_request_t *request);
+ bool isValidRequest(camera3_capture_request_t *request) const;
void notifyShutter(uint32_t frameNumber, uint64_t timestamp);
- void notifyError(uint32_t frameNumber, camera3_stream_t *stream);
- CameraMetadata *requestTemplatePreview();
- libcamera::PixelFormat toPixelFormat(int format);
- std::unique_ptr<CameraMetadata> getResultMetadata(int frame_number,
- int64_t timestamp);
+ void notifyError(uint32_t frameNumber, camera3_stream_t *stream,
+ camera3_error_msg_code code);
+ int processControls(Camera3RequestDescriptor *descriptor);
+ std::unique_ptr<CameraMetadata> getResultMetadata(
+ const Camera3RequestDescriptor &descriptor) const;
unsigned int id_;
camera3_device_t camera3Device_;
- bool running_;
+ CameraWorker worker_;
+
+ libcamera::Mutex stateMutex_; /* Protects access to the camera state. */
+ State state_;
+
std::shared_ptr<libcamera::Camera> camera_;
std::unique_ptr<libcamera::CameraConfiguration> config_;
+ CameraCapabilities capabilities_;
- CameraMetadata *staticMetadata_;
- std::map<unsigned int, const CameraMetadata *> requestTemplates_;
+ std::map<unsigned int, std::unique_ptr<CameraMetadata>> requestTemplates_;
const camera3_callback_ops_t *callbacks_;
- std::vector<Camera3StreamConfiguration> streamConfigurations_;
- std::map<int, libcamera::PixelFormat> formatsMap_;
std::vector<CameraStream> streams_;
+ libcamera::Mutex descriptorsMutex_; /* Protects descriptors_. */
+ std::map<uint64_t, Camera3RequestDescriptor> descriptors_;
+
+ std::string maker_;
+ std::string model_;
+
int facing_;
int orientation_;
- unsigned int maxJpegBufferSize_;
+ CameraMetadata lastSettings_;
};
#endif /* __ANDROID_CAMERA_DEVICE_H__ */
diff --git a/src/android/camera_hal_config.cpp b/src/android/camera_hal_config.cpp
new file mode 100644
index 00000000..833cf4ba
--- /dev/null
+++ b/src/android/camera_hal_config.cpp
@@ -0,0 +1,407 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_hal_config.cpp - Camera HAL configuration file manager
+ */
+#include "camera_hal_config.h"
+
+#if defined(_GLIBCXX_RELEASE) && _GLIBCXX_RELEASE < 8
+#include <experimental/filesystem>
+namespace std {
+namespace filesystem = std::experimental::filesystem;
+}
+#else
+#include <filesystem>
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <string>
+#include <yaml.h>
+
+#include <hardware/camera3.h>
+
+#include <libcamera/base/log.h>
+
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(HALConfig)
+
+class CameraHalConfig::Private : public Extensible::Private
+{
+ LIBCAMERA_DECLARE_PUBLIC(CameraHalConfig)
+
+public:
+ Private(CameraHalConfig *halConfig);
+
+ int parseConfigFile(FILE *fh, std::map<std::string, CameraConfigData> *cameras);
+
+private:
+ std::string parseValue();
+ std::string parseKey();
+ int parseValueBlock();
+ int parseCameraLocation(CameraConfigData *cameraConfigData,
+ const std::string &location);
+ int parseCameraConfigData(const std::string &cameraId);
+ int parseCameras();
+ int parseEntry();
+
+ yaml_parser_t parser_;
+ std::map<std::string, CameraConfigData> *cameras_;
+};
+
+CameraHalConfig::Private::Private(CameraHalConfig *halConfig)
+ : Extensible::Private(halConfig)
+{
+}
+
+std::string CameraHalConfig::Private::parseValue()
+{
+ yaml_token_t token;
+
+ /* Make sure the token type is a value and get its content. */
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_VALUE_TOKEN) {
+ yaml_token_delete(&token);
+ return "";
+ }
+ yaml_token_delete(&token);
+
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_SCALAR_TOKEN) {
+ yaml_token_delete(&token);
+ return "";
+ }
+
+ std::string value(reinterpret_cast<char *>(token.data.scalar.value),
+ token.data.scalar.length);
+ yaml_token_delete(&token);
+
+ return value;
+}
+
+std::string CameraHalConfig::Private::parseKey()
+{
+ yaml_token_t token;
+
+ /* Make sure the token type is a key and get its value. */
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_SCALAR_TOKEN) {
+ yaml_token_delete(&token);
+ return "";
+ }
+
+ std::string value(reinterpret_cast<char *>(token.data.scalar.value),
+ token.data.scalar.length);
+ yaml_token_delete(&token);
+
+ return value;
+}
+
+int CameraHalConfig::Private::parseValueBlock()
+{
+ yaml_token_t token;
+
+ /* Make sure the next token are VALUE and BLOCK_MAPPING_START. */
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_VALUE_TOKEN) {
+ yaml_token_delete(&token);
+ return -EINVAL;
+ }
+ yaml_token_delete(&token);
+
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_BLOCK_MAPPING_START_TOKEN) {
+ yaml_token_delete(&token);
+ return -EINVAL;
+ }
+ yaml_token_delete(&token);
+
+ return 0;
+}
+
+int CameraHalConfig::Private::parseCameraLocation(CameraConfigData *cameraConfigData,
+ const std::string &location)
+{
+ if (location == "front")
+ cameraConfigData->facing = CAMERA_FACING_FRONT;
+ else if (location == "back")
+ cameraConfigData->facing = CAMERA_FACING_BACK;
+ else if (location == "external")
+ cameraConfigData->facing = CAMERA_FACING_EXTERNAL;
+ else
+ return -EINVAL;
+
+ return 0;
+}
+
+int CameraHalConfig::Private::parseCameraConfigData(const std::string &cameraId)
+{
+ int ret = parseValueBlock();
+ if (ret)
+ return ret;
+
+ /*
+ * Parse the camera properties and store them in a cameraConfigData
+ * instance.
+ *
+ * Add a safety counter to make sure we don't loop indefinitely in case
+ * the configuration file is malformed.
+ */
+ CameraConfigData cameraConfigData;
+ unsigned int sentinel = 100;
+ bool blockEnd = false;
+ yaml_token_t token;
+
+ do {
+ yaml_parser_scan(&parser_, &token);
+ switch (token.type) {
+ case YAML_KEY_TOKEN: {
+ yaml_token_delete(&token);
+
+ /*
+ * Parse the camera property key and make sure it is
+ * valid.
+ */
+ std::string key = parseKey();
+ std::string value = parseValue();
+ if (key.empty() || value.empty())
+ return -EINVAL;
+
+ if (key == "location") {
+ ret = parseCameraLocation(&cameraConfigData, value);
+ if (ret) {
+ LOG(HALConfig, Error)
+ << "Unknown location: " << value;
+ return -EINVAL;
+ }
+ } else if (key == "rotation") {
+ ret = std::stoi(value);
+ if (ret < 0 || ret >= 360) {
+ LOG(HALConfig, Error)
+ << "Unknown rotation: " << value;
+ return -EINVAL;
+ }
+ cameraConfigData.rotation = ret;
+ } else {
+ LOG(HALConfig, Error)
+ << "Unknown key: " << key;
+ return -EINVAL;
+ }
+ break;
+ }
+
+ case YAML_BLOCK_END_TOKEN:
+ blockEnd = true;
+ [[fallthrough]];
+ default:
+ yaml_token_delete(&token);
+ break;
+ }
+
+ --sentinel;
+ } while (!blockEnd && sentinel);
+ if (!sentinel)
+ return -EINVAL;
+
+ (*cameras_)[cameraId] = cameraConfigData;
+
+ return 0;
+}
+
+int CameraHalConfig::Private::parseCameras()
+{
+ int ret = parseValueBlock();
+ if (ret) {
+ LOG(HALConfig, Error) << "Configuration file is not valid";
+ return ret;
+ }
+
+ /*
+ * Parse the camera properties.
+ *
+ * Each camera properties block is a list of properties associated
+ * with the ID (as assembled by CameraSensor::generateId()) of the
+ * camera they refer to.
+ *
+ * cameras:
+ * "camera0 id":
+ * key: value
+ * key: value
+ * ...
+ *
+ * "camera1 id":
+ * key: value
+ * key: value
+ * ...
+ */
+ bool blockEnd = false;
+ yaml_token_t token;
+ do {
+ yaml_parser_scan(&parser_, &token);
+ switch (token.type) {
+ case YAML_KEY_TOKEN: {
+ yaml_token_delete(&token);
+
+ /* Parse the camera ID as key of the property list. */
+ std::string cameraId = parseKey();
+ if (cameraId.empty())
+ return -EINVAL;
+
+ ret = parseCameraConfigData(cameraId);
+ if (ret)
+ return -EINVAL;
+ break;
+ }
+ case YAML_BLOCK_END_TOKEN:
+ blockEnd = true;
+ [[fallthrough]];
+ default:
+ yaml_token_delete(&token);
+ break;
+ }
+ } while (!blockEnd);
+
+ return 0;
+}
+
+int CameraHalConfig::Private::parseEntry()
+{
+ int ret = -EINVAL;
+
+ /*
+ * Parse each key we find in the file.
+ *
+	 * The 'cameras' key maps to a list of lists of camera properties.
+ */
+
+ std::string key = parseKey();
+ if (key.empty())
+ return ret;
+
+ if (key == "cameras")
+ ret = parseCameras();
+ else
+ LOG(HALConfig, Error) << "Unknown key: " << key;
+
+ return ret;
+}
+
+int CameraHalConfig::Private::parseConfigFile(FILE *fh,
+ std::map<std::string, CameraConfigData> *cameras)
+{
+ cameras_ = cameras;
+
+ int ret = yaml_parser_initialize(&parser_);
+ if (!ret) {
+ LOG(HALConfig, Error) << "Failed to initialize yaml parser";
+ return -EINVAL;
+ }
+ yaml_parser_set_input_file(&parser_, fh);
+
+ yaml_token_t token;
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_STREAM_START_TOKEN) {
+ LOG(HALConfig, Error) << "Configuration file is not valid";
+ yaml_token_delete(&token);
+ yaml_parser_delete(&parser_);
+ return -EINVAL;
+ }
+ yaml_token_delete(&token);
+
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_BLOCK_MAPPING_START_TOKEN) {
+ LOG(HALConfig, Error) << "Configuration file is not valid";
+ yaml_token_delete(&token);
+ yaml_parser_delete(&parser_);
+ return -EINVAL;
+ }
+ yaml_token_delete(&token);
+
+ /* Parse the file and parse each single key one by one. */
+ do {
+ yaml_parser_scan(&parser_, &token);
+ switch (token.type) {
+ case YAML_KEY_TOKEN:
+ yaml_token_delete(&token);
+ ret = parseEntry();
+ break;
+
+ case YAML_STREAM_END_TOKEN:
+ ret = -ENOENT;
+ [[fallthrough]];
+ default:
+ yaml_token_delete(&token);
+ break;
+ }
+ } while (ret >= 0);
+ yaml_parser_delete(&parser_);
+
+ if (ret && ret != -ENOENT)
+ LOG(HALConfig, Error) << "Configuration file is not valid";
+
+ return ret == -ENOENT ? 0 : ret;
+}
+
+CameraHalConfig::CameraHalConfig()
+ : Extensible(new Private(this)), exists_(false), valid_(false)
+{
+ parseConfigurationFile();
+}
+
+/*
+ * Open the HAL configuration file and validate its content.
+ * Return 0 on success or a negative error code otherwise:
+ * -ENOENT: the configuration file is not available
+ * -EINVAL: the configuration file is available but not valid
+ */
+int CameraHalConfig::parseConfigurationFile()
+{
+ std::filesystem::path filePath = LIBCAMERA_SYSCONF_DIR;
+ filePath /= "camera_hal.yaml";
+ if (!std::filesystem::is_regular_file(filePath)) {
+ LOG(HALConfig, Debug)
+ << "Configuration file: \"" << filePath << "\" not found";
+ return -ENOENT;
+ }
+
+ FILE *fh = fopen(filePath.c_str(), "r");
+ if (!fh) {
+ int ret = -errno;
+ LOG(HALConfig, Error) << "Failed to open configuration file "
+ << filePath << ": " << strerror(-ret);
+ return ret;
+ }
+
+ exists_ = true;
+
+ int ret = _d()->parseConfigFile(fh, &cameras_);
+ fclose(fh);
+ if (ret)
+ return -EINVAL;
+
+ valid_ = true;
+
+ for (const auto &c : cameras_) {
+ const std::string &cameraId = c.first;
+ const CameraConfigData &camera = c.second;
+ LOG(HALConfig, Debug) << "'" << cameraId << "' "
+ << "(" << camera.facing << ")["
+ << camera.rotation << "]";
+ }
+
+ return 0;
+}
+
+const CameraConfigData *CameraHalConfig::cameraConfigData(const std::string &cameraId) const
+{
+ const auto &it = cameras_.find(cameraId);
+ if (it == cameras_.end()) {
+ LOG(HALConfig, Error)
+ << "Camera '" << cameraId
+ << "' not described in the HAL configuration file";
+ return nullptr;
+ }
+
+ return &it->second;
+}
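
Putting the parser together, the configuration file it accepts has the shape below. The camera IDs here are placeholders taken from the in-code comment (real IDs come from CameraSensor::generateId()); 'location' must be one of front, back or external, and 'rotation' an integer in the [0, 360) range:

cameras:
  "camera0 id":
    location: front
    rotation: 0

  "camera1 id":
    location: back
    rotation: 180
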
diff --git a/src/android/camera_hal_config.h b/src/android/camera_hal_config.h
new file mode 100644
index 00000000..a79d5d6c
--- /dev/null
+++ b/src/android/camera_hal_config.h
@@ -0,0 +1,39 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_hal_config.h - Camera HAL configuration file manager
+ */
+#ifndef __ANDROID_CAMERA_HAL_CONFIG_H__
+#define __ANDROID_CAMERA_HAL_CONFIG_H__
+
+#include <map>
+#include <string>
+
+#include <libcamera/base/class.h>
+
+struct CameraConfigData {
+ int facing = -1;
+ int rotation = -1;
+};
+
+class CameraHalConfig final : public libcamera::Extensible
+{
+ LIBCAMERA_DECLARE_PRIVATE()
+
+public:
+ CameraHalConfig();
+
+ bool exists() const { return exists_; }
+ bool isValid() const { return valid_; }
+
+ const CameraConfigData *cameraConfigData(const std::string &cameraId) const;
+
+private:
+ bool exists_;
+ bool valid_;
+ std::map<std::string, CameraConfigData> cameras_;
+
+ int parseConfigurationFile();
+};
+#endif /* __ANDROID_CAMERA_HAL_CONFIG_H__ */
diff --git a/src/android/camera_hal_manager.cpp b/src/android/camera_hal_manager.cpp
index 05b47401..4cd67544 100644
--- a/src/android/camera_hal_manager.cpp
+++ b/src/android/camera_hal_manager.cpp
@@ -7,16 +7,16 @@
#include "camera_hal_manager.h"
+#include <libcamera/base/log.h>
+
#include <libcamera/camera.h>
#include <libcamera/property_ids.h>
-#include "libcamera/internal/log.h"
-
#include "camera_device.h"
using namespace libcamera;
-LOG_DECLARE_CATEGORY(HAL);
+LOG_DECLARE_CATEGORY(HAL)
/*
* \class CameraHalManager
@@ -34,20 +34,28 @@ CameraHalManager::CameraHalManager()
{
}
-CameraHalManager::~CameraHalManager()
-{
- cameras_.clear();
+/* CameraManager calls stop() in the destructor. */
+CameraHalManager::~CameraHalManager() = default;
- if (cameraManager_) {
- cameraManager_->stop();
- delete cameraManager_;
- cameraManager_ = nullptr;
- }
+/* static */
+CameraHalManager *CameraHalManager::instance()
+{
+ static CameraHalManager *cameraHalManager = new CameraHalManager;
+ return cameraHalManager;
}
int CameraHalManager::init()
{
- cameraManager_ = new CameraManager();
+ cameraManager_ = std::make_unique<CameraManager>();
+
+ /*
+	 * If the configuration file is not available the HAL only supports
+	 * external cameras. If it exists but is not valid, error out.
+ */
+ if (halConfig_.exists() && !halConfig_.isValid()) {
+ LOG(HAL, Error) << "HAL configuration file is not valid";
+ return -EINVAL;
+ }
/* Support camera hotplug. */
cameraManager_->cameraAdded.connect(this, &CameraHalManager::cameraAdded);
@@ -57,36 +65,36 @@ int CameraHalManager::init()
if (ret) {
LOG(HAL, Error) << "Failed to start camera manager: "
<< strerror(-ret);
- delete cameraManager_;
- cameraManager_ = nullptr;
+ cameraManager_.reset();
return ret;
}
return 0;
}
-CameraDevice *CameraHalManager::open(unsigned int id,
- const hw_module_t *hardwareModule)
+std::tuple<CameraDevice *, int>
+CameraHalManager::open(unsigned int id, const hw_module_t *hardwareModule)
{
MutexLocker locker(mutex_);
if (!callbacks_) {
LOG(HAL, Error) << "Can't open camera before callbacks are set";
- return nullptr;
+ return { nullptr, -ENODEV };
}
CameraDevice *camera = cameraDeviceFromHalId(id);
if (!camera) {
LOG(HAL, Error) << "Invalid camera id '" << id << "'";
- return nullptr;
+ return { nullptr, -ENODEV };
}
- if (camera->open(hardwareModule))
- return nullptr;
+ int ret = camera->open(hardwareModule);
+ if (ret)
+ return { nullptr, ret };
LOG(HAL, Info) << "Open camera '" << id << "'";
- return camera;
+ return { camera, 0 };
}
void CameraHalManager::cameraAdded(std::shared_ptr<Camera> cam)
@@ -108,6 +116,8 @@ void CameraHalManager::cameraAdded(std::shared_ptr<Camera> cam)
auto iter = cameraIdsMap_.find(cam->id());
if (iter != cameraIdsMap_.end()) {
id = iter->second;
+ if (id >= firstExternalCameraId_)
+ isCameraExternal = true;
} else {
isCameraNew = true;
@@ -124,8 +134,28 @@ void CameraHalManager::cameraAdded(std::shared_ptr<Camera> cam)
}
/* Create a CameraDevice instance to wrap the libcamera Camera. */
- std::shared_ptr<CameraDevice> camera = CameraDevice::create(id, std::move(cam));
- int ret = camera->initialize();
+ std::unique_ptr<CameraDevice> camera = CameraDevice::create(id, cam);
+
+ /*
+ * The configuration file must be valid, and contain a corresponding
+ * entry for internal cameras. External cameras can be initialized
+	 * without a configuration file.
+ */
+ if (!isCameraExternal && !halConfig_.exists()) {
+ LOG(HAL, Error)
+ << "HAL configuration file is mandatory for internal cameras";
+ return;
+ }
+
+ const CameraConfigData *cameraConfigData = halConfig_.cameraConfigData(cam->id());
+ if (!isCameraExternal && !cameraConfigData) {
+ LOG(HAL, Error)
+ << "HAL configuration entry for internal camera "
+ << cam->id() << " is missing";
+ return;
+ }
+
+ int ret = camera->initialize(cameraConfigData);
if (ret) {
LOG(HAL, Error) << "Failed to initialize camera: " << cam->id();
return;
@@ -154,8 +184,8 @@ void CameraHalManager::cameraRemoved(std::shared_ptr<Camera> cam)
MutexLocker locker(mutex_);
auto iter = std::find_if(cameras_.begin(), cameras_.end(),
- [&cam](std::shared_ptr<CameraDevice> &camera) {
- return cam.get() == camera->camera();
+ [&cam](const std::unique_ptr<CameraDevice> &camera) {
+ return cam == camera->camera();
});
if (iter == cameras_.end())
return;
@@ -191,7 +221,7 @@ int32_t CameraHalManager::cameraLocation(const Camera *cam)
CameraDevice *CameraHalManager::cameraDeviceFromHalId(unsigned int id)
{
auto iter = std::find_if(cameras_.begin(), cameras_.end(),
- [id](std::shared_ptr<CameraDevice> &camera) {
+ [id](const std::unique_ptr<CameraDevice> &camera) {
return camera->id() == id;
});
if (iter == cameras_.end())
@@ -243,7 +273,7 @@ void CameraHalManager::setCallbacks(const camera_module_callbacks_t *callbacks)
* Internal cameras are already assumed to be present at module load
* time by the Android framework.
*/
- for (std::shared_ptr<CameraDevice> &camera : cameras_) {
+ for (const std::unique_ptr<CameraDevice> &camera : cameras_) {
unsigned int id = camera->id();
if (id >= firstExternalCameraId_)
callbacks_->camera_device_status_change(callbacks_, id,
diff --git a/src/android/camera_hal_manager.h b/src/android/camera_hal_manager.h
index a91decc7..3f6d302a 100644
--- a/src/android/camera_hal_manager.h
+++ b/src/android/camera_hal_manager.h
@@ -10,36 +10,47 @@
#include <map>
#include <mutex>
#include <stddef.h>
+#include <tuple>
#include <vector>
#include <hardware/camera_common.h>
#include <hardware/hardware.h>
#include <system/camera_metadata.h>
+#include <libcamera/base/class.h>
+
#include <libcamera/camera_manager.h>
+#include "camera_hal_config.h"
+
class CameraDevice;
class CameraHalManager
{
public:
- CameraHalManager();
~CameraHalManager();
+ static CameraHalManager *instance();
+
int init();
- CameraDevice *open(unsigned int id, const hw_module_t *module);
+ std::tuple<CameraDevice *, int>
+ open(unsigned int id, const hw_module_t *module);
unsigned int numCameras() const;
int getCameraInfo(unsigned int id, struct camera_info *info);
void setCallbacks(const camera_module_callbacks_t *callbacks);
private:
+ LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraHalManager)
+
using Mutex = std::mutex;
using MutexLocker = std::unique_lock<std::mutex>;
static constexpr unsigned int firstExternalCameraId_ = 1000;
+ CameraHalManager();
+
static int32_t cameraLocation(const libcamera::Camera *cam);
void cameraAdded(std::shared_ptr<libcamera::Camera> cam);
@@ -47,10 +58,11 @@ private:
CameraDevice *cameraDeviceFromHalId(unsigned int id);
- libcamera::CameraManager *cameraManager_;
+ std::unique_ptr<libcamera::CameraManager> cameraManager_;
+ CameraHalConfig halConfig_;
const camera_module_callbacks_t *callbacks_;
- std::vector<std::shared_ptr<CameraDevice>> cameras_;
+ std::vector<std::unique_ptr<CameraDevice>> cameras_;
std::map<std::string, unsigned int> cameraIdsMap_;
Mutex mutex_;
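
Returning a std::tuple from open() lets the module-level open hook propagate the exact error code instead of collapsing every failure into a null pointer. A hypothetical caller, assuming the usual camera_common open() shape with an hw_device_t **device out parameter:

auto [camera, ret] = CameraHalManager::instance()->open(id, module);
if (!camera)
	return ret;

*device = &camera->camera3Device()->common;
return 0;
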
diff --git a/src/android/camera_metadata.cpp b/src/android/camera_metadata.cpp
index f0da9ea9..3fc7cf27 100644
--- a/src/android/camera_metadata.cpp
+++ b/src/android/camera_metadata.cpp
@@ -7,29 +7,132 @@
#include "camera_metadata.h"
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
using namespace libcamera;
-LOG_DEFINE_CATEGORY(CameraMetadata);
+LOG_DEFINE_CATEGORY(CameraMetadata)
+
+CameraMetadata::CameraMetadata()
+ : metadata_(nullptr), valid_(false), resized_(false)
+{
+}
CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity)
+ : resized_(false)
{
metadata_ = allocate_camera_metadata(entryCapacity, dataCapacity);
valid_ = metadata_ != nullptr;
}
+CameraMetadata::CameraMetadata(const camera_metadata_t *metadata)
+ : resized_(false)
+{
+ metadata_ = clone_camera_metadata(metadata);
+ valid_ = metadata_ != nullptr;
+}
+
+CameraMetadata::CameraMetadata(const CameraMetadata &other)
+ : CameraMetadata(other.get())
+{
+}
+
CameraMetadata::~CameraMetadata()
{
if (metadata_)
free_camera_metadata(metadata_);
}
-bool CameraMetadata::addEntry(uint32_t tag, const void *data, size_t count)
+CameraMetadata &CameraMetadata::operator=(const CameraMetadata &other)
+{
+ if (this == &other)
+ return *this;
+
+ if (metadata_)
+ free_camera_metadata(metadata_);
+
+ metadata_ = clone_camera_metadata(other.get());
+ valid_ = metadata_ != nullptr;
+
+ return *this;
+}
+
+std::tuple<size_t, size_t> CameraMetadata::usage() const
+{
+ size_t currentEntryCount = get_camera_metadata_entry_count(metadata_);
+ size_t currentDataCount = get_camera_metadata_data_count(metadata_);
+
+ return { currentEntryCount, currentDataCount };
+}
+
+bool CameraMetadata::getEntry(uint32_t tag, camera_metadata_ro_entry_t *entry) const
+{
+ if (find_camera_metadata_ro_entry(metadata_, tag, entry))
+ return false;
+
+ return true;
+}
+
+/*
+ * \brief Resize the metadata container, if necessary
+ * \param[in] count Number of entries to add to the container
+ * \param[in] size Total size of entries to add, in bytes
+ * \return True if resize was successful or unnecessary, false otherwise
+ */
+bool CameraMetadata::resize(size_t count, size_t size)
+{
+ if (!valid_)
+ return false;
+
+ if (!count && !size)
+ return true;
+
+ size_t currentEntryCount = get_camera_metadata_entry_count(metadata_);
+ size_t currentEntryCapacity = get_camera_metadata_entry_capacity(metadata_);
+ size_t newEntryCapacity = currentEntryCapacity < currentEntryCount + count ?
+ currentEntryCapacity * 2 : currentEntryCapacity;
+
+ size_t currentDataCount = get_camera_metadata_data_count(metadata_);
+ size_t currentDataCapacity = get_camera_metadata_data_capacity(metadata_);
+ size_t newDataCapacity = currentDataCapacity < currentDataCount + size ?
+ currentDataCapacity * 2 : currentDataCapacity;
+
+ if (newEntryCapacity > currentEntryCapacity ||
+ newDataCapacity > currentDataCapacity) {
+ camera_metadata_t *oldMetadata = metadata_;
+ metadata_ = allocate_camera_metadata(newEntryCapacity, newDataCapacity);
+ if (!metadata_) {
+ metadata_ = oldMetadata;
+ return false;
+ }
+
+ LOG(CameraMetadata, Info)
+ << "Resized: old entry capacity " << currentEntryCapacity
+ << ", old data capacity " << currentDataCapacity
+ << ", new entry capacity " << newEntryCapacity
+ << ", new data capacity " << newDataCapacity;
+
+ append_camera_metadata(metadata_, oldMetadata);
+ free_camera_metadata(oldMetadata);
+
+ resized_ = true;
+ }
+
+ return true;
+}
+
+bool CameraMetadata::addEntry(uint32_t tag, const void *data, size_t count,
+ size_t elementSize)
{
if (!valid_)
return false;
+ if (!resize(1, count * elementSize)) {
+ LOG(CameraMetadata, Error) << "Failed to resize";
+ valid_ = false;
+ return false;
+ }
+
if (!add_camera_metadata_entry(metadata_, tag, data, count))
return true;
@@ -46,7 +149,8 @@ bool CameraMetadata::addEntry(uint32_t tag, const void *data, size_t count)
return false;
}
-bool CameraMetadata::updateEntry(uint32_t tag, const void *data, size_t count)
+bool CameraMetadata::updateEntry(uint32_t tag, const void *data, size_t count,
+ size_t elementSize)
{
if (!valid_)
return false;
@@ -61,16 +165,39 @@ bool CameraMetadata::updateEntry(uint32_t tag, const void *data, size_t count)
return false;
}
- ret = update_camera_metadata_entry(metadata_, entry.index, data,
- count, nullptr);
- if (ret) {
+ if (camera_metadata_type_size[entry.type] != elementSize) {
const char *name = get_camera_metadata_tag_name(tag);
- LOG(CameraMetadata, Error)
- << "Failed to update tag " << (name ? name : "<unknown>");
+ LOG(CameraMetadata, Fatal)
+ << "Invalid element size for tag "
+ << (name ? name : "<unknown>");
return false;
}
- return true;
+ size_t oldSize =
+ calculate_camera_metadata_entry_data_size(entry.type,
+ entry.count);
+ size_t newSize =
+ calculate_camera_metadata_entry_data_size(entry.type,
+ count);
+	/* Avoid unsigned wrap-around when the new entry data is smaller. */
+	size_t sizeIncrement = newSize > oldSize ? newSize - oldSize : 0;
+ if (!resize(0, sizeIncrement)) {
+ LOG(CameraMetadata, Error) << "Failed to resize";
+ valid_ = false;
+ return false;
+ }
+
+ ret = update_camera_metadata_entry(metadata_, entry.index, data,
+ count, nullptr);
+ if (!ret)
+ return true;
+
+ const char *name = get_camera_metadata_tag_name(tag);
+ LOG(CameraMetadata, Error)
+ << "Failed to update tag " << (name ? name : "<unknown>");
+
+ valid_ = false;
+
+ return false;
}
camera_metadata_t *CameraMetadata::get()
diff --git a/src/android/camera_metadata.h b/src/android/camera_metadata.h
index 9d047b1b..3b7c9e24 100644
--- a/src/android/camera_metadata.h
+++ b/src/android/camera_metadata.h
@@ -8,25 +8,91 @@
#define __ANDROID_CAMERA_METADATA_H__
#include <stdint.h>
+#include <vector>
#include <system/camera_metadata.h>
class CameraMetadata
{
public:
+ CameraMetadata();
CameraMetadata(size_t entryCapacity, size_t dataCapacity);
+ CameraMetadata(const camera_metadata_t *metadata);
+ CameraMetadata(const CameraMetadata &other);
~CameraMetadata();
+ CameraMetadata &operator=(const CameraMetadata &other);
+
+ std::tuple<size_t, size_t> usage() const;
+ bool resized() const { return resized_; }
+
bool isValid() const { return valid_; }
- bool addEntry(uint32_t tag, const void *data, size_t data_count);
- bool updateEntry(uint32_t tag, const void *data, size_t data_count);
+ bool getEntry(uint32_t tag, camera_metadata_ro_entry_t *entry) const;
+
+ template<typename T,
+ std::enable_if_t<std::is_arithmetic_v<T>> * = nullptr>
+ bool addEntry(uint32_t tag, const T &data)
+ {
+ return addEntry(tag, &data, 1, sizeof(T));
+ }
+
+ template<typename T, size_t size>
+ bool addEntry(uint32_t tag, const T (&data)[size])
+ {
+ return addEntry(tag, data, size, sizeof(T));
+ }
+
+ template<typename S,
+ typename T = typename S::value_type>
+ bool addEntry(uint32_t tag, const S &data)
+ {
+ return addEntry(tag, data.data(), data.size(), sizeof(T));
+ }
+
+ template<typename T>
+ bool addEntry(uint32_t tag, const T *data, size_t count)
+ {
+ return addEntry(tag, data, count, sizeof(T));
+ }
+
+ template<typename T>
+ bool updateEntry(uint32_t tag, const T &data)
+ {
+ return updateEntry(tag, &data, 1, sizeof(T));
+ }
+
+ template<typename T, size_t size>
+ bool updateEntry(uint32_t tag, const T (&data)[size])
+ {
+ return updateEntry(tag, data, size, sizeof(T));
+ }
+
+ template<typename S,
+ typename T = typename S::value_type>
+ bool updateEntry(uint32_t tag, const S &data)
+ {
+ return updateEntry(tag, data.data(), data.size(), sizeof(T));
+ }
+
+ template<typename T>
+ bool updateEntry(uint32_t tag, const T *data, size_t count)
+ {
+ return updateEntry(tag, data, count, sizeof(T));
+ }
camera_metadata_t *get();
const camera_metadata_t *get() const;
private:
+ bool resize(size_t count, size_t size);
+ bool addEntry(uint32_t tag, const void *data, size_t count,
+ size_t elementSize);
+ bool updateEntry(uint32_t tag, const void *data, size_t count,
+ size_t elementSize);
+
camera_metadata_t *metadata_;
bool valid_;
+ bool resized_;
};
#endif /* __ANDROID_CAMERA_METADATA_H__ */
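
The templated overloads above let call sites drop the explicit pointer/count/element-size triplet: the element size is deduced from the argument type and, for updateEntry(), validated against the tag type in the private overload. A short usage illustration (tag choices arbitrary):

CameraMetadata metadata(2, 16);

/* Scalar overload: count = 1, element size = sizeof(uint8_t). */
uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
metadata.addEntry(ANDROID_CONTROL_AE_MODE, aeMode);

/* Array overload: count and element size deduced from the array type. */
int32_t fpsRange[] = { 15, 30 };
metadata.addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
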
diff --git a/src/android/camera_ops.cpp b/src/android/camera_ops.cpp
index 696e8043..8a3cfa17 100644
--- a/src/android/camera_ops.cpp
+++ b/src/android/camera_ops.cpp
@@ -66,8 +66,14 @@ static void hal_dev_dump([[maybe_unused]] const struct camera3_device *dev,
{
}
-static int hal_dev_flush([[maybe_unused]] const struct camera3_device *dev)
+static int hal_dev_flush(const struct camera3_device *dev)
{
+ if (!dev)
+ return -EINVAL;
+
+ CameraDevice *camera = reinterpret_cast<CameraDevice *>(dev->priv);
+ camera->flush();
+
return 0;
}
diff --git a/src/android/camera_stream.cpp b/src/android/camera_stream.cpp
new file mode 100644
index 00000000..bf4a7b41
--- /dev/null
+++ b/src/android/camera_stream.cpp
@@ -0,0 +1,146 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * camera_stream.cpp - Camera HAL stream
+ */
+
+#include "camera_stream.h"
+
+#include "camera_buffer.h"
+#include "camera_device.h"
+#include "camera_metadata.h"
+#include "jpeg/post_processor_jpeg.h"
+
+#include <libcamera/formats.h>
+
+using namespace libcamera;
+
+LOG_DECLARE_CATEGORY(HAL)
+
+/*
+ * \class CameraStream
+ * \brief Map a camera3_stream_t to a StreamConfiguration
+ *
+ * The CameraStream class maps a camera3_stream_t provided by Android
+ * camera framework to a libcamera::StreamConfiguration.
+ *
+ * The StreamConfiguration is represented by its index as recorded in the
+ * CameraConfiguration, and not by pointer, as the StreamConfiguration is
+ * subject to relocation.
+ *
+ * A single StreamConfiguration may be used to deliver one or more streams to
+ * the Android framework. The mapping between a camera3 stream and a
+ * StreamConfiguration is described by the CameraStream::Type.
+ *
+ * CameraStream handles all the aspects of producing a stream with the size
+ * and format requested by the camera3 stream from the data produced by
+ * the associated libcamera::Stream, including the creation of the encoder
+ * and buffer allocation.
+ */
+
+CameraStream::CameraStream(CameraDevice *const cameraDevice,
+ CameraConfiguration *config, Type type,
+ camera3_stream_t *camera3Stream, unsigned int index)
+ : cameraDevice_(cameraDevice), config_(config), type_(type),
+ camera3Stream_(camera3Stream), index_(index)
+{
+ if (type_ == Type::Internal || type_ == Type::Mapped) {
+ /*
+ * \todo There might be multiple post-processors. The logic
+		 * \todo There might be multiple post-processors. The logic to
+		 * select which one should be instantiated here is deferred
+		 * for the future. For now we only have PostProcessorJpeg,
+		 * and that is what we instantiate here.
+ postProcessor_ = std::make_unique<PostProcessorJpeg>(cameraDevice_);
+ }
+
+ if (type == Type::Internal) {
+ allocator_ = std::make_unique<FrameBufferAllocator>(cameraDevice_->camera());
+ mutex_ = std::make_unique<std::mutex>();
+ }
+}
+
+const StreamConfiguration &CameraStream::configuration() const
+{
+ return config_->at(index_);
+}
+
+Stream *CameraStream::stream() const
+{
+ return configuration().stream();
+}
+
+int CameraStream::configure()
+{
+ if (postProcessor_) {
+ StreamConfiguration output = configuration();
+ output.pixelFormat = formats::MJPEG;
+ int ret = postProcessor_->configure(configuration(), output);
+ if (ret)
+ return ret;
+ }
+
+ if (allocator_) {
+ int ret = allocator_->allocate(stream());
+ if (ret < 0)
+ return ret;
+
+		/* Save pointers to the reserved frame buffers. */
+ for (const auto &frameBuffer : allocator_->buffers(stream()))
+ buffers_.push_back(frameBuffer.get());
+ }
+
+ camera3Stream_->max_buffers = configuration().bufferCount;
+
+ return 0;
+}
+
+int CameraStream::process(const libcamera::FrameBuffer &source,
+ buffer_handle_t camera3Dest,
+ const CameraMetadata &requestMetadata,
+ CameraMetadata *resultMetadata)
+{
+ if (!postProcessor_)
+ return 0;
+
+ /*
+ * \todo Buffer mapping and processing should be moved to a
+ * separate thread.
+ */
+ CameraBuffer dest(camera3Dest, PROT_READ | PROT_WRITE);
+ if (!dest.isValid()) {
+ LOG(HAL, Error) << "Failed to map android blob buffer";
+ return -EINVAL;
+ }
+
+ return postProcessor_->process(source, &dest, requestMetadata, resultMetadata);
+}
+
+FrameBuffer *CameraStream::getBuffer()
+{
+ if (!allocator_)
+ return nullptr;
+
+ std::lock_guard<std::mutex> locker(*mutex_);
+
+ if (buffers_.empty()) {
+ LOG(HAL, Error) << "Buffer underrun";
+ return nullptr;
+ }
+
+ FrameBuffer *buffer = buffers_.back();
+ buffers_.pop_back();
+
+ return buffer;
+}
+
+void CameraStream::putBuffer(libcamera::FrameBuffer *buffer)
+{
+ if (!allocator_)
+ return;
+
+ std::lock_guard<std::mutex> locker(*mutex_);
+
+ buffers_.push_back(buffer);
+}
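
For an Internal stream, the full buffer cycle as driven by CameraDevice in this series is: borrow a FrameBuffer from the pool when the request is queued, let libcamera fill it, post-process into the Android buffer at completion, then return it. Condensed from the two call sites shown earlier in this diff (error paths omitted):

/* processCaptureRequest(): */
FrameBuffer *buffer = cameraStream->getBuffer();
descriptor.request_->addBuffer(cameraStream->stream(), buffer,
			       camera3Buffer.acquire_fence);

/* requestComplete(): */
FrameBuffer *src = request->findBuffer(cameraStream->stream());
cameraStream->process(*src, *camera3Buffer.buffer,
		      descriptor.settings_, resultMetadata.get());
cameraStream->putBuffer(src);
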
diff --git a/src/android/camera_stream.h b/src/android/camera_stream.h
new file mode 100644
index 00000000..629d9e00
--- /dev/null
+++ b/src/android/camera_stream.h
@@ -0,0 +1,147 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * camera_stream.h - Camera HAL stream
+ */
+#ifndef __ANDROID_CAMERA_STREAM_H__
+#define __ANDROID_CAMERA_STREAM_H__
+
+#include <memory>
+#include <mutex>
+#include <vector>
+
+#include <hardware/camera3.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/framebuffer_allocator.h>
+#include <libcamera/geometry.h>
+#include <libcamera/pixel_format.h>
+
+#include "libcamera/internal/framebuffer.h"
+
+class CameraDevice;
+class CameraMetadata;
+class PostProcessor;
+
+class CameraStream
+{
+public:
+ /*
+ * Enumeration of CameraStream types.
+ *
+ * A camera stream associates an Android stream with a libcamera stream.
+ * This enumeration describes how the two streams are associated and how
+ * and where data produced by libcamera is delivered to the
+ * Android framework.
+ *
+ * Direct:
+ *
+ * The Android stream is directly mapped onto a libcamera stream: frames
+ * are delivered by the library directly in the memory location
+ * specified by the Android stream (buffer_handle_t->data) and provided
+ * to the framework as they are. The Android stream characteristics are
+ * directly translated to the libcamera stream configuration.
+ *
+ * +-----+ +-----+
+ * | A | | L |
+ * +-----+ +-----+
+ * | |
+ * V V
+ * +-----+ +------+
+ * | B |<---------------| FB |
+ * +-----+ +------+
+ *
+ *
+ * Internal:
+ *
+ * Data for the Android stream is produced by processing a libcamera
+ * stream created by the HAL for that purpose. The libcamera stream
+ * needs to be supplied with intermediate buffers where the library
+ * delivers frames to be processed and then provided to the framework.
+ * The libcamera stream configuration is not a direct translation of the
+ * Android stream characteristics, but it describes the format and size
+ * required for the processing procedure to produce frames in the
+ * Android required format.
+ *
+ * +-----+ +-----+
+ * | A | | L |
+ * +-----+ +-----+
+ * | |
+ * V V
+ * +-----+ +------+
+ * | B | | FB |
+ * +-----+ +------+
+ * ^ |
+ * |-------Processing------|
+ *
+ *
+ * Mapped:
+ *
+ * Data for the Android stream is produced by processing a libcamera
+ * stream associated with another CameraStream. Mapped camera streams do
+ * not need any memory to be reserved for them as they process data
+ * produced by libcamera for a different stream whose format and size
+ * are compatible with the processing procedure requirements to produce
+ * frames in the format required by Android.
+ *
+ * +-----+ +-----+ +-----+
+ * | A | | A' | | L |
+ * +-----+ +-----+ +-----+
+ * | | |
+ * V V V
+ * +-----+ +-----+ +------+
+ * | B | | B' |<---------| FB |
+ * +-----+ +-----+ +------+
+ * ^ |
+ * |--Processing--|
+ *
+ *
+ * --------------------------------------------------------------------
+ * A = Android stream
+ * L = libcamera stream
+ * B = memory buffer
+ * FB = libcamera FrameBuffer
+ * "Processing" = Frame processing procedure (Encoding, scaling etc)
+ */
+ enum class Type {
+ Direct,
+ Internal,
+ Mapped,
+ };
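+ /*
+ * For instance (illustrative only): a preview stream may be Direct,
+ * a JPEG capture stream Internal (the HAL allocates intermediate
+ * buffers and encodes them), and a YUV stream of compatible size
+ * Mapped onto a stream another CameraStream already produces:
+ *
+ *   CameraStream direct(dev, config, Type::Direct, previewStream, 0);
+ *   CameraStream jpeg(dev, config, Type::Internal, blobStream, 1);
+ *   CameraStream mapped(dev, config, Type::Mapped, yuvStream, 0);
+ */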
+ CameraStream(CameraDevice *const cameraDevice,
+ libcamera::CameraConfiguration *config, Type type,
+ camera3_stream_t *camera3Stream, unsigned int index);
+
+ Type type() const { return type_; }
+ const camera3_stream_t &camera3Stream() const { return *camera3Stream_; }
+ const libcamera::StreamConfiguration &configuration() const;
+ libcamera::Stream *stream() const;
+
+ int configure();
+ int process(const libcamera::FrameBuffer &source,
+ buffer_handle_t camera3Dest,
+ const CameraMetadata &requestMetadata,
+ CameraMetadata *resultMetadata);
+ libcamera::FrameBuffer *getBuffer();
+ void putBuffer(libcamera::FrameBuffer *buffer);
+
+private:
+ CameraDevice *const cameraDevice_;
+ const libcamera::CameraConfiguration *config_;
+ const Type type_;
+ camera3_stream_t *camera3Stream_;
+ const unsigned int index_;
+
+ std::unique_ptr<libcamera::FrameBufferAllocator> allocator_;
+ std::vector<libcamera::FrameBuffer *> buffers_;
+ /*
+ * The mutex is stored through a std::unique_ptr to keep the class
+ * MoveConstructible, as instances are stored in a std::vector in
+ * CameraDevice.
+ */
+ std::unique_ptr<std::mutex> mutex_;
+ std::unique_ptr<PostProcessor> postProcessor_;
+};
+
+#endif /* __ANDROID_CAMERA_STREAM_H__ */
diff --git a/src/android/camera_worker.cpp b/src/android/camera_worker.cpp
new file mode 100644
index 00000000..98dddd9e
--- /dev/null
+++ b/src/android/camera_worker.cpp
@@ -0,0 +1,129 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * camera_worker.cpp - Process capture requests on behalf of the Camera HAL
+ */
+
+#include "camera_worker.h"
+
+#include <errno.h>
+#include <string.h>
+#include <sys/poll.h>
+#include <unistd.h>
+
+#include "camera_device.h"
+
+using namespace libcamera;
+
+LOG_DECLARE_CATEGORY(HAL)
+
+/*
+ * \class CaptureRequest
+ * \brief Wrap a libcamera::Request associated with buffers and fences
+ *
+ * A CaptureRequest is constructed by the CameraDevice, filled with
+ * buffers and fences provided by the camera3 framework and then processed
+ * by the CameraWorker, which queues it to the libcamera::Camera after
+ * handling the acquire fences.
+ */
+CaptureRequest::CaptureRequest(libcamera::Camera *camera)
+ : camera_(camera)
+{
+ request_ = camera_->createRequest(reinterpret_cast<uint64_t>(this));
+}
+
+void CaptureRequest::addBuffer(Stream *stream, FrameBuffer *buffer, int fence)
+{
+ request_->addBuffer(stream, buffer);
+ acquireFences_.push_back(fence);
+}
+
+void CaptureRequest::queue()
+{
+ camera_->queueRequest(request_.get());
+}
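+
+/*
+ * Typical flow (an illustrative sketch, simplified from CameraDevice):
+ *
+ *   CaptureRequest request(camera);
+ *   request.addBuffer(stream, buffer, acquireFence);
+ *   worker.queueRequest(&request);
+ *
+ * The CameraWorker waits on the acquire fences and only then queues the
+ * wrapped libcamera::Request to the Camera.
+ */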
+
+/*
+ * \class CameraWorker
+ * \brief Process a CaptureRequest on an internal thread
+ *
+ * The CameraWorker class wraps a Worker that runs on an internal thread
+ * and schedules the processing of CaptureRequest instances through it.
+ */
+CameraWorker::CameraWorker()
+{
+ worker_.moveToThread(this);
+}
+
+void CameraWorker::start()
+{
+ Thread::start();
+}
+
+void CameraWorker::stop()
+{
+ exit();
+ wait();
+}
+
+void CameraWorker::run()
+{
+ exec();
+ dispatchMessages(Message::Type::InvokeMessage);
+}
+
+void CameraWorker::queueRequest(CaptureRequest *request)
+{
+ /* Process the request asynchronously on the worker, which runs its own thread. */
+ worker_.invokeMethod(&Worker::processRequest, ConnectionTypeQueued,
+ request);
+}
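+
+/*
+ * Lifecycle sketch (illustrative only): the thread must be started before
+ * requests are queued, and stopped before the worker is destroyed:
+ *
+ *   CameraWorker worker;
+ *   worker.start();
+ *   worker.queueRequest(request);
+ *   ...
+ *   worker.stop();
+ */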
+
+/*
+ * \class CameraWorker::Worker
+ * \brief Process a CaptureRequest handling acquisition fences
+ */
+int CameraWorker::Worker::waitFence(int fence)
+{
+ /*
+ * \todo Better characterize the timeout. Currently equal to the one
+ * used by the Rockchip Camera HAL on ChromeOS.
+ */
+ constexpr unsigned int timeoutMs = 300;
+ struct pollfd fds = { fence, POLLIN, 0 };
+
+ do {
+ int ret = poll(&fds, 1, timeoutMs);
+ if (ret == 0)
+ return -ETIME;
+
+ if (ret > 0) {
+ if (fds.revents & (POLLERR | POLLNVAL))
+ return -EINVAL;
+
+ return 0;
+ }
+ } while (errno == EINTR || errno == EAGAIN);
+
+ return -errno;
+}
+
+void CameraWorker::Worker::processRequest(CaptureRequest *request)
+{
+ /* Wait on all fences before queuing the Request. */
+ for (int fence : request->fences()) {
+ if (fence == -1)
+ continue;
+
+ int ret = waitFence(fence);
+ close(fence);
+ if (ret < 0) {
+ LOG(HAL, Error) << "Failed waiting for fence: "
+ << fence << ": " << strerror(-ret);
+ return;
+ }
+ }
+
+ request->queue();
+}
diff --git a/src/android/camera_worker.h b/src/android/camera_worker.h
new file mode 100644
index 00000000..67ae50bd
--- /dev/null
+++ b/src/android/camera_worker.h
@@ -0,0 +1,71 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * camera_worker.h - Process capture requests on behalf of the Camera HAL
+ */
+#ifndef __ANDROID_CAMERA_WORKER_H__
+#define __ANDROID_CAMERA_WORKER_H__
+
+#include <memory>
+
+#include <libcamera/base/object.h>
+#include <libcamera/base/thread.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/request.h>
+#include <libcamera/stream.h>
+
+class CameraDevice;
+
+class CaptureRequest
+{
+public:
+ CaptureRequest(libcamera::Camera *camera);
+
+ const std::vector<int> &fences() const { return acquireFences_; }
+ libcamera::ControlList &controls() { return request_->controls(); }
+ const libcamera::ControlList &metadata() const
+ {
+ return request_->metadata();
+ }
+ unsigned long cookie() const { return request_->cookie(); }
+
+ void addBuffer(libcamera::Stream *stream,
+ libcamera::FrameBuffer *buffer, int fence);
+ void queue();
+
+private:
+ libcamera::Camera *camera_;
+ std::vector<int> acquireFences_;
+ std::unique_ptr<libcamera::Request> request_;
+};
+
+class CameraWorker : private libcamera::Thread
+{
+public:
+ CameraWorker();
+
+ void start();
+ void stop();
+
+ void queueRequest(CaptureRequest *request);
+
+protected:
+ void run() override;
+
+private:
+ class Worker : public libcamera::Object
+ {
+ public:
+ void processRequest(CaptureRequest *request);
+
+ private:
+ int waitFence(int fence);
+ };
+
+ Worker worker_;
+};
+
+#endif /* __ANDROID_CAMERA_WORKER_H__ */
diff --git a/src/android/cros/camera3_hal.cpp b/src/android/cros/camera3_hal.cpp
new file mode 100644
index 00000000..fb863b5f
--- /dev/null
+++ b/src/android/cros/camera3_hal.cpp
@@ -0,0 +1,24 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera3_hal.cpp - cros-specific components of Android Camera HALv3 module
+ */
+
+#include <cros-camera/cros_camera_hal.h>
+
+#include "../camera_hal_manager.h"
+
+static void set_up([[maybe_unused]] cros::CameraMojoChannelManagerToken *token)
+{
+}
+
+static void tear_down()
+{
+ delete CameraHalManager::instance();
+}
+
+cros::cros_camera_hal_t CROS_CAMERA_EXPORT CROS_CAMERA_HAL_INFO_SYM = {
+ .set_up = set_up,
+ .tear_down = tear_down
+};
diff --git a/src/android/cros/meson.build b/src/android/cros/meson.build
new file mode 100644
index 00000000..35995dd8
--- /dev/null
+++ b/src/android/cros/meson.build
@@ -0,0 +1,13 @@
+# SPDX-License-Identifier: CC0-1.0
+
+if get_option('android_platform') != 'cros'
+ subdir_done()
+endif
+
+android_hal_sources += files([
+ 'camera3_hal.cpp',
+])
+
+android_deps += dependency('libcros_camera')
+
+android_cpp_args += ['-DOS_CHROMEOS']
diff --git a/src/android/data/soraka/camera_hal.yaml b/src/android/data/soraka/camera_hal.yaml
new file mode 100644
index 00000000..2e996403
--- /dev/null
+++ b/src/android/data/soraka/camera_hal.yaml
@@ -0,0 +1,8 @@
+cameras:
+ "\\_SB_.PCI0.I2C4.CAM1":
+ location: front
+ rotation: 0
+
+ "\\_SB_.PCI0.I2C2.CAM0":
+ location: back
+ rotation: 0
diff --git a/src/android/jpeg/encoder.h b/src/android/jpeg/encoder.h
index cf26d67a..a28522f4 100644
--- a/src/android/jpeg/encoder.h
+++ b/src/android/jpeg/encoder.h
@@ -1,4 +1,4 @@
-/* SPDX-License-Identifier: GPL-2.0-or-later */
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
@@ -7,19 +7,21 @@
#ifndef __ANDROID_JPEG_ENCODER_H__
#define __ANDROID_JPEG_ENCODER_H__
-#include <libcamera/buffer.h>
-#include <libcamera/span.h>
+#include <libcamera/base/span.h>
+
+#include <libcamera/framebuffer.h>
#include <libcamera/stream.h>
class Encoder
{
public:
- virtual ~Encoder() {};
+ virtual ~Encoder() = default;
virtual int configure(const libcamera::StreamConfiguration &cfg) = 0;
- virtual int encode(const libcamera::FrameBuffer *source,
- const libcamera::Span<uint8_t> &destination,
- const libcamera::Span<const uint8_t> &exifData) = 0;
+ virtual int encode(const libcamera::FrameBuffer &source,
+ libcamera::Span<uint8_t> destination,
+ libcamera::Span<const uint8_t> exifData,
+ unsigned int quality) = 0;
};
#endif /* __ANDROID_JPEG_ENCODER_H__ */
diff --git a/src/android/jpeg/encoder_libjpeg.cpp b/src/android/jpeg/encoder_libjpeg.cpp
index 510613cd..e6358ca9 100644
--- a/src/android/jpeg/encoder_libjpeg.cpp
+++ b/src/android/jpeg/encoder_libjpeg.cpp
@@ -1,4 +1,4 @@
-/* SPDX-License-Identifier: GPL-2.0-or-later */
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
@@ -16,16 +16,17 @@
#include <unistd.h>
#include <vector>
+#include <libcamera/base/log.h>
+
#include <libcamera/camera.h>
#include <libcamera/formats.h>
#include <libcamera/pixel_format.h>
#include "libcamera/internal/formats.h"
-#include "libcamera/internal/log.h"
using namespace libcamera;
-LOG_DEFINE_CATEGORY(JPEG)
+LOG_DECLARE_CATEGORY(JPEG)
namespace {
@@ -68,7 +69,6 @@ const struct JPEGPixelFormatInfo &findPixelInfo(const PixelFormat &format)
} /* namespace */
EncoderLibJpeg::EncoderLibJpeg()
- : quality_(95)
{
/* \todo Expand error handling coverage with a custom handler. */
compress_.err = jpeg_std_error(&jerr_);
@@ -94,7 +94,6 @@ int EncoderLibJpeg::configure(const StreamConfiguration &cfg)
compress_.input_components = info.colorSpace == JCS_GRAYSCALE ? 1 : 3;
jpeg_set_defaults(&compress_);
- jpeg_set_quality(&compress_, quality_, TRUE);
pixelFormatInfo_ = &info.pixelFormatInfo;
@@ -104,9 +103,9 @@ int EncoderLibJpeg::configure(const StreamConfiguration &cfg)
return 0;
}
-void EncoderLibJpeg::compressRGB(const libcamera::MappedBuffer *frame)
+void EncoderLibJpeg::compressRGB(Span<const uint8_t> frame)
{
- unsigned char *src = static_cast<unsigned char *>(frame->maps()[0].data());
+ unsigned char *src = const_cast<unsigned char *>(frame.data());
/* \todo Stride information should come from buffer configuration. */
unsigned int stride = pixelFormatInfo_->stride(compress_.image_width, 0);
@@ -122,7 +121,7 @@ void EncoderLibJpeg::compressRGB(const libcamera::MappedBuffer *frame)
* Compress the incoming buffer from a supported NV format.
* This naively unpacks the semi-planar NV12 to a YUV888 format for libjpeg.
*/
-void EncoderLibJpeg::compressNV(const libcamera::MappedBuffer *frame)
+void EncoderLibJpeg::compressNV(Span<const uint8_t> frame)
{
uint8_t tmprowbuf[compress_.image_width * 3];
@@ -144,7 +143,7 @@ void EncoderLibJpeg::compressNV(const libcamera::MappedBuffer *frame)
unsigned int cb_pos = nvSwap_ ? 1 : 0;
unsigned int cr_pos = nvSwap_ ? 0 : 1;
- const unsigned char *src = static_cast<unsigned char *>(frame->maps()[0].data());
+ const unsigned char *src = frame.data();
const unsigned char *src_c = src + y_stride * compress_.image_height;
JSAMPROW row_pointer[1];
@@ -179,20 +178,27 @@ void EncoderLibJpeg::compressNV(const libcamera::MappedBuffer *frame)
}
}
-int EncoderLibJpeg::encode(const FrameBuffer *source,
- const libcamera::Span<uint8_t> &dest,
- const libcamera::Span<const uint8_t> &exifData)
+int EncoderLibJpeg::encode(const FrameBuffer &source, Span<uint8_t> dest,
+ Span<const uint8_t> exifData, unsigned int quality)
{
- MappedFrameBuffer frame(source, PROT_READ);
+ MappedFrameBuffer frame(&source, PROT_READ);
if (!frame.isValid()) {
LOG(JPEG, Error) << "Failed to map FrameBuffer : "
<< strerror(frame.error());
return frame.error();
}
+ return encode(frame.maps()[0], dest, exifData, quality);
+}
+
+int EncoderLibJpeg::encode(Span<const uint8_t> src, Span<uint8_t> dest,
+ Span<const uint8_t> exifData, unsigned int quality)
+{
unsigned char *destination = dest.data();
unsigned long size = dest.size();
+ jpeg_set_quality(&compress_, quality, TRUE);
+
/*
* The jpeg_mem_dest will reallocate if the required size is not
* sufficient. That means the output won't be written to the correct
@@ -215,9 +221,9 @@ int EncoderLibJpeg::encode(const FrameBuffer *source,
<< "x" << compress_.image_height;
if (nv_)
- compressNV(&frame);
+ compressNV(src);
else
- compressRGB(&frame);
+ compressRGB(src);
jpeg_finish_compress(&compress_);
diff --git a/src/android/jpeg/encoder_libjpeg.h b/src/android/jpeg/encoder_libjpeg.h
index 1e8df05a..14bf8922 100644
--- a/src/android/jpeg/encoder_libjpeg.h
+++ b/src/android/jpeg/encoder_libjpeg.h
@@ -1,4 +1,4 @@
-/* SPDX-License-Identifier: GPL-2.0-or-later */
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
@@ -9,8 +9,8 @@
#include "encoder.h"
-#include "libcamera/internal/buffer.h"
#include "libcamera/internal/formats.h"
+#include "libcamera/internal/framebuffer.h"
#include <jpeglib.h>
@@ -21,19 +21,22 @@ public:
~EncoderLibJpeg();
int configure(const libcamera::StreamConfiguration &cfg) override;
- int encode(const libcamera::FrameBuffer *source,
- const libcamera::Span<uint8_t> &destination,
- const libcamera::Span<const uint8_t> &exifData) override;
+ int encode(const libcamera::FrameBuffer &source,
+ libcamera::Span<uint8_t> destination,
+ libcamera::Span<const uint8_t> exifData,
+ unsigned int quality) override;
+ int encode(libcamera::Span<const uint8_t> source,
+ libcamera::Span<uint8_t> destination,
+ libcamera::Span<const uint8_t> exifData,
+ unsigned int quality);
private:
- void compressRGB(const libcamera::MappedBuffer *frame);
- void compressNV(const libcamera::MappedBuffer *frame);
+ void compressRGB(libcamera::Span<const uint8_t> frame);
+ void compressNV(libcamera::Span<const uint8_t> frame);
struct jpeg_compress_struct compress_;
struct jpeg_error_mgr jerr_;
- unsigned int quality_;
-
const libcamera::PixelFormatInfo *pixelFormatInfo_;
bool nv_;
diff --git a/src/android/jpeg/exif.cpp b/src/android/jpeg/exif.cpp
index 32cf8974..0ba4cb85 100644
--- a/src/android/jpeg/exif.cpp
+++ b/src/android/jpeg/exif.cpp
@@ -1,4 +1,4 @@
-/* SPDX-License-Identifier: GPL-2.0-or-later */
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
@@ -7,7 +7,15 @@
#include "exif.h"
-#include "libcamera/internal/log.h"
+#include <cmath>
+#include <iomanip>
+#include <map>
+#include <sstream>
+#include <tuple>
+#include <uchar.h>
+
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
using namespace libcamera;
@@ -35,7 +43,8 @@ enum class _ExifTag {
* data can be obtained using the data() method.
*/
Exif::Exif()
- : valid_(false), data_(nullptr), exifData_(0), size_(0)
+ : valid_(false), data_(nullptr), order_(EXIF_BYTE_ORDER_INTEL),
+ exifData_(0), size_(0)
{
/* Create an ExifMem allocator to construct entries. */
mem_ = exif_mem_new_default();
@@ -59,7 +68,7 @@ Exif::Exif()
* Big-Endian: EXIF_BYTE_ORDER_MOTOROLA
* Little Endian: EXIF_BYTE_ORDER_INTEL
*/
- exif_data_set_byte_order(data_, EXIF_BYTE_ORDER_INTEL);
+ exif_data_set_byte_order(data_, order_);
setString(EXIF_IFD_EXIF, EXIF_TAG_EXIF_VERSION,
EXIF_FORMAT_UNDEFINED, "0231");
@@ -73,8 +82,16 @@ Exif::~Exif()
if (exifData_)
free(exifData_);
- if (data_)
+ if (data_) {
+ /*
+ * Reset the thumbnail data to avoid it being double-freed by
+ * libexif. The data is owned by the caller (i.e. PostProcessorJpeg).
+ */
+ data_->data = nullptr;
+ data_->size = 0;
+
exif_data_unref(data_);
+ }
if (mem_)
exif_mem_unref(mem_);
@@ -138,13 +155,23 @@ ExifEntry *Exif::createEntry(ExifIfd ifd, ExifTag tag, ExifFormat format,
return entry;
}
+void Exif::setByte(ExifIfd ifd, ExifTag tag, uint8_t item)
+{
+ ExifEntry *entry = createEntry(ifd, tag, EXIF_FORMAT_BYTE, 1, 1);
+ if (!entry)
+ return;
+
+ entry->data[0] = item;
+ exif_entry_unref(entry);
+}
+
void Exif::setShort(ExifIfd ifd, ExifTag tag, uint16_t item)
{
ExifEntry *entry = createEntry(ifd, tag);
if (!entry)
return;
- exif_set_short(entry->data, EXIF_BYTE_ORDER_INTEL, item);
+ exif_set_short(entry->data, order_, item);
exif_entry_unref(entry);
}
@@ -154,31 +181,96 @@ void Exif::setLong(ExifIfd ifd, ExifTag tag, uint32_t item)
if (!entry)
return;
- exif_set_long(entry->data, EXIF_BYTE_ORDER_INTEL, item);
+ exif_set_long(entry->data, order_, item);
exif_entry_unref(entry);
}
void Exif::setRational(ExifIfd ifd, ExifTag tag, ExifRational item)
{
- ExifEntry *entry = createEntry(ifd, tag);
+ setRational(ifd, tag, { &item, 1 });
+}
+
+void Exif::setRational(ExifIfd ifd, ExifTag tag, Span<const ExifRational> items)
+{
+ ExifEntry *entry = createEntry(ifd, tag, EXIF_FORMAT_RATIONAL,
+ items.size(),
+ items.size() * sizeof(ExifRational));
if (!entry)
return;
- exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, item);
+ for (size_t i = 0; i < items.size(); i++)
+ exif_set_rational(entry->data + i * sizeof(ExifRational),
+ order_, items[i]);
exif_entry_unref(entry);
}
-void Exif::setString(ExifIfd ifd, ExifTag tag, ExifFormat format, const std::string &item)
+static const std::map<Exif::StringEncoding, std::array<uint8_t, 8>> stringEncodingCodes = {
+ { Exif::ASCII, { 0x41, 0x53, 0x43, 0x49, 0x49, 0x00, 0x00, 0x00 } },
+ { Exif::Unicode, { 0x55, 0x4e, 0x49, 0x43, 0x4f, 0x44, 0x45, 0x00 } },
+};
+
+void Exif::setString(ExifIfd ifd, ExifTag tag, ExifFormat format,
+ const std::string &item, StringEncoding encoding)
{
- /* Pad 1 extra byte for null-terminated string in ASCII format. */
- size_t length = format == EXIF_FORMAT_ASCII ?
- item.length() + 1 : item.length();
+ std::string ascii;
+ size_t length;
+ const char *str;
+ std::vector<uint8_t> buf;
+
+ if (format == EXIF_FORMAT_ASCII) {
+ ascii = utils::toAscii(item);
+ str = ascii.c_str();
+
+ /* Pad 1 extra byte to null-terminate the ASCII string. */
+ length = ascii.length() + 1;
+ } else {
+ std::u16string u16str;
+
+ auto encodingString = stringEncodingCodes.find(encoding);
+ if (encodingString != stringEncodingCodes.end()) {
+ buf = {
+ encodingString->second.begin(),
+ encodingString->second.end()
+ };
+ }
+
+ switch (encoding) {
+ case Unicode:
+ u16str = utf8ToUtf16(item);
+
+ buf.resize(8 + u16str.size() * 2);
+ for (size_t i = 0; i < u16str.size(); i++) {
+ if (order_ == EXIF_BYTE_ORDER_INTEL) {
+ buf[8 + 2 * i] = u16str[i] & 0xff;
+ buf[8 + 2 * i + 1] = (u16str[i] >> 8) & 0xff;
+ } else {
+ buf[8 + 2 * i] = (u16str[i] >> 8) & 0xff;
+ buf[8 + 2 * i + 1] = u16str[i] & 0xff;
+ }
+ }
+
+ break;
+
+ case ASCII:
+ case NoEncoding:
+ buf.insert(buf.end(), item.begin(), item.end());
+ break;
+ }
+
+ str = reinterpret_cast<const char *>(buf.data());
+
+ /*
+ * Strings stored in different formats (EXIF_FORMAT_UNDEFINED)
+ * are not null-terminated.
+ */
+ length = buf.size();
+ }
ExifEntry *entry = createEntry(ifd, tag, format, length, length);
if (!entry)
return;
- memcpy(entry->data, item.c_str(), length);
+ memcpy(entry->data, str, length);
exif_entry_unref(entry);
}
@@ -198,7 +290,7 @@ void Exif::setSize(const Size &size)
setLong(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_X_DIMENSION, size.width);
}
-void Exif::setTimestamp(time_t timestamp)
+void Exif::setTimestamp(time_t timestamp, std::chrono::milliseconds msec)
{
struct tm tm;
localtime_r(&timestamp, &tm);
@@ -213,19 +305,107 @@ void Exif::setTimestamp(time_t timestamp)
/* Query and set timezone information if available. */
int r = strftime(str, sizeof(str), "%z", &tm);
- if (r > 0) {
- std::string tz(str);
- tz.insert(3, 1, ':');
- setString(EXIF_IFD_EXIF,
- static_cast<ExifTag>(_ExifTag::OFFSET_TIME),
- EXIF_FORMAT_ASCII, tz);
- setString(EXIF_IFD_EXIF,
- static_cast<ExifTag>(_ExifTag::OFFSET_TIME_ORIGINAL),
- EXIF_FORMAT_ASCII, tz);
- setString(EXIF_IFD_EXIF,
- static_cast<ExifTag>(_ExifTag::OFFSET_TIME_DIGITIZED),
- EXIF_FORMAT_ASCII, tz);
- }
+ if (r <= 0)
+ return;
+
+ std::string tz(str);
+ tz.insert(3, 1, ':');
+ setString(EXIF_IFD_EXIF,
+ static_cast<ExifTag>(_ExifTag::OFFSET_TIME),
+ EXIF_FORMAT_ASCII, tz);
+ setString(EXIF_IFD_EXIF,
+ static_cast<ExifTag>(_ExifTag::OFFSET_TIME_ORIGINAL),
+ EXIF_FORMAT_ASCII, tz);
+ setString(EXIF_IFD_EXIF,
+ static_cast<ExifTag>(_ExifTag::OFFSET_TIME_DIGITIZED),
+ EXIF_FORMAT_ASCII, tz);
+
+ std::stringstream sstr;
+ sstr << std::setfill('0') << std::setw(3) << msec.count();
+ std::string subsec = sstr.str();
+
+ setString(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME,
+ EXIF_FORMAT_ASCII, subsec);
+ setString(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_ORIGINAL,
+ EXIF_FORMAT_ASCII, subsec);
+ setString(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_DIGITIZED,
+ EXIF_FORMAT_ASCII, subsec);
+}
+
+void Exif::setGPSDateTimestamp(time_t timestamp)
+{
+ struct tm tm;
+ gmtime_r(&timestamp, &tm);
+
+ char str[11];
+ strftime(str, sizeof(str), "%Y:%m:%d", &tm);
+ std::string tsStr(str);
+
+ setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_DATE_STAMP),
+ EXIF_FORMAT_ASCII, tsStr);
+
+ /* Set GPS_TIME_STAMP */
+ ExifRational ts[] = {
+ { static_cast<ExifLong>(tm.tm_hour), 1 },
+ { static_cast<ExifLong>(tm.tm_min), 1 },
+ { static_cast<ExifLong>(tm.tm_sec), 1 },
+ };
+
+ setRational(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_TIME_STAMP),
+ ts);
+}
+
+std::tuple<int, int, int> Exif::degreesToDMS(double decimalDegrees)
+{
+ int degrees = std::trunc(decimalDegrees);
+ double minutes = std::abs((decimalDegrees - degrees) * 60);
+ double seconds = (minutes - std::trunc(minutes)) * 60;
+
+ return { degrees, std::trunc(minutes), std::round(seconds) };
+}
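+
+/*
+ * Worked example (illustrative): degreesToDMS(-12.345) returns
+ * { -12, 20, 42 }, as 0.345 deg = 20.7 min and 0.7 min = 42 s. Only the
+ * degrees component carries the sign.
+ */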
+
+void Exif::setGPSDMS(ExifIfd ifd, ExifTag tag, int deg, int min, int sec)
+{
+ ExifRational coords[] = {
+ { static_cast<ExifLong>(deg), 1 },
+ { static_cast<ExifLong>(min), 1 },
+ { static_cast<ExifLong>(sec), 1 },
+ };
+
+ setRational(ifd, tag, coords);
+}
+
+/*
+ * \brief Set GPS location (lat, long, alt)
+ * \param[in] coords Pointer to an array of three coordinates: latitude
+ * and longitude in degrees, followed by altitude in meters
+ */
+void Exif::setGPSLocation(const double *coords)
+{
+ int deg, min, sec;
+
+ std::tie<int, int, int>(deg, min, sec) = degreesToDMS(coords[0]);
+ setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LATITUDE_REF),
+ EXIF_FORMAT_ASCII, deg >= 0 ? "N" : "S");
+ setGPSDMS(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LATITUDE),
+ std::abs(deg), min, sec);
+
+ std::tie<int, int, int>(deg, min, sec) = degreesToDMS(coords[1]);
+ setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LONGITUDE_REF),
+ EXIF_FORMAT_ASCII, deg >= 0 ? "E" : "W");
+ setGPSDMS(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LONGITUDE),
+ std::abs(deg), min, sec);
+
+ setByte(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_ALTITUDE_REF),
+ coords[2] >= 0 ? 0 : 1);
+ setRational(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_ALTITUDE),
+ ExifRational{ static_cast<ExifLong>(std::abs(coords[2])), 1 });
+}
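+
+/*
+ * Usage sketch (illustrative, with made-up coordinates):
+ *
+ *   double coords[3] = { 48.8582, 2.2945, 35.0 };
+ *   exif.setGPSLocation(coords);
+ *
+ * encodes 48 deg 51' 30" N, 2 deg 17' 40" E at 35 m above sea level.
+ */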
+
+void Exif::setGPSMethod(const std::string &method)
+{
+ setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_PROCESSING_METHOD),
+ EXIF_FORMAT_UNDEFINED, method, NoEncoding);
}
void Exif::setOrientation(int orientation)
@@ -237,19 +417,94 @@ void Exif::setOrientation(int orientation)
value = 1;
break;
case 90:
- value = 8;
+ value = 6;
break;
case 180:
value = 3;
break;
case 270:
- value = 6;
+ value = 8;
break;
}
setShort(EXIF_IFD_0, EXIF_TAG_ORIENTATION, value);
}
+/*
+ * The thumbnail data should remain valid until the Exif object is destroyed.
+ * Failing to do so, might result in no thumbnail data being set even after a
+ * call to Exif::setThumbnail().
+ */
+void Exif::setThumbnail(Span<const unsigned char> thumbnail,
+ Compression compression)
+{
+ data_->data = const_cast<unsigned char *>(thumbnail.data());
+ data_->size = thumbnail.size();
+
+ setShort(EXIF_IFD_0, EXIF_TAG_COMPRESSION, compression);
+}
+
+void Exif::setFocalLength(float length)
+{
+ ExifRational rational = { static_cast<ExifLong>(length * 1000), 1000 };
+ setRational(EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH, rational);
+}
+
+void Exif::setExposureTime(uint64_t nsec)
+{
+ ExifRational rational = { static_cast<ExifLong>(nsec), 1000000000 };
+ setRational(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_TIME, rational);
+}
+
+void Exif::setAperture(float size)
+{
+ ExifRational rational = { static_cast<ExifLong>(size * 10000), 10000 };
+ setRational(EXIF_IFD_EXIF, EXIF_TAG_FNUMBER, rational);
+}
+
+void Exif::setISO(uint16_t iso)
+{
+ setShort(EXIF_IFD_EXIF, EXIF_TAG_ISO_SPEED_RATINGS, iso);
+}
+
+void Exif::setFlash(Flash flash)
+{
+ setShort(EXIF_IFD_EXIF, EXIF_TAG_FLASH, static_cast<ExifShort>(flash));
+}
+
+void Exif::setWhiteBalance(WhiteBalance wb)
+{
+ setShort(EXIF_IFD_EXIF, EXIF_TAG_WHITE_BALANCE, static_cast<ExifShort>(wb));
+}
+
+/**
+ * \brief Convert UTF-8 string to UTF-16 string
+ * \param[in] str String to convert
+ *
+ * \return \a str in UTF-16
+ */
+std::u16string Exif::utf8ToUtf16(const std::string &str)
+{
+ mbstate_t state{};
+ char16_t c16;
+ const char *ptr = str.data();
+ const char *end = ptr + str.size();
+
+ std::u16string ret;
+ while (size_t rc = mbrtoc16(&c16, ptr, end - ptr + 1, &state)) {
+ if (rc == static_cast<size_t>(-2) ||
+ rc == static_cast<size_t>(-1))
+ break;
+
+ ret.push_back(c16);
+
+ /*
+ * A return value of (size_t)-3 indicates the second half of a
+ * surrogate pair, produced without consuming input.
+ */
+ if (rc != static_cast<size_t>(-3))
+ ptr += rc;
+ }
+
+ return ret;
+}
+
[[nodiscard]] int Exif::generate()
{
if (exifData_) {
diff --git a/src/android/jpeg/exif.h b/src/android/jpeg/exif.h
index f04cefce..23b0e097 100644
--- a/src/android/jpeg/exif.h
+++ b/src/android/jpeg/exif.h
@@ -1,4 +1,4 @@
-/* SPDX-License-Identifier: GPL-2.0-or-later */
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
@@ -7,13 +7,15 @@
#ifndef __ANDROID_JPEG_EXIF_H__
#define __ANDROID_JPEG_EXIF_H__
+#include <chrono>
#include <string>
#include <time.h>
#include <libexif/exif-data.h>
+#include <libcamera/base/span.h>
+
#include <libcamera/geometry.h>
-#include <libcamera/span.h>
class Exif
{
@@ -21,12 +23,57 @@ public:
Exif();
~Exif();
+ enum Compression {
+ None = 1,
+ JPEG = 6,
+ };
+
+ enum Flash {
+ /* bit 0 */
+ Fired = 0x01,
+ /* bits 1 and 2 */
+ StrobeDetected = 0x04,
+ StrobeNotDetected = 0x06,
+ /* bits 3 and 4 */
+ ModeCompulsoryFiring = 0x08,
+ ModeCompulsorySuppression = 0x10,
+ ModeAuto = 0x18,
+ /* bit 5 */
+ FlashNotPresent = 0x20,
+ /* bit 6 */
+ RedEye = 0x40,
+ };
+
+ enum WhiteBalance {
+ Auto = 0,
+ Manual = 1,
+ };
+
+ enum StringEncoding {
+ NoEncoding = 0,
+ ASCII = 1,
+ Unicode = 2,
+ };
+
void setMake(const std::string &make);
void setModel(const std::string &model);
void setOrientation(int orientation);
void setSize(const libcamera::Size &size);
- void setTimestamp(time_t timestamp);
+ void setThumbnail(libcamera::Span<const unsigned char> thumbnail,
+ Compression compression);
+ void setTimestamp(time_t timestamp, std::chrono::milliseconds msec);
+
+ void setGPSDateTimestamp(time_t timestamp);
+ void setGPSLocation(const double *coords);
+ void setGPSMethod(const std::string &method);
+
+ void setFocalLength(float length);
+ void setExposureTime(uint64_t nsec);
+ void setAperture(float size);
+ void setISO(uint16_t iso);
+ void setFlash(Flash flash);
+ void setWhiteBalance(WhiteBalance wb);
libcamera::Span<const uint8_t> data() const { return { exifData_, size_ }; }
[[nodiscard]] int generate();
@@ -36,16 +83,26 @@ private:
ExifEntry *createEntry(ExifIfd ifd, ExifTag tag, ExifFormat format,
unsigned long components, unsigned int size);
+ void setByte(ExifIfd ifd, ExifTag tag, uint8_t item);
void setShort(ExifIfd ifd, ExifTag tag, uint16_t item);
void setLong(ExifIfd ifd, ExifTag tag, uint32_t item);
void setString(ExifIfd ifd, ExifTag tag, ExifFormat format,
- const std::string &item);
+ const std::string &item,
+ StringEncoding encoding = NoEncoding);
void setRational(ExifIfd ifd, ExifTag tag, ExifRational item);
+ void setRational(ExifIfd ifd, ExifTag tag,
+ libcamera::Span<const ExifRational> items);
+
+ std::tuple<int, int, int> degreesToDMS(double decimalDegrees);
+ void setGPSDMS(ExifIfd ifd, ExifTag tag, int deg, int min, int sec);
+
+ std::u16string utf8ToUtf16(const std::string &str);
bool valid_;
ExifData *data_;
ExifMem *mem_;
+ ExifByteOrder order_;
unsigned char *exifData_;
unsigned int size_;
diff --git a/src/android/jpeg/post_processor_jpeg.cpp b/src/android/jpeg/post_processor_jpeg.cpp
new file mode 100644
index 00000000..0e93f365
--- /dev/null
+++ b/src/android/jpeg/post_processor_jpeg.cpp
@@ -0,0 +1,196 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * post_processor_jpeg.cpp - JPEG Post Processor
+ */
+
+#include "post_processor_jpeg.h"
+
+#include <chrono>
+
+#include "../camera_device.h"
+#include "../camera_metadata.h"
+#include "encoder_libjpeg.h"
+#include "exif.h"
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/formats.h>
+
+using namespace libcamera;
+using namespace std::chrono_literals;
+
+LOG_DEFINE_CATEGORY(JPEG)
+
+PostProcessorJpeg::PostProcessorJpeg(CameraDevice *const device)
+ : cameraDevice_(device)
+{
+}
+
+int PostProcessorJpeg::configure(const StreamConfiguration &inCfg,
+ const StreamConfiguration &outCfg)
+{
+ if (inCfg.size != outCfg.size) {
+ LOG(JPEG, Error) << "Mismatch of input and output stream sizes";
+ return -EINVAL;
+ }
+
+ if (outCfg.pixelFormat != formats::MJPEG) {
+ LOG(JPEG, Error) << "Output stream pixel format is not JPEG";
+ return -EINVAL;
+ }
+
+ streamSize_ = outCfg.size;
+
+ thumbnailer_.configure(inCfg.size, inCfg.pixelFormat);
+
+ encoder_ = std::make_unique<EncoderLibJpeg>();
+
+ return encoder_->configure(inCfg);
+}
+
+void PostProcessorJpeg::generateThumbnail(const FrameBuffer &source,
+ const Size &targetSize,
+ unsigned int quality,
+ std::vector<unsigned char> *thumbnail)
+{
+ /* Stores the raw scaled-down thumbnail bytes. */
+ std::vector<unsigned char> rawThumbnail;
+
+ thumbnailer_.createThumbnail(source, targetSize, &rawThumbnail);
+
+ StreamConfiguration thCfg;
+ thCfg.size = targetSize;
+ thCfg.pixelFormat = thumbnailer_.pixelFormat();
+ int ret = thumbnailEncoder_.configure(thCfg);
+
+ if (!rawThumbnail.empty() && !ret) {
+ /*
+ * \todo Avoid value-initialization of all elements of the
+ * vector.
+ */
+ thumbnail->resize(rawThumbnail.size());
+
+ int jpeg_size = thumbnailEncoder_.encode(rawThumbnail,
+ *thumbnail, {}, quality);
+ if (jpeg_size < 0) {
+ LOG(JPEG, Error) << "Failed to encode thumbnail";
+ thumbnail->clear();
+ return;
+ }
+
+ thumbnail->resize(jpeg_size);
+
+ LOG(JPEG, Debug)
+ << "Thumbnail compress returned "
+ << jpeg_size << " bytes";
+ }
+}
+
+int PostProcessorJpeg::process(const FrameBuffer &source,
+ CameraBuffer *destination,
+ const CameraMetadata &requestMetadata,
+ CameraMetadata *resultMetadata)
+{
+ if (!encoder_)
+ return 0;
+
+ ASSERT(destination->numPlanes() == 1);
+
+ camera_metadata_ro_entry_t entry;
+ int ret;
+
+ /* Set EXIF metadata for various tags. */
+ Exif exif;
+ exif.setMake(cameraDevice_->maker());
+ exif.setModel(cameraDevice_->model());
+
+ ret = requestMetadata.getEntry(ANDROID_JPEG_ORIENTATION, &entry);
+
+ const uint32_t jpegOrientation = ret ? *entry.data.i32 : 0;
+ resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION, jpegOrientation);
+ exif.setOrientation(jpegOrientation);
+
+ exif.setSize(streamSize_);
+ /*
+ * We set the frame's EXIF timestamp as the time of encode.
+ * Since the precision we need for EXIF timestamp is only one
+ * second, it is good enough.
+ */
+ exif.setTimestamp(std::time(nullptr), 0ms);
+
+ ret = resultMetadata->getEntry(ANDROID_SENSOR_EXPOSURE_TIME, &entry);
+ exif.setExposureTime(ret ? *entry.data.i64 : 0);
+ ret = requestMetadata.getEntry(ANDROID_LENS_APERTURE, &entry);
+ if (ret)
+ exif.setAperture(*entry.data.f);
+ exif.setISO(100);
+ exif.setFlash(Exif::Flash::FlashNotPresent);
+ exif.setWhiteBalance(Exif::WhiteBalance::Auto);
+
+ exif.setFocalLength(1.0);
+
+ ret = requestMetadata.getEntry(ANDROID_JPEG_GPS_TIMESTAMP, &entry);
+ if (ret) {
+ exif.setGPSDateTimestamp(*entry.data.i64);
+ resultMetadata->addEntry(ANDROID_JPEG_GPS_TIMESTAMP,
+ *entry.data.i64);
+ }
+
+ ret = requestMetadata.getEntry(ANDROID_JPEG_THUMBNAIL_SIZE, &entry);
+ if (ret) {
+ const int32_t *data = entry.data.i32;
+ Size thumbnailSize = { static_cast<uint32_t>(data[0]),
+ static_cast<uint32_t>(data[1]) };
+
+ ret = requestMetadata.getEntry(ANDROID_JPEG_THUMBNAIL_QUALITY, &entry);
+ uint8_t quality = ret ? *entry.data.u8 : 95;
+ resultMetadata->addEntry(ANDROID_JPEG_THUMBNAIL_QUALITY, quality);
+
+ if (thumbnailSize != Size(0, 0)) {
+ std::vector<unsigned char> thumbnail;
+ generateThumbnail(source, thumbnailSize, quality, &thumbnail);
+ if (!thumbnail.empty())
+ exif.setThumbnail(thumbnail, Exif::Compression::JPEG);
+ }
+
+ resultMetadata->addEntry(ANDROID_JPEG_THUMBNAIL_SIZE, data, 2);
+ }
+
+ ret = requestMetadata.getEntry(ANDROID_JPEG_GPS_COORDINATES, &entry);
+ if (ret) {
+ exif.setGPSLocation(entry.data.d);
+ resultMetadata->addEntry(ANDROID_JPEG_GPS_COORDINATES,
+ entry.data.d, 3);
+ }
+
+ ret = requestMetadata.getEntry(ANDROID_JPEG_GPS_PROCESSING_METHOD, &entry);
+ if (ret) {
+ std::string method(entry.data.u8, entry.data.u8 + entry.count);
+ exif.setGPSMethod(method);
+ resultMetadata->addEntry(ANDROID_JPEG_GPS_PROCESSING_METHOD,
+ entry.data.u8, entry.count);
+ }
+
+ if (exif.generate() != 0)
+ LOG(JPEG, Error) << "Failed to generate valid EXIF data";
+
+ ret = requestMetadata.getEntry(ANDROID_JPEG_QUALITY, &entry);
+ const uint8_t quality = ret ? *entry.data.u8 : 95;
+ resultMetadata->addEntry(ANDROID_JPEG_QUALITY, quality);
+
+ int jpeg_size = encoder_->encode(source, destination->plane(0),
+ exif.data(), quality);
+ if (jpeg_size < 0) {
+ LOG(JPEG, Error) << "Failed to encode stream image";
+ return jpeg_size;
+ }
+
+ /* Fill in the JPEG blob header. */
+ uint8_t *resultPtr = destination->plane(0).data()
+ + destination->jpegBufferSize(cameraDevice_->maxJpegBufferSize())
+ - sizeof(struct camera3_jpeg_blob);
+ auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
+ blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
+ blob->jpeg_size = jpeg_size;
+
+ /* Update the JPEG result metadata. */
+ resultMetadata->addEntry(ANDROID_JPEG_SIZE, jpeg_size);
+
+ return 0;
+}
diff --git a/src/android/jpeg/post_processor_jpeg.h b/src/android/jpeg/post_processor_jpeg.h
new file mode 100644
index 00000000..5c399be9
--- /dev/null
+++ b/src/android/jpeg/post_processor_jpeg.h
@@ -0,0 +1,45 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * post_processor_jpeg.h - JPEG Post Processor
+ */
+#ifndef __ANDROID_POST_PROCESSOR_JPEG_H__
+#define __ANDROID_POST_PROCESSOR_JPEG_H__
+
+#include "../post_processor.h"
+#include "encoder_libjpeg.h"
+#include "thumbnailer.h"
+
+#include <libcamera/geometry.h>
+
+#include "libcamera/internal/framebuffer.h"
+
+class CameraDevice;
+
+class PostProcessorJpeg : public PostProcessor
+{
+public:
+ PostProcessorJpeg(CameraDevice *const device);
+
+ int configure(const libcamera::StreamConfiguration &incfg,
+ const libcamera::StreamConfiguration &outcfg) override;
+ int process(const libcamera::FrameBuffer &source,
+ CameraBuffer *destination,
+ const CameraMetadata &requestMetadata,
+ CameraMetadata *resultMetadata) override;
+
+private:
+ void generateThumbnail(const libcamera::FrameBuffer &source,
+ const libcamera::Size &targetSize,
+ unsigned int quality,
+ std::vector<unsigned char> *thumbnail);
+
+ CameraDevice *const cameraDevice_;
+ std::unique_ptr<Encoder> encoder_;
+ libcamera::Size streamSize_;
+ EncoderLibJpeg thumbnailEncoder_;
+ Thumbnailer thumbnailer_;
+};
+
+#endif /* __ANDROID_POST_PROCESSOR_JPEG_H__ */
diff --git a/src/android/jpeg/thumbnailer.cpp b/src/android/jpeg/thumbnailer.cpp
new file mode 100644
index 00000000..5cb00744
--- /dev/null
+++ b/src/android/jpeg/thumbnailer.cpp
@@ -0,0 +1,93 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * thumbnailer.cpp - Simple image thumbnailer
+ */
+
+#include "thumbnailer.h"
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/formats.h>
+
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(Thumbnailer)
+
+Thumbnailer::Thumbnailer()
+ : valid_(false)
+{
+}
+
+void Thumbnailer::configure(const Size &sourceSize, PixelFormat pixelFormat)
+{
+ sourceSize_ = sourceSize;
+ pixelFormat_ = pixelFormat;
+
+ if (pixelFormat_ != formats::NV12) {
+ LOG(Thumbnailer, Error)
+ << "Failed to configure: Pixel Format "
+ << pixelFormat_.toString() << " unsupported.";
+ return;
+ }
+
+ valid_ = true;
+}
+
+void Thumbnailer::createThumbnail(const FrameBuffer &source,
+ const Size &targetSize,
+ std::vector<unsigned char> *destination)
+{
+ /* Refuse to scale if the thumbnailer isn't properly configured. */
+ if (!valid_) {
+ LOG(Thumbnailer, Error)
+ << "Thumbnailer is unconfigured or its configuration is invalid";
+ return;
+ }
+
+ MappedFrameBuffer frame(&source, PROT_READ);
+ if (!frame.isValid()) {
+ LOG(Thumbnailer, Error)
+ << "Failed to map FrameBuffer: "
+ << strerror(frame.error());
+ return;
+ }
+
+ const unsigned int sw = sourceSize_.width;
+ const unsigned int sh = sourceSize_.height;
+ const unsigned int tw = targetSize.width;
+ const unsigned int th = targetSize.height;
+
+ ASSERT(tw % 2 == 0 && th % 2 == 0);
+
+ /* Image scaling block implementing the nearest-neighbour algorithm. */
+ unsigned char *src = static_cast<unsigned char *>(frame.maps()[0].data());
+ unsigned char *srcC = src + sh * sw;
+ unsigned char *srcCb, *srcCr;
+ unsigned char *dstY, *srcY;
+
+ size_t dstSize = (th * tw) + ((th / 2) * tw);
+ destination->resize(dstSize);
+ unsigned char *dst = destination->data();
+ unsigned char *dstC = dst + th * tw;
+
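+ /*
+ * Index math example (illustrative): with sh = 480 and th = 120, the
+ * destination row y = 10 samples sourceY = (480 * 10 + 60) / 120 = 40,
+ * i.e. the nearest source row is picked for each destination row.
+ */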
+ for (unsigned int y = 0; y < th; y += 2) {
+ unsigned int sourceY = (sh * y + th / 2) / th;
+
+ dstY = dst + y * tw;
+ srcY = src + sw * sourceY;
+ srcCb = srcC + (sourceY / 2) * sw + 0;
+ srcCr = srcC + (sourceY / 2) * sw + 1;
+
+ for (unsigned int x = 0; x < tw; x += 2) {
+ unsigned int sourceX = (sw * x + tw / 2) / tw;
+
+ dstY[x] = srcY[sourceX];
+ dstY[tw + x] = srcY[sw + sourceX];
+ dstY[x + 1] = srcY[sourceX + 1];
+ dstY[tw + x + 1] = srcY[sw + sourceX + 1];
+
+ dstC[(y / 2) * tw + x + 0] = srcCb[(sourceX / 2) * 2];
+ dstC[(y / 2) * tw + x + 1] = srcCr[(sourceX / 2) * 2];
+ }
+ }
+}
diff --git a/src/android/jpeg/thumbnailer.h b/src/android/jpeg/thumbnailer.h
new file mode 100644
index 00000000..68cbf743
--- /dev/null
+++ b/src/android/jpeg/thumbnailer.h
@@ -0,0 +1,34 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * thumbnailer.h - Simple image thumbnailer
+ */
+#ifndef __ANDROID_JPEG_THUMBNAILER_H__
+#define __ANDROID_JPEG_THUMBNAILER_H__
+
+#include <libcamera/geometry.h>
+
+#include "libcamera/internal/formats.h"
+#include "libcamera/internal/framebuffer.h"
+
+class Thumbnailer
+{
+public:
+ Thumbnailer();
+
+ void configure(const libcamera::Size &sourceSize,
+ libcamera::PixelFormat pixelFormat);
+ void createThumbnail(const libcamera::FrameBuffer &source,
+ const libcamera::Size &targetSize,
+ std::vector<unsigned char> *dest);
+ const libcamera::PixelFormat &pixelFormat() const { return pixelFormat_; }
+
+private:
+ libcamera::PixelFormat pixelFormat_;
+ libcamera::Size sourceSize_;
+
+ bool valid_;
+};
+
+#endif /* __ANDROID_JPEG_THUMBNAILER_H__ */
diff --git a/src/android/meson.build b/src/android/meson.build
index 0293c203..7d1e7e85 100644
--- a/src/android/meson.build
+++ b/src/android/meson.build
@@ -3,6 +3,8 @@
android_deps = [
dependency('libexif', required : get_option('android')),
dependency('libjpeg', required : get_option('android')),
+ dependency('yaml-0.1', required : get_option('android')),
+ libcamera_private,
]
android_enabled = true
@@ -14,20 +16,63 @@ foreach dep : android_deps
endif
endforeach
+libyuv_dep = dependency('libyuv', required : false)
+
+# Fallback to a subproject if libyuv isn't found, as it's typically not
+# provided by distributions.
+if not libyuv_dep.found()
+ cmake = import('cmake')
+
+ libyuv_vars = cmake.subproject_options()
+ libyuv_vars.add_cmake_defines({'CMAKE_POSITION_INDEPENDENT_CODE': 'ON'})
+ libyuv_vars.set_override_option('cpp_std', 'c++17')
+ libyuv_vars.append_compile_args('cpp',
+ '-Wno-sign-compare',
+ '-Wno-unused-variable',
+ '-Wno-unused-parameter')
+ libyuv_vars.append_link_args('-ljpeg')
+ libyuv = cmake.subproject('libyuv', options : libyuv_vars)
+ libyuv_dep = libyuv.dependency('yuv')
+endif
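+
+# Note: the fallback expects the libyuv sources to be available as a
+# subproject under subprojects/libyuv, for instance through a wrap file.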
+
+android_deps += [libyuv_dep]
+
android_hal_sources = files([
'camera3_hal.cpp',
- 'camera_hal_manager.cpp',
+ 'camera_capabilities.cpp',
'camera_device.cpp',
+ 'camera_hal_config.cpp',
+ 'camera_hal_manager.cpp',
'camera_metadata.cpp',
'camera_ops.cpp',
+ 'camera_stream.cpp',
+ 'camera_worker.cpp',
'jpeg/encoder_libjpeg.cpp',
'jpeg/exif.cpp',
+ 'jpeg/post_processor_jpeg.cpp',
+ 'jpeg/thumbnailer.cpp',
+ 'yuv/post_processor_yuv.cpp'
])
+android_cpp_args = []
+
+subdir('cros')
+subdir('mm')
+
android_camera_metadata_sources = files([
'metadata/camera_metadata.c',
])
android_camera_metadata = static_library('camera_metadata',
android_camera_metadata_sources,
+ c_args : '-Wno-shadow',
include_directories : android_includes)
+
+libcamera_hal = shared_library('libcamera-hal',
+ android_hal_sources,
+ name_prefix : '',
+ link_with : android_camera_metadata,
+ install : true,
+ cpp_args : android_cpp_args,
+ include_directories : android_includes,
+ dependencies : android_deps)
diff --git a/src/android/mm/cros_camera_buffer.cpp b/src/android/mm/cros_camera_buffer.cpp
new file mode 100644
index 00000000..bb55b95e
--- /dev/null
+++ b/src/android/mm/cros_camera_buffer.cpp
@@ -0,0 +1,134 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * cros_camera_buffer.cpp - Chromium OS buffer backend using CameraBufferManager
+ */
+
+#include "../camera_buffer.h"
+
+#include <libcamera/base/log.h>
+
+#include "cros-camera/camera_buffer_manager.h"
+
+using namespace libcamera;
+
+LOG_DECLARE_CATEGORY(HAL)
+
+class CameraBuffer::Private : public Extensible::Private
+{
+ LIBCAMERA_DECLARE_PUBLIC(CameraBuffer)
+
+public:
+ Private(CameraBuffer *cameraBuffer,
+ buffer_handle_t camera3Buffer, int flags);
+ ~Private();
+
+ bool isValid() const { return valid_; }
+
+ unsigned int numPlanes() const;
+
+ Span<uint8_t> plane(unsigned int plane);
+
+ size_t jpegBufferSize(size_t maxJpegBufferSize) const;
+
+private:
+ cros::CameraBufferManager *bufferManager_;
+ buffer_handle_t handle_;
+ unsigned int numPlanes_;
+ bool valid_;
+ bool registered_;
+ union {
+ void *addr;
+ android_ycbcr ycbcr;
+ } mem;
+};
+
+CameraBuffer::Private::Private(CameraBuffer *cameraBuffer,
+ buffer_handle_t camera3Buffer,
+ [[maybe_unused]] int flags)
+ : Extensible::Private(cameraBuffer), handle_(camera3Buffer),
+ numPlanes_(0), valid_(false), registered_(false)
+{
+ bufferManager_ = cros::CameraBufferManager::GetInstance();
+
+ int ret = bufferManager_->Register(camera3Buffer);
+ if (ret) {
+ LOG(HAL, Error) << "Failed registering a buffer: " << ret;
+ return;
+ }
+
+ registered_ = true;
+ numPlanes_ = bufferManager_->GetNumPlanes(camera3Buffer);
+ switch (numPlanes_) {
+ case 1: {
+ ret = bufferManager_->Lock(handle_, 0, 0, 0, 0, 0, &mem.addr);
+ if (ret) {
+ LOG(HAL, Error) << "Single plane buffer mapping failed";
+ return;
+ }
+ break;
+ }
+ case 2:
+ case 3: {
+ ret = bufferManager_->LockYCbCr(handle_, 0, 0, 0, 0, 0,
+ &mem.ycbcr);
+ if (ret) {
+ LOG(HAL, Error) << "YCbCr buffer mapping failed";
+ return;
+ }
+ break;
+ }
+ default:
+ LOG(HAL, Error) << "Invalid number of planes: " << numPlanes_;
+ return;
+ }
+
+ valid_ = true;
+}
+
+CameraBuffer::Private::~Private()
+{
+ if (valid_)
+ bufferManager_->Unlock(handle_);
+ if (registered_)
+ bufferManager_->Deregister(handle_);
+}
+
+unsigned int CameraBuffer::Private::numPlanes() const
+{
+ return bufferManager_->GetNumPlanes(handle_);
+}
+
+Span<uint8_t> CameraBuffer::Private::plane(unsigned int plane)
+{
+ void *addr;
+
+ switch (numPlanes()) {
+ case 1:
+ addr = mem.addr;
+ break;
+ default:
+ switch (plane) {
+ case 0:
+ addr = mem.ycbcr.y;
+ break;
+ case 1:
+ addr = mem.ycbcr.cb;
+ break;
+ case 2:
+ addr = mem.ycbcr.cr;
+ break;
+ default:
+ return {};
+ }
+ }
+
+ return { static_cast<uint8_t *>(addr),
+ bufferManager_->GetPlaneSize(handle_, plane) };
+}
+
+size_t CameraBuffer::Private::jpegBufferSize([[maybe_unused]] size_t maxJpegBufferSize) const
+{
+ return bufferManager_->GetPlaneSize(handle_, 0);
+}
+
+PUBLIC_CAMERA_BUFFER_IMPLEMENTATION
diff --git a/src/android/mm/generic_camera_buffer.cpp b/src/android/mm/generic_camera_buffer.cpp
new file mode 100644
index 00000000..166be36e
--- /dev/null
+++ b/src/android/mm/generic_camera_buffer.cpp
@@ -0,0 +1,91 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * generic_camera_buffer.cpp - Generic Android frame buffer backend
+ */
+
+#include "../camera_buffer.h"
+
+#include <unistd.h>
+
+#include <libcamera/base/log.h>
+
+#include "libcamera/internal/framebuffer.h"
+
+using namespace libcamera;
+
+LOG_DECLARE_CATEGORY(HAL)
+
+class CameraBuffer::Private : public Extensible::Private,
+ public libcamera::MappedBuffer
+{
+ LIBCAMERA_DECLARE_PUBLIC(CameraBuffer)
+
+public:
+ Private(CameraBuffer *cameraBuffer,
+ buffer_handle_t camera3Buffer, int flags);
+ ~Private();
+
+ unsigned int numPlanes() const;
+
+ Span<uint8_t> plane(unsigned int plane);
+
+ size_t jpegBufferSize(size_t maxJpegBufferSize) const;
+};
+
+CameraBuffer::Private::Private(CameraBuffer *cameraBuffer,
+ buffer_handle_t camera3Buffer, int flags)
+ : Extensible::Private(cameraBuffer)
+{
+ maps_.reserve(camera3Buffer->numFds);
+ error_ = 0;
+
+ for (int i = 0; i < camera3Buffer->numFds; i++) {
+ if (camera3Buffer->data[i] == -1)
+ continue;
+
+ off_t length = lseek(camera3Buffer->data[i], 0, SEEK_END);
+ if (length < 0) {
+ error_ = -errno;
+ LOG(HAL, Error) << "Failed to query plane length";
+ break;
+ }
+
+ void *address = mmap(nullptr, length, flags, MAP_SHARED,
+ camera3Buffer->data[i], 0);
+ if (address == MAP_FAILED) {
+ error_ = -errno;
+ LOG(HAL, Error) << "Failed to mmap plane";
+ break;
+ }
+
+ maps_.emplace_back(static_cast<uint8_t *>(address),
+ static_cast<size_t>(length));
+ }
+}
+
+CameraBuffer::Private::~Private()
+{
+}
+
+unsigned int CameraBuffer::Private::numPlanes() const
+{
+ return maps_.size();
+}
+
+Span<uint8_t> CameraBuffer::Private::plane(unsigned int plane)
+{
+ if (plane >= maps_.size())
+ return {};
+
+ return maps_[plane];
+}
+
+size_t CameraBuffer::Private::jpegBufferSize(size_t maxJpegBufferSize) const
+{
+ return std::min<unsigned int>(maps_[0].size(),
+ maxJpegBufferSize);
+}
+
+PUBLIC_CAMERA_BUFFER_IMPLEMENTATION
diff --git a/src/android/mm/meson.build b/src/android/mm/meson.build
new file mode 100644
index 00000000..eeb5cc2e
--- /dev/null
+++ b/src/android/mm/meson.build
@@ -0,0 +1,9 @@
+# SPDX-License-Identifier: CC0-1.0
+
+platform = get_option('android_platform')
+if platform == 'generic'
+ android_hal_sources += files(['generic_camera_buffer.cpp'])
+elif platform == 'cros'
+ android_hal_sources += files(['cros_camera_buffer.cpp'])
+ android_deps += [dependency('libcros_camera')]
+endif
diff --git a/src/android/post_processor.h b/src/android/post_processor.h
new file mode 100644
index 00000000..689f85d9
--- /dev/null
+++ b/src/android/post_processor.h
@@ -0,0 +1,32 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * post_processor.h - CameraStream Post Processing Interface
+ */
+#ifndef __ANDROID_POST_PROCESSOR_H__
+#define __ANDROID_POST_PROCESSOR_H__
+
+#include <libcamera/framebuffer.h>
+#include <libcamera/stream.h>
+
+#include "libcamera/internal/framebuffer.h"
+
+#include "camera_buffer.h"
+
+class CameraMetadata;
+
+class PostProcessor
+{
+public:
+ virtual ~PostProcessor() = default;
+
+ virtual int configure(const libcamera::StreamConfiguration &inCfg,
+ const libcamera::StreamConfiguration &outCfg) = 0;
+ virtual int process(const libcamera::FrameBuffer &source,
+ CameraBuffer *destination,
+ const CameraMetadata &requestMetadata,
+ CameraMetadata *resultMetadata) = 0;
+};
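+
+/*
+ * Implementations are driven in two steps (illustrative sketch):
+ *
+ *   std::unique_ptr<PostProcessor> pp =
+ *           std::make_unique<PostProcessorJpeg>(cameraDevice);
+ *   pp->configure(inCfg, outCfg);
+ *   pp->process(source, &dest, requestMetadata, &resultMetadata);
+ */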
+
+#endif /* __ANDROID_POST_PROCESSOR_H__ */
diff --git a/src/android/yuv/post_processor_yuv.cpp b/src/android/yuv/post_processor_yuv.cpp
new file mode 100644
index 00000000..772e805b
--- /dev/null
+++ b/src/android/yuv/post_processor_yuv.cpp
@@ -0,0 +1,143 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * post_processor_yuv.cpp - Post Processor using libyuv
+ */
+
+#include "post_processor_yuv.h"
+
+#include <libyuv/scale.h>
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/formats.h>
+#include <libcamera/geometry.h>
+#include <libcamera/pixel_format.h>
+
+#include "libcamera/internal/formats.h"
+
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(YUV)
+
+int PostProcessorYuv::configure(const StreamConfiguration &inCfg,
+ const StreamConfiguration &outCfg)
+{
+ if (inCfg.pixelFormat != outCfg.pixelFormat) {
+ LOG(YUV, Error) << "Pixel format conversion is not supported"
+ << " (from " << inCfg.pixelFormat.toString()
+ << " to " << outCfg.pixelFormat.toString() << ")";
+ return -EINVAL;
+ }
+
+ if (inCfg.size < outCfg.size) {
+ LOG(YUV, Error) << "Up-scaling is not supported"
+ << " (from " << inCfg.size.toString()
+ << " to " << outCfg.size.toString() << ")";
+ return -EINVAL;
+ }
+
+ if (inCfg.pixelFormat != formats::NV12) {
+ LOG(YUV, Error) << "Unsupported format " << inCfg.pixelFormat
+ << " (only NV12 is supported)";
+ return -EINVAL;
+ }
+
+ calculateLengths(inCfg, outCfg);
+ return 0;
+}
+
+int PostProcessorYuv::process(const FrameBuffer &source,
+ CameraBuffer *destination,
+ [[maybe_unused]] const CameraMetadata &requestMetadata,
+ [[maybe_unused]] CameraMetadata *metadata)
+{
+ if (!isValidBuffers(source, *destination))
+ return -EINVAL;
+
+ const MappedFrameBuffer sourceMapped(&source, PROT_READ);
+ if (!sourceMapped.isValid()) {
+ LOG(YUV, Error) << "Failed to mmap camera frame buffer";
+ return -EINVAL;
+ }
+
+ int ret = libyuv::NV12Scale(sourceMapped.maps()[0].data(),
+ sourceStride_[0],
+ sourceMapped.maps()[1].data(),
+ sourceStride_[1],
+ sourceSize_.width, sourceSize_.height,
+ destination->plane(0).data(),
+ destinationStride_[0],
+ destination->plane(1).data(),
+ destinationStride_[1],
+ destinationSize_.width,
+ destinationSize_.height,
+ libyuv::FilterMode::kFilterBilinear);
+ if (ret) {
+ LOG(YUV, Error) << "Failed NV12 scaling: " << ret;
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+bool PostProcessorYuv::isValidBuffers(const FrameBuffer &source,
+ const CameraBuffer &destination) const
+{
+ if (source.planes().size() != 2) {
+ LOG(YUV, Error) << "Invalid number of source planes: "
+ << source.planes().size();
+ return false;
+ }
+ if (destination.numPlanes() != 2) {
+ LOG(YUV, Error) << "Invalid number of destination planes: "
+ << destination.numPlanes();
+ return false;
+ }
+
+ if (source.planes()[0].length < sourceLength_[0] ||
+ source.planes()[1].length < sourceLength_[1]) {
+ LOG(YUV, Error)
+ << "The source planes lengths are too small, actual size: {"
+ << source.planes()[0].length << ", "
+ << source.planes()[1].length
+ << "}, expected size: {"
+ << sourceLength_[0] << ", "
+ << sourceLength_[1] << "}";
+ return false;
+ }
+ if (destination.plane(0).size() < destinationLength_[0] ||
+ destination.plane(1).size() < destinationLength_[1]) {
+ LOG(YUV, Error)
+ << "The destination planes lengths are too small, actual size: {"
+ << destination.plane(0).size() << ", "
+ << destination.plane(1).size()
+ << "}, expected size: {"
+ << sourceLength_[0] << ", "
+ << sourceLength_[1] << "}";
+ return false;
+ }
+
+ return true;
+}
+
+void PostProcessorYuv::calculateLengths(const StreamConfiguration &inCfg,
+ const StreamConfiguration &outCfg)
+{
+ sourceSize_ = inCfg.size;
+ destinationSize_ = outCfg.size;
+
+ const PixelFormatInfo &nv12Info = PixelFormatInfo::info(formats::NV12);
+ for (unsigned int i = 0; i < 2; i++) {
+ sourceStride_[i] = inCfg.stride;
+ destinationStride_[i] = nv12Info.stride(destinationSize_.width, i, 1);
+
+ const unsigned int vertSubSample =
+ nv12Info.planes[i].verticalSubSampling;
+ sourceLength_[i] = sourceStride_[i] *
+ ((sourceSize_.height + vertSubSample - 1) / vertSubSample);
+ destinationLength_[i] = destinationStride_[i] *
+ ((destinationSize_.height + vertSubSample - 1) / vertSubSample);
+ }
+}
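+
+/*
+ * Worked example (illustrative): for a 640x480 NV12 destination, plane 0
+ * (Y) has stride 640 and length 640 * 480, while plane 1 (interleaved
+ * CbCr, vertically subsampled by 2) has stride 640 and length 640 * 240.
+ */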
diff --git a/src/android/yuv/post_processor_yuv.h b/src/android/yuv/post_processor_yuv.h
new file mode 100644
index 00000000..f8b1ba23
--- /dev/null
+++ b/src/android/yuv/post_processor_yuv.h
@@ -0,0 +1,42 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * post_processor_yuv.h - Post Processor using libyuv
+ */
+#ifndef __ANDROID_POST_PROCESSOR_YUV_H__
+#define __ANDROID_POST_PROCESSOR_YUV_H__
+
+#include "../post_processor.h"
+
+#include <libcamera/geometry.h>
+
+class CameraDevice;
+
+class PostProcessorYuv : public PostProcessor
+{
+public:
+ PostProcessorYuv() = default;
+
+ int configure(const libcamera::StreamConfiguration &incfg,
+ const libcamera::StreamConfiguration &outcfg) override;
+ int process(const libcamera::FrameBuffer &source,
+ CameraBuffer *destination,
+ const CameraMetadata &requestMetadata,
+ CameraMetadata *metadata) override;
+
+private:
+ bool isValidBuffers(const libcamera::FrameBuffer &source,
+ const CameraBuffer &destination) const;
+ void calculateLengths(const libcamera::StreamConfiguration &inCfg,
+ const libcamera::StreamConfiguration &outCfg);
+
+ libcamera::Size sourceSize_;
+ libcamera::Size destinationSize_;
+ unsigned int sourceLength_[2] = {};
+ unsigned int destinationLength_[2] = {};
+ unsigned int sourceStride_[2] = {};
+ unsigned int destinationStride_[2] = {};
+};
+
+#endif /* __ANDROID_POST_PROCESSOR_YUV_H__ */
diff --git a/src/cam/buffer_writer.h b/src/cam/buffer_writer.h
index 47e26103..604ce870 100644
--- a/src/cam/buffer_writer.h
+++ b/src/cam/buffer_writer.h
@@ -10,7 +10,7 @@
#include <map>
#include <string>
-#include <libcamera/buffer.h>
+#include <libcamera/framebuffer.h>
class BufferWriter
{
diff --git a/src/cam/capture.cpp b/src/cam/capture.cpp
index 5510c009..3c3e3a53 100644
--- a/src/cam/capture.cpp
+++ b/src/cam/capture.cpp
@@ -10,6 +10,8 @@
#include <limits.h>
#include <sstream>
+#include <libcamera/control_ids.h>
+
#include "capture.h"
#include "main.h"
@@ -17,8 +19,9 @@ using namespace libcamera;
Capture::Capture(std::shared_ptr<Camera> camera, CameraConfiguration *config,
EventLoop *loop)
- : camera_(camera), config_(config), writer_(nullptr), loop_(loop),
- captureCount_(0), captureLimit_(0)
+ : camera_(camera), config_(config), writer_(nullptr), last_(0), loop_(loop),
+ queueCount_(0), captureCount_(0), captureLimit_(0),
+ printMetadata_(false)
{
}
@@ -26,8 +29,10 @@ int Capture::run(const OptionsParser::Options &options)
{
int ret;
+ queueCount_ = 0;
captureCount_ = 0;
captureLimit_ = options[OptCapture].toInteger();
+ printMetadata_ = options.isSet(OptMetadata);
if (!camera_) {
std::cout << "Can't capture without a camera" << std::endl;
@@ -65,6 +70,8 @@ int Capture::run(const OptionsParser::Options &options)
writer_ = nullptr;
}
+ requests_.clear();
+
delete allocator;
return ret;
@@ -92,9 +99,8 @@ int Capture::capture(FrameBufferAllocator *allocator)
* example pushing a button. For now run all streams all the time.
*/
- std::vector<Request *> requests;
for (unsigned int i = 0; i < nbuffers; i++) {
- Request *request = camera_->createRequest();
+ std::unique_ptr<Request> request = camera_->createRequest();
if (!request) {
std::cerr << "Can't create request" << std::endl;
return -ENOMEM;
@@ -117,7 +123,7 @@ int Capture::capture(FrameBufferAllocator *allocator)
writer_->mapBuffer(buffer.get());
}
- requests.push_back(request);
+ requests_.push_back(std::move(request));
}
ret = camera_->start();
@@ -126,8 +132,8 @@ int Capture::capture(FrameBufferAllocator *allocator)
return ret;
}
- for (Request *request : requests) {
- ret = camera_->queueRequest(request);
+ for (std::unique_ptr<Request> &request : requests_) {
+ ret = queueRequest(request.get());
if (ret < 0) {
std::cerr << "Can't queue request" << std::endl;
camera_->stop();
@@ -151,11 +157,30 @@ int Capture::capture(FrameBufferAllocator *allocator)
return ret;
}
+int Capture::queueRequest(Request *request)
+{
+ if (captureLimit_ && queueCount_ >= captureLimit_)
+ return 0;
+
+ queueCount_++;
+
+ return camera_->queueRequest(request);
+}
+
void Capture::requestComplete(Request *request)
{
if (request->status() == Request::RequestCancelled)
return;
+ /*
+ * Defer processing of the completed request to the event loop, to avoid
+ * blocking the camera manager thread.
+ */
+ loop_->callLater([=]() { processRequest(request); });
+}
+
+void Capture::processRequest(Request *request)
+{
const Request::BufferMap &buffers = request->buffers();
/*
@@ -196,28 +221,21 @@ void Capture::requestComplete(Request *request)
std::cout << info.str() << std::endl;
+ if (printMetadata_) {
+ const ControlList &requestMetadata = request->metadata();
+ for (const auto &ctrl : requestMetadata) {
+ const ControlId *id = controls::controls.at(ctrl.first);
+ std::cout << "\t" << id->name() << " = "
+ << ctrl.second.toString() << std::endl;
+ }
+ }
+
captureCount_++;
if (captureLimit_ && captureCount_ >= captureLimit_) {
loop_->exit(0);
return;
}
- /*
- * Create a new request and populate it with one buffer for each
- * stream.
- */
- request = camera_->createRequest();
- if (!request) {
- std::cerr << "Can't create request" << std::endl;
- return;
- }
-
- for (auto it = buffers.begin(); it != buffers.end(); ++it) {
- const Stream *stream = it->first;
- FrameBuffer *buffer = it->second;
-
- request->addBuffer(stream, buffer);
- }
-
- camera_->queueRequest(request);
+ request->reuse(Request::ReuseBuffers);
+ queueRequest(request);
}
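
The completed request is now recycled with Request::reuse(ReuseBuffers) instead of being destroyed and rebuilt. A sketch of the general shape of this pattern against the public libcamera API (error handling omitted for brevity):

void onRequestComplete(libcamera::Request *request, libcamera::Camera *camera)
{
	if (request->status() == libcamera::Request::RequestCancelled)
		return;

	/* ... consume request->buffers() and request->metadata() here ... */

	/* Recycle the same Request object and its buffers for the next frame. */
	request->reuse(libcamera::Request::ReuseBuffers);
	camera->queueRequest(request);
}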
diff --git a/src/cam/capture.h b/src/cam/capture.h
index 0aebdac9..de478c98 100644
--- a/src/cam/capture.h
+++ b/src/cam/capture.h
@@ -9,9 +9,10 @@
#include <memory>
#include <stdint.h>
+#include <vector>
-#include <libcamera/buffer.h>
#include <libcamera/camera.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/framebuffer_allocator.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>
@@ -31,7 +32,9 @@ public:
private:
int capture(libcamera::FrameBufferAllocator *allocator);
+ int queueRequest(libcamera::Request *request);
void requestComplete(libcamera::Request *request);
+ void processRequest(libcamera::Request *request);
std::shared_ptr<libcamera::Camera> camera_;
libcamera::CameraConfiguration *config_;
@@ -41,8 +44,12 @@ private:
uint64_t last_;
EventLoop *loop_;
+ unsigned int queueCount_;
unsigned int captureCount_;
unsigned int captureLimit_;
+ bool printMetadata_;
+
+ std::vector<std::unique_ptr<libcamera::Request>> requests_;
};
#endif /* __CAM_CAPTURE_H__ */
diff --git a/src/cam/event_loop.cpp b/src/cam/event_loop.cpp
index e8ab8617..6a4c47f2 100644
--- a/src/cam/event_loop.cpp
+++ b/src/cam/event_loop.cpp
@@ -5,35 +5,78 @@
* event_loop.cpp - cam - Event loop
*/
-#include <libcamera/event_dispatcher.h>
-
#include "event_loop.h"
-using namespace libcamera;
+#include <assert.h>
+#include <event2/event.h>
+#include <event2/thread.h>
+
+EventLoop *EventLoop::instance_ = nullptr;
-EventLoop::EventLoop(EventDispatcher *dispatcher)
- : dispatcher_(dispatcher)
+EventLoop::EventLoop()
{
+ assert(!instance_);
+
+ evthread_use_pthreads();
+ base_ = event_base_new();
+ instance_ = this;
}
EventLoop::~EventLoop()
{
+ instance_ = nullptr;
+
+ event_base_free(base_);
+ libevent_global_shutdown();
+}
+
+EventLoop *EventLoop::instance()
+{
+ return instance_;
}
int EventLoop::exec()
{
exitCode_ = -1;
- exit_.store(false, std::memory_order_release);
-
- while (!exit_.load(std::memory_order_acquire))
- dispatcher_->processEvents();
-
+ event_base_loop(base_, EVLOOP_NO_EXIT_ON_EMPTY);
return exitCode_;
}
void EventLoop::exit(int code)
{
exitCode_ = code;
- exit_.store(true, std::memory_order_release);
- dispatcher_->interrupt();
+ event_base_loopbreak(base_);
+}
+
+void EventLoop::callLater(const std::function<void()> &func)
+{
+ {
+ std::unique_lock<std::mutex> locker(lock_);
+ calls_.push_back(func);
+ }
+
+ event_base_once(base_, -1, EV_TIMEOUT, dispatchCallback, this, nullptr);
+}
+
+void EventLoop::dispatchCallback([[maybe_unused]] evutil_socket_t fd,
+ [[maybe_unused]] short flags, void *param)
+{
+ EventLoop *loop = static_cast<EventLoop *>(param);
+ loop->dispatchCall();
+}
+
+void EventLoop::dispatchCall()
+{
+ std::function<void()> call;
+
+ {
+ std::unique_lock<std::mutex> locker(lock_);
+ if (calls_.empty())
+ return;
+
+ call = calls_.front();
+ calls_.pop_front();
+ }
+
+ call();
}
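
A short usage sketch of the reworked loop: callLater() is safe to invoke from another thread, since the pending call list is protected by lock_ and event_base_once() wakes the loop thread to run dispatchCall():

EventLoop loop;

loop.callLater([&loop]() {
	/* Runs on the thread that is executing loop.exec(). */
	loop.exit(0);
});

int ret = loop.exec();	/* Returns 0 once the deferred call has run. */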
diff --git a/src/cam/event_loop.h b/src/cam/event_loop.h
index 581c7cba..ba3ba3a4 100644
--- a/src/cam/event_loop.h
+++ b/src/cam/event_loop.h
@@ -7,28 +7,39 @@
#ifndef __CAM_EVENT_LOOP_H__
#define __CAM_EVENT_LOOP_H__
-#include <atomic>
+#include <functional>
+#include <list>
+#include <mutex>
-#include <libcamera/event_notifier.h>
+#include <event2/util.h>
-namespace libcamera {
-class EventDispatcher;
-}
+struct event_base;
class EventLoop
{
public:
- EventLoop(libcamera::EventDispatcher *dispatcher);
+ EventLoop();
~EventLoop();
+ static EventLoop *instance();
+
int exec();
void exit(int code = 0);
+ void callLater(const std::function<void()> &func);
+
private:
- libcamera::EventDispatcher *dispatcher_;
+ static EventLoop *instance_;
- std::atomic<bool> exit_;
+ struct event_base *base_;
int exitCode_;
+
+ std::list<std::function<void()>> calls_;
+ std::mutex lock_;
+
+ static void dispatchCallback(evutil_socket_t fd, short flags,
+ void *param);
+ void dispatchCall();
};
#endif /* __CAM_EVENT_LOOP_H__ */
diff --git a/src/cam/main.cpp b/src/cam/main.cpp
index 244720b4..70e9f62c 100644
--- a/src/cam/main.cpp
+++ b/src/cam/main.cpp
@@ -45,12 +45,14 @@ private:
int infoConfiguration();
int run();
+ std::string const cameraName(const Camera *camera);
+
static CamApp *app_;
OptionsParser::Options options_;
CameraManager *cm_;
std::shared_ptr<Camera> camera_;
std::unique_ptr<libcamera::CameraConfiguration> config_;
- EventLoop *loop_;
+ EventLoop loop_;
bool strictFormats_;
};
@@ -58,7 +60,7 @@ private:
CamApp *CamApp::app_ = nullptr;
CamApp::CamApp()
- : cm_(nullptr), camera_(nullptr), config_(nullptr), loop_(nullptr),
+ : cm_(nullptr), camera_(nullptr), config_(nullptr),
strictFormats_(false)
{
CamApp::app_ = this;
@@ -132,16 +134,11 @@ int CamApp::init(int argc, char **argv)
std::cout << "Monitoring new hotplug and unplug events" << std::endl;
}
- loop_ = new EventLoop(cm_->eventDispatcher());
-
return 0;
}
void CamApp::cleanup()
{
- delete loop_;
- loop_ = nullptr;
-
if (camera_) {
camera_->release();
camera_.reset();
@@ -164,8 +161,7 @@ int CamApp::exec()
void CamApp::quit()
{
- if (loop_)
- loop_->exit();
+ loop_.exit();
}
int CamApp::parseOptions(int argc, char *argv[])
@@ -201,6 +197,9 @@ int CamApp::parseOptions(int argc, char *argv[])
parser.addOption(OptStrictFormats, OptionNone,
"Do not allow requested stream format(s) to be adjusted",
"strict-formats");
+ parser.addOption(OptMetadata, OptionNone,
+ "Print the metadata for completed requests",
+ "metadata");
options_ = parser.parse(argc, argv);
if (!options_.valid())
@@ -340,7 +339,7 @@ int CamApp::run()
unsigned int index = 1;
for (const std::shared_ptr<Camera> &cam : cm_->cameras()) {
- std::cout << index << ": " << cam->id() << std::endl;
+ std::cout << index << ": " << cameraName(cam.get()) << std::endl;
index++;
}
}
@@ -364,13 +363,13 @@ int CamApp::run()
}
if (options_.isSet(OptCapture)) {
- Capture capture(camera_, config_.get(), loop_);
+ Capture capture(camera_, config_.get(), &loop_);
return capture.run(options_);
}
if (options_.isSet(OptMonitor)) {
std::cout << "Press Ctrl-C to interrupt" << std::endl;
- ret = loop_->exec();
+ ret = loop_.exec();
if (ret)
std::cout << "Failed to run monitor loop" << std::endl;
}
@@ -378,6 +377,45 @@ int CamApp::run()
return 0;
}
+std::string const CamApp::cameraName(const Camera *camera)
+{
+ const ControlList &props = camera->properties();
+ bool addModel = true;
+ std::string name;
+
+ /*
+ * Construct the name from the camera location, model and ID. The model
+ * is only used if the location isn't present or is set to External.
+ */
+ if (props.contains(properties::Location)) {
+ switch (props.get(properties::Location)) {
+ case properties::CameraLocationFront:
+ addModel = false;
+ name = "Internal front camera ";
+ break;
+ case properties::CameraLocationBack:
+ addModel = false;
+ name = "Internal back camera ";
+ break;
+ case properties::CameraLocationExternal:
+ name = "External camera ";
+ break;
+ }
+ }
+
+ if (addModel && props.contains(properties::Model)) {
+ /*
+ * If the camera location is not available, use the camera model
+ * to build the camera name.
+ */
+ name = "'" + props.get(properties::Model) + "' ";
+ }
+
+ name += "(" + camera->id() + ")";
+
+ return name;
+}
+
void signalHandler([[maybe_unused]] int signal)
{
std::cout << "Exiting" << std::endl;
diff --git a/src/cam/main.h b/src/cam/main.h
index ea8dfd33..d22451f5 100644
--- a/src/cam/main.h
+++ b/src/cam/main.h
@@ -19,6 +19,7 @@ enum {
OptStream = 's',
OptListControls = 256,
OptStrictFormats = 257,
+ OptMetadata = 258,
};
#endif /* __CAM_MAIN_H__ */
diff --git a/src/cam/meson.build b/src/cam/meson.build
index 89e124fb..6234ed0a 100644
--- a/src/cam/meson.build
+++ b/src/cam/meson.build
@@ -1,5 +1,14 @@
# SPDX-License-Identifier: CC0-1.0
+libevent = dependency('libevent_pthreads', required : get_option('cam'))
+
+if not libevent.found()
+ cam_enabled = false
+ subdir_done()
+endif
+
+cam_enabled = true
+
cam_sources = files([
'buffer_writer.cpp',
'capture.cpp',
@@ -10,5 +19,9 @@ cam_sources = files([
])
cam = executable('cam', cam_sources,
- dependencies : [ libatomic, libcamera_dep ],
+ dependencies : [
+ libatomic,
+ libcamera_public,
+ libevent,
+ ],
install : true)
diff --git a/src/cam/options.cpp b/src/cam/options.cpp
index 77b3cc1f..417c3ab4 100644
--- a/src/cam/options.cpp
+++ b/src/cam/options.cpp
@@ -61,7 +61,12 @@ bool OptionsBase<T>::isSet(const T &opt) const
template<typename T>
const OptionValue &OptionsBase<T>::operator[](const T &opt) const
{
- return values_.find(opt)->second;
+ static const OptionValue empty;
+
+ auto it = values_.find(opt);
+ if (it != values_.end())
+ return it->second;
+ return empty;
}
template<typename T>
@@ -72,7 +77,7 @@ void OptionsBase<T>::invalidate()
template<typename T>
bool OptionsBase<T>::parseValue(const T &opt, const Option &option,
- const char *optarg)
+ const char *arg)
{
OptionValue value;
@@ -83,9 +88,9 @@ bool OptionsBase<T>::parseValue(const T &opt, const Option &option,
case OptionInteger:
unsigned int integer;
- if (optarg) {
+ if (arg) {
char *endptr;
- integer = strtoul(optarg, &endptr, 0);
+ integer = strtoul(arg, &endptr, 0);
if (*endptr != '\0')
return false;
} else {
@@ -96,12 +101,12 @@ bool OptionsBase<T>::parseValue(const T &opt, const Option &option,
break;
case OptionString:
- value = OptionValue(optarg ? optarg : "");
+ value = OptionValue(arg ? arg : "");
break;
case OptionKeyValue:
KeyValueParser *kvParser = option.keyValueParser;
- KeyValueParser::Options keyValues = kvParser->parse(optarg);
+ KeyValueParser::Options keyValues = kvParser->parse(arg);
if (!keyValues.valid())
return false;
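
The operator[] change above matters because dereferencing the iterator returned by std::map::find() for a missing key is undefined behaviour. A generic sketch of the pattern, with hypothetical types:

#include <map>
#include <string>

/*
 * Returning a reference to a function-local static keeps the reference
 * valid after the call, even when the key is absent.
 */
const std::string &lookup(const std::map<int, std::string> &m, int key)
{
	static const std::string empty;

	auto it = m.find(key);
	return it != m.end() ? it->second : empty;
}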
diff --git a/src/cam/options.h b/src/cam/options.h
index 18486619..f02eeca2 100644
--- a/src/cam/options.h
+++ b/src/cam/options.h
@@ -73,7 +73,7 @@ public:
{
};
- virtual ~KeyValueParser() {}
+ virtual ~KeyValueParser() = default;
bool addOption(const char *name, OptionType type, const char *help,
OptionArgument argument = ArgumentNone);
diff --git a/src/cam/stream_options.cpp b/src/cam/stream_options.cpp
index 27cc3912..c58272c2 100644
--- a/src/cam/stream_options.cpp
+++ b/src/cam/stream_options.cpp
@@ -13,7 +13,7 @@ using namespace libcamera;
StreamKeyValueParser::StreamKeyValueParser()
{
addOption("role", OptionString,
- "Role for the stream (viewfinder, video, still, stillraw)",
+ "Role for the stream (viewfinder, video, still, raw)",
ArgumentRequired);
addOption("width", OptionInteger, "Width in pixels",
ArgumentRequired);
diff --git a/src/gstreamer/gstlibcamera.cpp b/src/gstreamer/gstlibcamera.cpp
index 81c7bb19..52388b5e 100644
--- a/src/gstreamer/gstlibcamera.cpp
+++ b/src/gstreamer/gstlibcamera.cpp
@@ -24,4 +24,4 @@ plugin_init(GstPlugin *plugin)
GST_PLUGIN_DEFINE(GST_VERSION_MAJOR, GST_VERSION_MINOR,
libcamera, "libcamera capture plugin",
- plugin_init, VERSION, "LGPL", PACKAGE, "https://libcamera.org");
+ plugin_init, VERSION, "LGPL", PACKAGE, "https://libcamera.org")
diff --git a/src/gstreamer/gstlibcameraallocator.cpp b/src/gstreamer/gstlibcameraallocator.cpp
index 78ded402..7bd8ba2d 100644
--- a/src/gstreamer/gstlibcameraallocator.cpp
+++ b/src/gstreamer/gstlibcameraallocator.cpp
@@ -101,7 +101,7 @@ struct _GstLibcameraAllocator {
};
G_DEFINE_TYPE(GstLibcameraAllocator, gst_libcamera_allocator,
- GST_TYPE_DMABUF_ALLOCATOR);
+ GST_TYPE_DMABUF_ALLOCATOR)
static gboolean
gst_libcamera_allocator_release(GstMiniObject *mini_object)
@@ -183,13 +183,15 @@ gst_libcamera_allocator_class_init(GstLibcameraAllocatorClass *klass)
}
GstLibcameraAllocator *
-gst_libcamera_allocator_new(std::shared_ptr<Camera> camera)
+gst_libcamera_allocator_new(std::shared_ptr<Camera> camera,
+ CameraConfiguration *config_)
{
auto *self = GST_LIBCAMERA_ALLOCATOR(g_object_new(GST_TYPE_LIBCAMERA_ALLOCATOR,
nullptr));
self->fb_allocator = new FrameBufferAllocator(camera);
- for (Stream *stream : camera->streams()) {
+ for (StreamConfiguration &streamCfg : *config_) {
+ Stream *stream = streamCfg.stream();
gint ret;
ret = self->fb_allocator->allocate(stream);
diff --git a/src/gstreamer/gstlibcameraallocator.h b/src/gstreamer/gstlibcameraallocator.h
index befdcad6..0dbd00d0 100644
--- a/src/gstreamer/gstlibcameraallocator.h
+++ b/src/gstreamer/gstlibcameraallocator.h
@@ -12,13 +12,15 @@
#include <gst/gst.h>
#include <gst/allocators/allocators.h>
+#include <libcamera/camera.h>
#include <libcamera/stream.h>
#define GST_TYPE_LIBCAMERA_ALLOCATOR gst_libcamera_allocator_get_type()
G_DECLARE_FINAL_TYPE(GstLibcameraAllocator, gst_libcamera_allocator,
GST_LIBCAMERA, ALLOCATOR, GstDmaBufAllocator)
-GstLibcameraAllocator *gst_libcamera_allocator_new(std::shared_ptr<libcamera::Camera> camera);
+GstLibcameraAllocator *gst_libcamera_allocator_new(std::shared_ptr<libcamera::Camera> camera,
+ libcamera::CameraConfiguration *config_);
bool gst_libcamera_allocator_prepare_buffer(GstLibcameraAllocator *self,
libcamera::Stream *stream,
diff --git a/src/gstreamer/gstlibcamerapad.cpp b/src/gstreamer/gstlibcamerapad.cpp
index 9f3e2be5..c00e81c8 100644
--- a/src/gstreamer/gstlibcamerapad.cpp
+++ b/src/gstreamer/gstlibcamerapad.cpp
@@ -27,7 +27,7 @@ enum {
PROP_STREAM_ROLE
};
-G_DEFINE_TYPE(GstLibcameraPad, gst_libcamera_pad, GST_TYPE_PAD);
+G_DEFINE_TYPE(GstLibcameraPad, gst_libcamera_pad, GST_TYPE_PAD)
static void
gst_libcamera_pad_set_property(GObject *object, guint prop_id,
diff --git a/src/gstreamer/gstlibcamerapool.cpp b/src/gstreamer/gstlibcamerapool.cpp
index b756ee35..1fde4213 100644
--- a/src/gstreamer/gstlibcamerapool.cpp
+++ b/src/gstreamer/gstlibcamerapool.cpp
@@ -29,7 +29,7 @@ struct _GstLibcameraPool {
Stream *stream;
};
-G_DEFINE_TYPE(GstLibcameraPool, gst_libcamera_pool, GST_TYPE_BUFFER_POOL);
+G_DEFINE_TYPE(GstLibcameraPool, gst_libcamera_pool, GST_TYPE_BUFFER_POOL)
static GstFlowReturn
gst_libcamera_pool_acquire_buffer(GstBufferPool *pool, GstBuffer **buffer,
@@ -40,8 +40,10 @@ gst_libcamera_pool_acquire_buffer(GstBufferPool *pool, GstBuffer **buffer,
if (!buf)
return GST_FLOW_ERROR;
- if (!gst_libcamera_allocator_prepare_buffer(self->allocator, self->stream, buf))
+ if (!gst_libcamera_allocator_prepare_buffer(self->allocator, self->stream, buf)) {
+ gst_atomic_queue_push(self->queue, buf);
return GST_FLOW_ERROR;
+ }
*buffer = buf;
return GST_FLOW_OK;
diff --git a/src/gstreamer/gstlibcameraprovider.cpp b/src/gstreamer/gstlibcameraprovider.cpp
index cd850d81..29da6c32 100644
--- a/src/gstreamer/gstlibcameraprovider.cpp
+++ b/src/gstreamer/gstlibcameraprovider.cpp
@@ -35,14 +35,14 @@ enum {
#define GST_TYPE_LIBCAMERA_DEVICE gst_libcamera_device_get_type()
G_DECLARE_FINAL_TYPE(GstLibcameraDevice, gst_libcamera_device,
- GST_LIBCAMERA, DEVICE, GstDevice);
+ GST_LIBCAMERA, DEVICE, GstDevice)
struct _GstLibcameraDevice {
GstDevice parent;
gchar *name;
};
-G_DEFINE_TYPE(GstLibcameraDevice, gst_libcamera_device, GST_TYPE_DEVICE);
+G_DEFINE_TYPE(GstLibcameraDevice, gst_libcamera_device, GST_TYPE_DEVICE)
static GstElement *
gst_libcamera_device_create_element(GstDevice *device, const gchar *name)
@@ -101,7 +101,7 @@ gst_libcamera_device_finalize(GObject *object)
g_free(self->name);
- G_OBJECT_GET_CLASS(klass)->finalize(object);
+ G_OBJECT_CLASS(klass)->finalize(object);
}
static void
@@ -164,7 +164,7 @@ struct _GstLibcameraProvider {
G_DEFINE_TYPE_WITH_CODE(GstLibcameraProvider, gst_libcamera_provider,
GST_TYPE_DEVICE_PROVIDER,
GST_DEBUG_CATEGORY_INIT(provider_debug, "libcamera-provider", 0,
- "libcamera Device Provider"));
+ "libcamera Device Provider"))
static GList *
gst_libcamera_provider_probe(GstDeviceProvider *provider)
@@ -218,7 +218,7 @@ gst_libcamera_provider_finalize(GObject *object)
delete self->cm;
- return G_OBJECT_GET_CLASS(klass)->finalize(object);
+ return G_OBJECT_CLASS(klass)->finalize(object);
}
static void
diff --git a/src/gstreamer/gstlibcamerasrc.cpp b/src/gstreamer/gstlibcamerasrc.cpp
index 1bfc2e2f..ea53c2b5 100644
--- a/src/gstreamer/gstlibcamerasrc.cpp
+++ b/src/gstreamer/gstlibcamerasrc.cpp
@@ -52,19 +52,18 @@ GST_DEBUG_CATEGORY_STATIC(source_debug);
#define GST_CAT_DEFAULT source_debug
struct RequestWrap {
- RequestWrap(Request *request);
+ RequestWrap(std::unique_ptr<Request> request);
~RequestWrap();
void attachBuffer(GstBuffer *buffer);
GstBuffer *detachBuffer(Stream *stream);
- /* For ptr comparison only. */
- Request *request_;
+ std::unique_ptr<Request> request_;
std::map<Stream *, GstBuffer *> buffers_;
};
-RequestWrap::RequestWrap(Request *request)
- : request_(request)
+RequestWrap::RequestWrap(std::unique_ptr<Request> request)
+ : request_(std::move(request))
{
}
@@ -138,7 +137,7 @@ enum {
G_DEFINE_TYPE_WITH_CODE(GstLibcameraSrc, gst_libcamera_src, GST_TYPE_ELEMENT,
GST_DEBUG_CATEGORY_INIT(source_debug, "libcamerasrc", 0,
- "libcamera Source"));
+ "libcamera Source"))
#define TEMPLATE_CAPS GST_STATIC_CAPS("video/x-raw; image/jpeg")
@@ -162,7 +161,7 @@ GstLibcameraSrcState::requestCompleted(Request *request)
std::unique_ptr<RequestWrap> wrap = std::move(requests_.front());
requests_.pop();
- g_return_if_fail(wrap->request_ == request);
+ g_return_if_fail(wrap->request_.get() == request);
if ((request->status() == Request::RequestCancelled)) {
GST_DEBUG_OBJECT(src_, "Request was cancelled");
@@ -266,8 +265,19 @@ gst_libcamera_src_task_run(gpointer user_data)
GstLibcameraSrc *self = GST_LIBCAMERA_SRC(user_data);
GstLibcameraSrcState *state = self->state;
- Request *request = state->cam_->createRequest();
- auto wrap = std::make_unique<RequestWrap>(request);
+ std::unique_ptr<Request> request = state->cam_->createRequest();
+ if (!request) {
+ GST_ELEMENT_ERROR(self, RESOURCE, NO_SPACE_LEFT,
+ ("Failed to allocate request for camera '%s'.",
+ state->cam_->id().c_str()),
+ ("libcamera::Camera::createRequest() failed"));
+ gst_task_stop(self->task);
+ return;
+ }
+
+ std::unique_ptr<RequestWrap> wrap =
+ std::make_unique<RequestWrap>(std::move(request));
+
for (GstPad *srcpad : state->srcpads_) {
GstLibcameraPool *pool = gst_libcamera_pad_get_pool(srcpad);
GstBuffer *buffer;
@@ -277,22 +287,23 @@ gst_libcamera_src_task_run(gpointer user_data)
&buffer, nullptr);
if (ret != GST_FLOW_OK) {
/*
- * RequestWrap does not take ownership, and we won't be
- * queueing this one due to lack of buffers.
+ * RequestWrap has ownership of the request, and we
+ * won't be queueing this one due to lack of buffers.
*/
- delete request;
- request = nullptr;
+ wrap.reset();
break;
}
wrap->attachBuffer(buffer);
}
- if (request) {
+ if (wrap) {
GLibLocker lock(GST_OBJECT(self));
GST_TRACE_OBJECT(self, "Requesting buffers");
- state->cam_->queueRequest(request);
+ state->cam_->queueRequest(wrap->request_.get());
state->requests_.push(std::move(wrap));
+
+ /* The RequestWrap will be deleted in the completion handler. */
}
GstFlowReturn ret = GST_FLOW_OK;
@@ -350,10 +361,12 @@ gst_libcamera_src_task_enter(GstTask *task, [[maybe_unused]] GThread *thread,
GST_DEBUG_OBJECT(self, "Streaming thread has started");
guint group_id = gst_util_group_id_next();
+ gint stream_id_num = 0;
StreamRoles roles;
for (GstPad *srcpad : state->srcpads_) {
/* Create stream-id and push stream-start. */
- g_autofree gchar *stream_id = gst_pad_create_stream_id(srcpad, GST_ELEMENT(self), nullptr);
+ g_autofree gchar *stream_id_intermediate = g_strdup_printf("%i%i", group_id, stream_id_num++);
+ g_autofree gchar *stream_id = gst_pad_create_stream_id(srcpad, GST_ELEMENT(self), stream_id_intermediate);
GstEvent *event = gst_event_new_stream_start(stream_id);
gst_event_set_group_id(event, group_id);
gst_pad_push_event(srcpad, event);
@@ -364,10 +377,13 @@ gst_libcamera_src_task_enter(GstTask *task, [[maybe_unused]] GThread *thread,
/* Generate the stream configurations, there should be one per pad. */
state->config_ = state->cam_->generateConfiguration(roles);
- /*
- * \todo Check if camera may increase or decrease the number of streams
- * regardless of the number of roles.
- */
+ if (state->config_ == nullptr) {
+ GST_ELEMENT_ERROR(self, RESOURCE, SETTINGS,
+ ("Failed to generate camera configuration from roles"),
+ ("Camera::generateConfiguration() returned nullptr"));
+ gst_task_stop(task);
+ return;
+ }
g_assert(state->config_->size() == state->srcpads_.size());
for (gsize i = 0; i < state->srcpads_.size(); i++) {
@@ -425,7 +441,7 @@ gst_libcamera_src_task_enter(GstTask *task, [[maybe_unused]] GThread *thread,
return;
}
- self->allocator = gst_libcamera_allocator_new(state->cam_);
+ self->allocator = gst_libcamera_allocator_new(state->cam_, state->config_.get());
if (!self->allocator) {
GST_ELEMENT_ERROR(self, RESOURCE, NO_SPACE_LEFT,
("Failed to allocate memory"),
@@ -495,6 +511,8 @@ gst_libcamera_src_close(GstLibcameraSrc *self)
GST_DEBUG_OBJECT(self, "Releasing resources");
+ state->config_.reset();
+
ret = state->cam_->release();
if (ret) {
GST_ELEMENT_WARNING(self, RESOURCE, BUSY,
@@ -624,6 +642,53 @@ gst_libcamera_src_init(GstLibcameraSrc *self)
self->state = state;
}
+static GstPad *
+gst_libcamera_src_request_new_pad(GstElement *element, GstPadTemplate *templ,
+ const gchar *name, [[maybe_unused]] const GstCaps *caps)
+{
+ GstLibcameraSrc *self = GST_LIBCAMERA_SRC(element);
+ g_autoptr(GstPad) pad = NULL;
+
+ GST_DEBUG_OBJECT(self, "new request pad created");
+
+ pad = gst_pad_new_from_template(templ, name);
+ g_object_ref_sink(pad);
+
+ if (gst_element_add_pad(element, pad)) {
+ GLibLocker lock(GST_OBJECT(self));
+ self->state->srcpads_.push_back(reinterpret_cast<GstPad *>(g_object_ref(pad)));
+ } else {
+ GST_ELEMENT_ERROR(element, STREAM, FAILED,
+ ("Internal data stream error."),
+ ("Could not add pad to element"));
+ return NULL;
+ }
+
+ return reinterpret_cast<GstPad *>(g_steal_pointer(&pad));
+}
+
+static void
+gst_libcamera_src_release_pad(GstElement *element, GstPad *pad)
+{
+ GstLibcameraSrc *self = GST_LIBCAMERA_SRC(element);
+
+ GST_DEBUG_OBJECT(self, "Pad %" GST_PTR_FORMAT " being released", pad);
+
+ {
+ GLibLocker lock(GST_OBJECT(self));
+ std::vector<GstPad *> &pads = self->state->srcpads_;
+ auto begin_iterator = pads.begin();
+ auto end_iterator = pads.end();
+ auto pad_iterator = std::find(begin_iterator, end_iterator, pad);
+
+ if (pad_iterator != end_iterator) {
+ g_object_unref(*pad_iterator);
+ pads.erase(pad_iterator);
+ }
+ }
+ gst_element_remove_pad(element, pad);
+}
+
static void
gst_libcamera_src_class_init(GstLibcameraSrcClass *klass)
{
@@ -634,6 +699,8 @@ gst_libcamera_src_class_init(GstLibcameraSrcClass *klass)
object_class->get_property = gst_libcamera_src_get_property;
object_class->finalize = gst_libcamera_src_finalize;
+ element_class->request_new_pad = gst_libcamera_src_request_new_pad;
+ element_class->release_pad = gst_libcamera_src_release_pad;
element_class->change_state = gst_libcamera_src_change_state;
gst_element_class_set_metadata(element_class,
diff --git a/src/gstreamer/meson.build b/src/gstreamer/meson.build
index c9f0c13d..77c79140 100644
--- a/src/gstreamer/meson.build
+++ b/src/gstreamer/meson.build
@@ -1,5 +1,20 @@
# SPDX-License-Identifier: CC0-1.0
+glib_dep = dependency('glib-2.0', required : get_option('gstreamer'))
+
+gst_dep_version = '>=1.14.0'
+gstvideo_dep = dependency('gstreamer-video-1.0', version : gst_dep_version,
+ required : get_option('gstreamer'))
+gstallocator_dep = dependency('gstreamer-allocators-1.0', version : gst_dep_version,
+ required : get_option('gstreamer'))
+
+if not glib_dep.found() or not gstvideo_dep.found() or not gstallocator_dep.found()
+ gst_enabled = false
+ subdir_done()
+endif
+
+gst_enabled = true
+
libcamera_gst_sources = [
'gstlibcamera-utils.cpp',
'gstlibcamera.cpp',
@@ -13,30 +28,21 @@ libcamera_gst_sources = [
libcamera_gst_cpp_args = [
'-DVERSION="@0@"'.format(libcamera_git_version),
'-DPACKAGE="@0@"'.format(meson.project_name()),
+ '-DGLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_40',
]
-glib_dep = dependency('glib-2.0', required : get_option('gstreamer'))
-
-gst_dep_version = '>=1.14.0'
-gstvideo_dep = dependency('gstreamer-video-1.0', version : gst_dep_version,
- required : get_option('gstreamer'))
-gstallocator_dep = dependency('gstreamer-allocators-1.0', version : gst_dep_version,
- required : get_option('gstreamer'))
-
-if glib_dep.found() and gstvideo_dep.found() and gstallocator_dep.found()
- # The G_DECLARE_FINAL_TYPE macro creates static inline functions that were
- # not marked as possibly unused prior to GLib v2.63.0. This causes clang to
- # complain about the ones we are not using. Silence the -Wunused-function
- # warning in that case.
- if cc.get_id() == 'clang' and glib_dep.version().version_compare('<2.63.0')
- libcamera_gst_cpp_args += [ '-Wno-unused-function' ]
- endif
-
- libcamera_gst = shared_library('gstlibcamera',
- libcamera_gst_sources,
- cpp_args : libcamera_gst_cpp_args,
- dependencies : [libcamera_dep, gstvideo_dep, gstallocator_dep],
- install: true,
- install_dir : '@0@/gstreamer-1.0'.format(get_option('libdir')),
- )
+# The G_DECLARE_FINAL_TYPE macro creates static inline functions that were
+# not marked as possibly unused prior to GLib v2.63.0. This causes clang to
+# complain about the ones we are not using. Silence the -Wunused-function
+# warning in that case.
+if cc.get_id() == 'clang' and glib_dep.version().version_compare('<2.63.0')
+ libcamera_gst_cpp_args += ['-Wno-unused-function']
endif
+
+libcamera_gst = shared_library('gstlibcamera',
+ libcamera_gst_sources,
+ cpp_args : libcamera_gst_cpp_args,
+ dependencies : [libcamera_public, gstvideo_dep, gstallocator_dep],
+ install: true,
+ install_dir : '@0@/gstreamer-1.0'.format(get_option('libdir')),
+)
diff --git a/src/ipa/ipu3/ipu3.cpp b/src/ipa/ipu3/ipu3.cpp
new file mode 100644
index 00000000..71698d36
--- /dev/null
+++ b/src/ipa/ipu3/ipu3.cpp
@@ -0,0 +1,349 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * ipu3.cpp - IPU3 Image Processing Algorithms
+ */
+
+#include <stdint.h>
+#include <sys/mman.h>
+
+#include <linux/intel-ipu3.h>
+#include <linux/v4l2-controls.h>
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/control_ids.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/ipa/ipa_interface.h>
+#include <libcamera/ipa/ipa_module_info.h>
+#include <libcamera/ipa/ipu3_ipa_interface.h>
+#include <libcamera/request.h>
+
+#include "libcamera/internal/framebuffer.h"
+
+#include "ipu3_agc.h"
+#include "ipu3_awb.h"
+#include "libipa/camera_sensor_helper.h"
+
+static constexpr uint32_t kMaxCellWidthPerSet = 160;
+static constexpr uint32_t kMaxCellHeightPerSet = 56;
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(IPAIPU3)
+
+namespace ipa::ipu3 {
+
+class IPAIPU3 : public IPAIPU3Interface
+{
+public:
+ int init(const IPASettings &settings) override;
+ int start() override;
+ void stop() override {}
+
+ int configure(const IPAConfigInfo &configInfo) override;
+
+ void mapBuffers(const std::vector<IPABuffer> &buffers) override;
+ void unmapBuffers(const std::vector<unsigned int> &ids) override;
+ void processEvent(const IPU3Event &event) override;
+
+private:
+ void processControls(unsigned int frame, const ControlList &controls);
+ void fillParams(unsigned int frame, ipu3_uapi_params *params);
+ void parseStatistics(unsigned int frame,
+ int64_t frameTimestamp,
+ const ipu3_uapi_stats_3a *stats);
+
+ void setControls(unsigned int frame);
+ void calculateBdsGrid(const Size &bdsOutputSize);
+
+ std::map<unsigned int, MappedFrameBuffer> buffers_;
+
+ ControlInfoMap ctrls_;
+
+ IPACameraSensorInfo sensorInfo_;
+
+ /* Camera sensor controls. */
+ uint32_t defVBlank_;
+ uint32_t exposure_;
+ uint32_t minExposure_;
+ uint32_t maxExposure_;
+ uint32_t gain_;
+ uint32_t minGain_;
+ uint32_t maxGain_;
+
+ /* Interface to the AWB algorithm */
+ std::unique_ptr<IPU3Awb> awbAlgo_;
+ /* Interface to the AEC/AGC algorithm */
+ std::unique_ptr<IPU3Agc> agcAlgo_;
+ /* Interface to the Camera Helper */
+ std::unique_ptr<CameraSensorHelper> camHelper_;
+
+ /* Local parameter storage */
+ struct ipu3_uapi_params params_;
+
+ struct ipu3_uapi_grid_config bdsGrid_;
+};
+
+int IPAIPU3::init(const IPASettings &settings)
+{
+ camHelper_ = CameraSensorHelperFactory::create(settings.sensorModel);
+ if (camHelper_ == nullptr) {
+ LOG(IPAIPU3, Error) << "Failed to create camera sensor helper for " << settings.sensorModel;
+ return -ENODEV;
+ }
+
+ return 0;
+}
+
+int IPAIPU3::start()
+{
+ setControls(0);
+
+ return 0;
+}
+
+/**
+ * This function calculates a grid for the AWB algorithm in the IPU3 firmware.
+ * Its input is the BDS output size calculated in the ImgU.
+ * It is limited for now to the simplest method: find the cell width/height
+ * (and their respective log2 values) that minimise the error against the
+ * BDS output size.
+ *
+ * \todo The frame is divided into cells whose size can range from 8x8 to
+ * 128x128 pixels. As a smaller cell improves the algorithm precision,
+ * adapting the x_start and y_start parameters of the grid would sacrifice
+ * some pixels but would also make the algorithms more accurate.
+ */
+void IPAIPU3::calculateBdsGrid(const Size &bdsOutputSize)
+{
+ uint32_t minError = std::numeric_limits<uint32_t>::max();
+ Size best;
+ Size bestLog2;
+ bdsGrid_ = {};
+
+ for (uint32_t widthShift = 3; widthShift <= 7; ++widthShift) {
+ uint32_t width = std::min(kMaxCellWidthPerSet,
+ bdsOutputSize.width >> widthShift);
+ width = width << widthShift;
+ for (uint32_t heightShift = 3; heightShift <= 7; ++heightShift) {
+ int32_t height = std::min(kMaxCellHeightPerSet,
+ bdsOutputSize.height >> heightShift);
+ height = height << heightShift;
+ uint32_t error = std::abs(static_cast<int>(width - bdsOutputSize.width))
+ + std::abs(static_cast<int>(height - bdsOutputSize.height));
+
+ if (error > minError)
+ continue;
+
+ minError = error;
+ best.width = width;
+ best.height = height;
+ bestLog2.width = widthShift;
+ bestLog2.height = heightShift;
+ }
+ }
+
+ bdsGrid_.width = best.width >> bestLog2.width;
+ bdsGrid_.block_width_log2 = bestLog2.width;
+ bdsGrid_.height = best.height >> bestLog2.height;
+ bdsGrid_.block_height_log2 = bestLog2.height;
+
+ LOG(IPAIPU3, Debug) << "Best grid found is: ("
+ << (int)bdsGrid_.width << " << " << (int)bdsGrid_.block_width_log2 << ") x ("
+ << (int)bdsGrid_.height << " << " << (int)bdsGrid_.block_height_log2 << ")";
+}
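
To make the search concrete, here is a standalone sketch (illustrative, not part of the patch) of the same brute-force loop. For a hypothetical 1280x720 BDS output it settles on a 10x45 grid of 128x16 cells (log2 sizes 7 and 4), because ties on the error are resolved in favour of the later, larger shifts:

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <limits>

int main()
{
	const uint32_t maxCellsX = 160, maxCellsY = 56; /* kMaxCell*PerSet */
	const uint32_t outW = 1280, outH = 720;         /* hypothetical BDS size */
	uint32_t minError = std::numeric_limits<uint32_t>::max();
	uint32_t bestW = 0, bestH = 0, logW = 0, logH = 0;

	for (uint32_t ws = 3; ws <= 7; ws++) {
		uint32_t w = std::min(maxCellsX, outW >> ws) << ws;
		for (uint32_t hs = 3; hs <= 7; hs++) {
			uint32_t h = std::min(maxCellsY, outH >> hs) << hs;
			uint32_t error = (w > outW ? w - outW : outW - w)
				       + (h > outH ? h - outH : outH - h);
			if (error > minError)
				continue;
			minError = error;
			bestW = w; bestH = h; logW = ws; logH = hs;
		}
	}

	printf("grid: %u x %u cells of %u x %u pixels\n",
	       bestW >> logW, bestH >> logH, 1u << logW, 1u << logH);
	return 0;
}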
+
+int IPAIPU3::configure(const IPAConfigInfo &configInfo)
+{
+ if (configInfo.entityControls.empty()) {
+ LOG(IPAIPU3, Error) << "No controls provided";
+ return -ENODATA;
+ }
+
+ sensorInfo_ = configInfo.sensorInfo;
+
+ ctrls_ = configInfo.entityControls.at(0);
+
+ const auto itExp = ctrls_.find(V4L2_CID_EXPOSURE);
+ if (itExp == ctrls_.end()) {
+ LOG(IPAIPU3, Error) << "Can't find exposure control";
+ return -EINVAL;
+ }
+
+ const auto itGain = ctrls_.find(V4L2_CID_ANALOGUE_GAIN);
+ if (itGain == ctrls_.end()) {
+ LOG(IPAIPU3, Error) << "Can't find gain control";
+ return -EINVAL;
+ }
+
+ const auto itVBlank = ctrls_.find(V4L2_CID_VBLANK);
+ if (itVBlank == ctrls_.end()) {
+ LOG(IPAIPU3, Error) << "Can't find VBLANK control";
+ return -EINVAL;
+ }
+
+ minExposure_ = std::max(itExp->second.min().get<int32_t>(), 1);
+ maxExposure_ = itExp->second.max().get<int32_t>();
+ exposure_ = minExposure_;
+
+ minGain_ = std::max(itGain->second.min().get<int32_t>(), 1);
+ maxGain_ = itGain->second.max().get<int32_t>();
+ gain_ = minGain_;
+
+ defVBlank_ = itVBlank->second.def().get<int32_t>();
+
+ params_ = {};
+
+ calculateBdsGrid(configInfo.bdsOutputSize);
+
+ awbAlgo_ = std::make_unique<IPU3Awb>();
+ awbAlgo_->initialise(params_, configInfo.bdsOutputSize, bdsGrid_);
+
+ agcAlgo_ = std::make_unique<IPU3Agc>();
+ agcAlgo_->initialise(bdsGrid_, sensorInfo_);
+
+ return 0;
+}
+
+void IPAIPU3::mapBuffers(const std::vector<IPABuffer> &buffers)
+{
+ for (const IPABuffer &buffer : buffers) {
+ const FrameBuffer fb(buffer.planes);
+ buffers_.emplace(buffer.id,
+ MappedFrameBuffer(&fb, PROT_READ | PROT_WRITE));
+ }
+}
+
+void IPAIPU3::unmapBuffers(const std::vector<unsigned int> &ids)
+{
+ for (unsigned int id : ids) {
+ auto it = buffers_.find(id);
+ if (it == buffers_.end())
+ continue;
+
+ buffers_.erase(it);
+ }
+}
+
+void IPAIPU3::processEvent(const IPU3Event &event)
+{
+ switch (event.op) {
+ case EventProcessControls: {
+ processControls(event.frame, event.controls);
+ break;
+ }
+ case EventStatReady: {
+ auto it = buffers_.find(event.bufferId);
+ if (it == buffers_.end()) {
+ LOG(IPAIPU3, Error) << "Could not find stats buffer!";
+ return;
+ }
+
+ Span<uint8_t> mem = it->second.maps()[0];
+ const ipu3_uapi_stats_3a *stats =
+ reinterpret_cast<ipu3_uapi_stats_3a *>(mem.data());
+
+ parseStatistics(event.frame, event.frameTimestamp, stats);
+ break;
+ }
+ case EventFillParams: {
+ auto it = buffers_.find(event.bufferId);
+ if (it == buffers_.end()) {
+ LOG(IPAIPU3, Error) << "Could not find param buffer!";
+ return;
+ }
+
+ Span<uint8_t> mem = it->second.maps()[0];
+ ipu3_uapi_params *params =
+ reinterpret_cast<ipu3_uapi_params *>(mem.data());
+
+ fillParams(event.frame, params);
+ break;
+ }
+ default:
+ LOG(IPAIPU3, Error) << "Unknown event " << event.op;
+ break;
+ }
+}
+
+void IPAIPU3::processControls([[maybe_unused]] unsigned int frame,
+ [[maybe_unused]] const ControlList &controls)
+{
+ /* \todo Start processing for 'frame' based on 'controls'. */
+}
+
+void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params)
+{
+ if (agcAlgo_->updateControls())
+ awbAlgo_->updateWbParameters(params_, agcAlgo_->gamma());
+
+ *params = params_;
+
+ IPU3Action op;
+ op.op = ActionParamFilled;
+
+ queueFrameAction.emit(frame, op);
+}
+
+void IPAIPU3::parseStatistics(unsigned int frame,
+ [[maybe_unused]] int64_t frameTimestamp,
+ [[maybe_unused]] const ipu3_uapi_stats_3a *stats)
+{
+ ControlList ctrls(controls::controls);
+
+ double gain = camHelper_->gain(gain_);
+ agcAlgo_->process(stats, exposure_, gain);
+ gain_ = camHelper_->gainCode(gain);
+
+ awbAlgo_->calculateWBGains(stats);
+
+ if (agcAlgo_->updateControls())
+ setControls(frame);
+
+ /* \todo Use VBlank value calculated from each frame exposure. */
+ int64_t frameDuration = sensorInfo_.lineLength * (defVBlank_ + sensorInfo_.outputSize.height) /
+ (sensorInfo_.pixelRate / 1e6);
+ ctrls.set(controls::FrameDuration, frameDuration);
+
+ IPU3Action op;
+ op.op = ActionMetadataReady;
+ op.controls = ctrls;
+
+ queueFrameAction.emit(frame, op);
+}
+
+void IPAIPU3::setControls(unsigned int frame)
+{
+ IPU3Action op;
+ op.op = ActionSetSensorControls;
+
+ ControlList ctrls(ctrls_);
+ ctrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_));
+ ctrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_));
+ op.controls = ctrls;
+
+ queueFrameAction.emit(frame, op);
+}
+
+} /* namespace ipa::ipu3 */
+
+/*
+ * External IPA module interface
+ */
+
+extern "C" {
+const struct IPAModuleInfo ipaModuleInfo = {
+ IPA_MODULE_API_VERSION,
+ 1,
+ "PipelineHandlerIPU3",
+ "ipu3",
+};
+
+IPAInterface *ipaCreate()
+{
+ return new ipa::ipu3::IPAIPU3();
+}
+}
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/ipu3_agc.cpp b/src/ipa/ipu3/ipu3_agc.cpp
new file mode 100644
index 00000000..6253ab94
--- /dev/null
+++ b/src/ipa/ipu3/ipu3_agc.cpp
@@ -0,0 +1,205 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * ipu3_agc.cpp - AGC/AEC control algorithm
+ */
+
+#include "ipu3_agc.h"
+
+#include <algorithm>
+#include <cmath>
+#include <numeric>
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/ipa/core_ipa_interface.h>
+
+#include "libipa/histogram.h"
+
+namespace libcamera {
+
+using namespace std::literals::chrono_literals;
+
+namespace ipa::ipu3 {
+
+LOG_DEFINE_CATEGORY(IPU3Agc)
+
+/* Number of frames to wait before calculating stats on minimum exposure */
+static constexpr uint32_t kInitialFrameMinAECount = 4;
+/* Number of frames to wait between new gain/exposure estimations */
+static constexpr uint32_t kFrameSkipCount = 6;
+
+/* Minimum and maximum ISO values for analogue gain */
+static constexpr uint32_t kMinISO = 100;
+static constexpr uint32_t kMaxISO = 1500;
+
+/* Minimum and maximum analogue gain values
+ * \todo Grab them from a camera helper */
+static constexpr uint32_t kMinGain = kMinISO / 100;
+static constexpr uint32_t kMaxGain = kMaxISO / 100;
+
+/* \todo Use values calculated based on the sensor */
+static constexpr uint32_t kMinExposure = 1;
+static constexpr uint32_t kMaxExposure = 1976;
+
+/* Histogram constants */
+static constexpr uint32_t knumHistogramBins = 256;
+static constexpr double kEvGainTarget = 0.5;
+
+/* A cell is 8 bytes and contains averages for RGB values and saturation ratio */
+static constexpr uint8_t kCellSize = 8;
+
+IPU3Agc::IPU3Agc()
+ : frameCount_(0), lastFrame_(0), converged_(false),
+ updateControls_(false), iqMean_(0.0), gamma_(1.0),
+ lineDuration_(0s), maxExposureTime_(0s),
+ prevExposure_(0s), prevExposureNoDg_(0s),
+ currentExposure_(0s), currentExposureNoDg_(0s)
+{
+}
+
+void IPU3Agc::initialise(struct ipu3_uapi_grid_config &bdsGrid, const IPACameraSensorInfo &sensorInfo)
+{
+ aeGrid_ = bdsGrid;
+
+ lineDuration_ = sensorInfo.lineLength * 1.0s / sensorInfo.pixelRate;
+ maxExposureTime_ = kMaxExposure * lineDuration_;
+}
+
+void IPU3Agc::processBrightness(const ipu3_uapi_stats_3a *stats)
+{
+ const struct ipu3_uapi_grid_config statsAeGrid = stats->stats_4a_config.awb_config.grid;
+ Rectangle aeRegion = { statsAeGrid.x_start,
+ statsAeGrid.y_start,
+ static_cast<unsigned int>(statsAeGrid.x_end - statsAeGrid.x_start) + 1,
+ static_cast<unsigned int>(statsAeGrid.y_end - statsAeGrid.y_start) + 1 };
+ Point topleft = aeRegion.topLeft();
+ int topleftX = topleft.x >> aeGrid_.block_width_log2;
+ int topleftY = topleft.y >> aeGrid_.block_height_log2;
+
+ /* Align to the grid cell width and height */
+ uint32_t startX = topleftX << aeGrid_.block_width_log2;
+ uint32_t startY = topleftY * aeGrid_.width << aeGrid_.block_width_log2;
+ uint32_t endX = (startX + (aeRegion.size().width >> aeGrid_.block_width_log2)) << aeGrid_.block_width_log2;
+ uint32_t i, j;
+ uint32_t count = 0;
+
+ uint32_t hist[knumHistogramBins] = { 0 };
+ for (j = topleftY;
+ j < topleftY + (aeRegion.size().height >> aeGrid_.block_height_log2);
+ j++) {
+ for (i = startX + startY; i < endX + startY; i += kCellSize) {
+ /*
+ * The grid width (and maybe height) is not reliable.
+ * We observed a bit shift which turns the value 160 into 32 in the stats grid.
+ * Use the one passed at init time.
+ */
+ if (stats->awb_raw_buffer.meta_data[i + 4 + j * aeGrid_.width] == 0) {
+ uint8_t Gr = stats->awb_raw_buffer.meta_data[i + 0 + j * aeGrid_.width];
+ uint8_t Gb = stats->awb_raw_buffer.meta_data[i + 3 + j * aeGrid_.width];
+ hist[(Gr + Gb) / 2]++;
+ count++;
+ }
+ }
+ }
+
+ /* Limit the gamma effect for now */
+ gamma_ = 1.1;
+
+ /* Estimate the quantile mean of the top 2% of the histogram */
+ iqMean_ = Histogram(Span<uint32_t>(hist)).interQuantileMean(0.98, 1.0);
+}
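
iqMean_ is therefore the mean luminance of the brightest 2% of the sampled pixels. A simplified sketch of what an inter-quantile mean over [0.98, 1.0] computes, assuming libipa's Histogram weights each bin by its sample count (the actual implementation may also interpolate within bins):

#include <algorithm>
#include <cstdint>

double topTwoPercentMean(const uint32_t *hist, unsigned int bins)
{
	uint64_t total = 0;
	for (unsigned int i = 0; i < bins; i++)
		total += hist[i];

	/* Skip the lowest 98% of samples, then average the rest by bin index. */
	uint64_t toSkip = total * 98 / 100;
	uint64_t count = 0, weighted = 0;

	for (unsigned int i = 0; i < bins; i++) {
		uint64_t n = hist[i];
		uint64_t skipped = std::min<uint64_t>(n, toSkip);
		toSkip -= skipped;
		n -= skipped;
		count += n;
		weighted += n * i;
	}

	return count ? static_cast<double>(weighted) / count : 0.0;
}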
+
+void IPU3Agc::filterExposure()
+{
+ double speed = 0.2;
+ if (prevExposure_ == 0s) {
+ /* DG stands for digital gain. */
+ prevExposure_ = currentExposure_;
+ prevExposureNoDg_ = currentExposureNoDg_;
+ } else {
+ /*
+ * If we are close to the desired result, go faster to avoid making
+ * multiple micro-adjustments.
+ * \todo Make this customisable?
+ */
+ if (prevExposure_ < 1.2 * currentExposure_ &&
+ prevExposure_ > 0.8 * currentExposure_)
+ speed = sqrt(speed);
+
+ prevExposure_ = speed * currentExposure_ +
+ prevExposure_ * (1.0 - speed);
+ prevExposureNoDg_ = speed * currentExposureNoDg_ +
+ prevExposureNoDg_ * (1.0 - speed);
+ }
+ /*
+ * We can't let the no_dg exposure deviate too far below the
+ * total exposure, as there might not be enough digital gain available
+ * in the ISP to hide it (which will cause nasty oscillation).
+ */
+ double fastReduceThreshold = 0.4;
+ if (prevExposureNoDg_ <
+ prevExposure_ * fastReduceThreshold)
+ prevExposureNoDg_ = prevExposure_ * fastReduceThreshold;
+ LOG(IPU3Agc, Debug) << "After filtering, total_exposure " << prevExposure_;
+}
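
The filter is a first-order IIR (exponential moving average) on the exposure, with the smoothing factor pulled towards 1 once the estimate is within +/-20% of the target. The core recurrence, sketched with plain doubles instead of utils::Duration:

#include <cmath>

double filterExposureStep(double prev, double current)
{
	double speed = 0.2;

	/* Converge faster once within +/-20% of the target. */
	if (prev > 0.8 * current && prev < 1.2 * current)
		speed = std::sqrt(speed);	/* ~0.45 */

	return speed * current + (1.0 - speed) * prev;
}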
+
+void IPU3Agc::lockExposureGain(uint32_t &exposure, double &gain)
+{
+ updateControls_ = false;
+
+ /* Algorithm initialization should wait for the first valid frames */
+ /* \todo - have the number of frames given by DelayedControls?
+ * - implement a function for IIR filtering */
+ if ((frameCount_ < kInitialFrameMinAECount) || (frameCount_ - lastFrame_ < kFrameSkipCount))
+ return;
+
+ /* Are we correctly exposed? */
+ if (std::abs(iqMean_ - kEvGainTarget * knumHistogramBins) <= 1) {
+ LOG(IPU3Agc, Debug) << "!!! Good exposure with iqMean = " << iqMean_;
+ converged_ = true;
+ } else {
+ double newGain = kEvGainTarget * knumHistogramBins / iqMean_;
+
+ /* extracted from Rpi::Agc::computeTargetExposure */
+ libcamera::utils::Duration currentShutter = exposure * lineDuration_;
+ currentExposureNoDg_ = currentShutter * gain;
+ LOG(IPU3Agc, Debug) << "Actual total exposure " << currentExposureNoDg_
+ << " Shutter speed " << currentShutter
+ << " Gain " << gain;
+ currentExposure_ = currentExposureNoDg_ * newGain;
+ libcamera::utils::Duration maxTotalExposure = maxExposureTime_ * kMaxGain;
+ currentExposure_ = std::min(currentExposure_, maxTotalExposure);
+ LOG(IPU3Agc, Debug) << "Target total exposure " << currentExposure_;
+
+ /* \todo Estimate if we need to desaturate */
+ filterExposure();
+
+ libcamera::utils::Duration newExposure = 0.0s;
+ if (currentShutter < maxExposureTime_) {
+ exposure = std::clamp(static_cast<uint32_t>(exposure * currentExposure_ / currentExposureNoDg_), kMinExposure, kMaxExposure);
+ newExposure = currentExposure_ / exposure;
+ gain = std::clamp(static_cast<uint32_t>(gain * currentExposure_ / newExposure), kMinGain, kMaxGain);
+ updateControls_ = true;
+ } else if (currentShutter >= maxExposureTime_) {
+ gain = std::clamp(static_cast<uint32_t>(gain * currentExposure_ / currentExposureNoDg_), kMinGain, kMaxGain);
+ newExposure = currentExposure_ / gain;
+ exposure = std::clamp(static_cast<uint32_t>(exposure * currentExposure_ / newExposure), kMinExposure, kMaxExposure);
+ updateControls_ = true;
+ }
+ LOG(IPU3Agc, Debug) << "Adjust exposure " << exposure * lineDuration_ << " and gain " << gain;
+ }
+ lastFrame_ = frameCount_;
+}
+
+void IPU3Agc::process(const ipu3_uapi_stats_3a *stats, uint32_t &exposure, double &gain)
+{
+ processBrightness(stats);
+ lockExposureGain(exposure, gain);
+ frameCount_++;
+}
+
+} /* namespace ipa::ipu3 */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/ipu3_agc.h b/src/ipa/ipu3/ipu3_agc.h
new file mode 100644
index 00000000..3deca3ae
--- /dev/null
+++ b/src/ipa/ipu3/ipu3_agc.h
@@ -0,0 +1,71 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * ipu3_agc.h - IPU3 AGC/AEC control algorithm
+ */
+#ifndef __LIBCAMERA_IPU3_AGC_H__
+#define __LIBCAMERA_IPU3_AGC_H__
+
+#include <array>
+#include <unordered_map>
+
+#include <linux/intel-ipu3.h>
+
+#include <libcamera/base/utils.h>
+
+#include <libcamera/geometry.h>
+
+#include "libipa/algorithm.h"
+
+namespace libcamera {
+
+struct IPACameraSensorInfo;
+
+namespace ipa::ipu3 {
+
+using utils::Duration;
+
+class IPU3Agc : public Algorithm
+{
+public:
+ IPU3Agc();
+ ~IPU3Agc() = default;
+
+ void initialise(struct ipu3_uapi_grid_config &bdsGrid, const IPACameraSensorInfo &sensorInfo);
+ void process(const ipu3_uapi_stats_3a *stats, uint32_t &exposure, double &gain);
+ bool converged() { return converged_; }
+ bool updateControls() { return updateControls_; }
+ /* \todo Use a metadata exchange between IPAs */
+ double gamma() { return gamma_; }
+
+private:
+ void processBrightness(const ipu3_uapi_stats_3a *stats);
+ void filterExposure();
+ void lockExposureGain(uint32_t &exposure, double &gain);
+
+ struct ipu3_uapi_grid_config aeGrid_;
+
+ uint64_t frameCount_;
+ uint64_t lastFrame_;
+
+ bool converged_;
+ bool updateControls_;
+
+ double iqMean_;
+ double gamma_;
+
+ Duration lineDuration_;
+ Duration maxExposureTime_;
+
+ Duration prevExposure_;
+ Duration prevExposureNoDg_;
+ Duration currentExposure_;
+ Duration currentExposureNoDg_;
+};
+
+} /* namespace ipa::ipu3 */
+
+} /* namespace libcamera */
+
+#endif /* __LIBCAMERA_IPU3_AGC_H__ */
diff --git a/src/ipa/ipu3/ipu3_awb.cpp b/src/ipa/ipu3/ipu3_awb.cpp
new file mode 100644
index 00000000..9b409c8f
--- /dev/null
+++ b/src/ipa/ipu3/ipu3_awb.cpp
@@ -0,0 +1,382 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * ipu3_awb.cpp - AWB control algorithm
+ */
+#include "ipu3_awb.h"
+
+#include <cmath>
+#include <numeric>
+#include <unordered_map>
+
+#include <libcamera/base/log.h>
+
+namespace libcamera {
+
+namespace ipa::ipu3 {
+
+LOG_DEFINE_CATEGORY(IPU3Awb)
+
+static constexpr uint32_t kMinZonesCounted = 16;
+static constexpr uint32_t kMinGreenLevelInZone = 32;
+
+/**
+ * \struct IspStatsRegion
+ * \brief RGB statistics for a given region
+ *
+ * The IspStatsRegion structure is intended to abstract the ISP specific
+ * statistics and use an agnostic algorithm to compute AWB.
+ *
+ * \var IspStatsRegion::counted
+ * \brief Number of pixels used to calculate the sums
+ *
+ * \var IspStatsRegion::uncounted
+ * \brief Remaining number of pixels in the region
+ *
+ * \var IspStatsRegion::rSum
+ * \brief Sum of the red values in the region
+ *
+ * \var IspStatsRegion::gSum
+ * \brief Sum of the green values in the region
+ *
+ * \var IspStatsRegion::bSum
+ * \brief Sum of the blue values in the region
+ */
+
+/**
+ * \struct AwbStatus
+ * \brief AWB parameters calculated
+ *
+ * The AwbStatus structure is intended to store the AWB
+ * parameters calculated by the algorithm
+ *
+ * \var AwbStatus::temperatureK
+ * \brief Color temperature calculated
+ *
+ * \var AwbStatus::redGain
+ * \brief Gain calculated for the red channel
+ *
+ * \var AwbStatus::greenGain
+ * \brief Gain calculated for the green channel
+ *
+ * \var AwbStatus::blueGain
+ * \brief Gain calculated for the blue channel
+ */
+
+/**
+ * \struct Ipu3AwbCell
+ * \brief Memory layout for each cell in AWB metadata
+ *
+ * The Ipu3AwbCell structure is used to get individual values
+ * such as red average or saturation ratio in a particular cell.
+ *
+ * \var Ipu3AwbCell::greenRedAvg
+ * \brief Green average for red lines in the cell
+ *
+ * \var Ipu3AwbCell::redAvg
+ * \brief Red average in the cell
+ *
+ * \var Ipu3AwbCell::blueAvg
+ * \brief Blue average in the cell
+ *
+ * \var Ipu3AwbCell::greenBlueAvg
+ * \brief Green average for blue lines
+ *
+ * \var Ipu3AwbCell::satRatio
+ * \brief Saturation ratio in the cell
+ *
+ * \var Ipu3AwbCell::padding
+ * \brief array of unused bytes for padding
+ */
+
+/* Default settings for Bayer noise reduction replicated from the Kernel */
+static const struct ipu3_uapi_bnr_static_config imguCssBnrDefaults = {
+ .wb_gains = { 16, 16, 16, 16 },
+ .wb_gains_thr = { 255, 255, 255, 255 },
+ .thr_coeffs = { 1700, 0, 31, 31, 0, 16 },
+ .thr_ctrl_shd = { 26, 26, 26, 26 },
+ .opt_center = { -648, 0, -366, 0 },
+ .lut = {
+ { 17, 23, 28, 32, 36, 39, 42, 45,
+ 48, 51, 53, 55, 58, 60, 62, 64,
+ 66, 68, 70, 72, 73, 75, 77, 78,
+ 80, 82, 83, 85, 86, 88, 89, 90 } },
+ .bp_ctrl = { 20, 0, 1, 40, 0, 6, 0, 6, 0 },
+ .dn_detect_ctrl = { 9, 3, 4, 0, 8, 0, 1, 1, 1, 1, 0 },
+ .column_size = 1296,
+ .opt_center_sqr = { 419904, 133956 },
+};
+
+/* Default settings for Auto White Balance replicated from the Kernel */
+static const struct ipu3_uapi_awb_config_s imguCssAwbDefaults = {
+ .rgbs_thr_gr = 8191,
+ .rgbs_thr_r = 8191,
+ .rgbs_thr_gb = 8191,
+ .rgbs_thr_b = 8191 | IPU3_UAPI_AWB_RGBS_THR_B_EN | IPU3_UAPI_AWB_RGBS_THR_B_INCL_SAT,
+ .grid = {
+ .width = 160,
+ .height = 36,
+ .block_width_log2 = 3,
+ .block_height_log2 = 4,
+ .height_per_slice = 1, /* Overridden by kernel. */
+ .x_start = 0,
+ .y_start = 0,
+ .x_end = 0,
+ .y_end = 0,
+ },
+};
+
+/* Default color correction matrix defined as an identity matrix */
+static const struct ipu3_uapi_ccm_mat_config imguCssCcmDefault = {
+ 8191, 0, 0, 0,
+ 0, 8191, 0, 0,
+ 0, 0, 8191, 0
+};
+
+/* Default settings for Gamma correction */
+const struct ipu3_uapi_gamma_corr_lut imguCssGammaLut = { {
+ 63, 79, 95, 111, 127, 143, 159, 175, 191, 207, 223, 239, 255, 271, 287,
+ 303, 319, 335, 351, 367, 383, 399, 415, 431, 447, 463, 479, 495, 511,
+ 527, 543, 559, 575, 591, 607, 623, 639, 655, 671, 687, 703, 719, 735,
+ 751, 767, 783, 799, 815, 831, 847, 863, 879, 895, 911, 927, 943, 959,
+ 975, 991, 1007, 1023, 1039, 1055, 1071, 1087, 1103, 1119, 1135, 1151,
+ 1167, 1183, 1199, 1215, 1231, 1247, 1263, 1279, 1295, 1311, 1327, 1343,
+ 1359, 1375, 1391, 1407, 1423, 1439, 1455, 1471, 1487, 1503, 1519, 1535,
+ 1551, 1567, 1583, 1599, 1615, 1631, 1647, 1663, 1679, 1695, 1711, 1727,
+ 1743, 1759, 1775, 1791, 1807, 1823, 1839, 1855, 1871, 1887, 1903, 1919,
+ 1935, 1951, 1967, 1983, 1999, 2015, 2031, 2047, 2063, 2079, 2095, 2111,
+ 2143, 2175, 2207, 2239, 2271, 2303, 2335, 2367, 2399, 2431, 2463, 2495,
+ 2527, 2559, 2591, 2623, 2655, 2687, 2719, 2751, 2783, 2815, 2847, 2879,
+ 2911, 2943, 2975, 3007, 3039, 3071, 3103, 3135, 3167, 3199, 3231, 3263,
+ 3295, 3327, 3359, 3391, 3423, 3455, 3487, 3519, 3551, 3583, 3615, 3647,
+ 3679, 3711, 3743, 3775, 3807, 3839, 3871, 3903, 3935, 3967, 3999, 4031,
+ 4063, 4095, 4127, 4159, 4223, 4287, 4351, 4415, 4479, 4543, 4607, 4671,
+ 4735, 4799, 4863, 4927, 4991, 5055, 5119, 5183, 5247, 5311, 5375, 5439,
+ 5503, 5567, 5631, 5695, 5759, 5823, 5887, 5951, 6015, 6079, 6143, 6207,
+ 6271, 6335, 6399, 6463, 6527, 6591, 6655, 6719, 6783, 6847, 6911, 6975,
+ 7039, 7103, 7167, 7231, 7295, 7359, 7423, 7487, 7551, 7615, 7679, 7743,
+ 7807, 7871, 7935, 7999, 8063, 8127, 8191
+} };
+
+IPU3Awb::IPU3Awb()
+ : Algorithm()
+{
+ asyncResults_.blueGain = 1.0;
+ asyncResults_.greenGain = 1.0;
+ asyncResults_.redGain = 1.0;
+ asyncResults_.temperatureK = 4500;
+}
+
+IPU3Awb::~IPU3Awb()
+{
+}
+
+void IPU3Awb::initialise(ipu3_uapi_params &params, const Size &bdsOutputSize, struct ipu3_uapi_grid_config &bdsGrid)
+{
+ params.use.acc_awb = 1;
+ params.acc_param.awb.config = imguCssAwbDefaults;
+
+ awbGrid_ = bdsGrid;
+ params.acc_param.awb.config.grid = awbGrid_;
+
+ params.use.acc_bnr = 1;
+ params.acc_param.bnr = imguCssBnrDefaults;
+ /*
+ * The optical center is the column (respectively row) start minus
+ * the X (respectively Y) center. For the moment use BDS as a first
+ * approximation, but it should be calculated based on Shading (SHD)
+ * parameters.
+ */
+ params.acc_param.bnr.column_size = bdsOutputSize.width;
+ params.acc_param.bnr.opt_center.x_reset = awbGrid_.x_start - (bdsOutputSize.width / 2);
+ params.acc_param.bnr.opt_center.y_reset = awbGrid_.y_start - (bdsOutputSize.height / 2);
+ params.acc_param.bnr.opt_center_sqr.x_sqr_reset = params.acc_param.bnr.opt_center.x_reset
+ * params.acc_param.bnr.opt_center.x_reset;
+ params.acc_param.bnr.opt_center_sqr.y_sqr_reset = params.acc_param.bnr.opt_center.y_reset
+ * params.acc_param.bnr.opt_center.y_reset;
+
+ params.use.acc_ccm = 1;
+ params.acc_param.ccm = imguCssCcmDefault;
+
+ params.use.acc_gamma = 1;
+ params.acc_param.gamma.gc_lut = imguCssGammaLut;
+ params.acc_param.gamma.gc_ctrl.enable = 1;
+
+ zones_.reserve(kAwbStatsSizeX * kAwbStatsSizeY);
+}
+
+/**
+ * The function estimates the correlated color temperature from the
+ * RGB color space input.
+ * In physics and color science, the Planckian locus or black body locus is
+ * the path or locus that the color of an incandescent black body would take
+ * in a particular chromaticity space as the blackbody temperature changes.
+ *
+ * If a narrow range of color temperatures is considered (those encapsulating
+ * daylight being the most practical case) one can approximate the Planckian
+ * locus in order to calculate the CCT in terms of chromaticity coordinates.
+ *
+ * More detailed information can be found in:
+ * https://en.wikipedia.org/wiki/Color_temperature#Approximation
+ */
+uint32_t IPU3Awb::estimateCCT(double red, double green, double blue)
+{
+ /* Convert the RGB values to CIE tristimulus values (XYZ) */
+ double X = (-0.14282) * (red) + (1.54924) * (green) + (-0.95641) * (blue);
+ double Y = (-0.32466) * (red) + (1.57837) * (green) + (-0.73191) * (blue);
+ double Z = (-0.68202) * (red) + (0.77073) * (green) + (0.56332) * (blue);
+
+ /* Calculate the normalized chromaticity values */
+ double x = X / (X + Y + Z);
+ double y = Y / (X + Y + Z);
+
+ /* Calculate CCT */
+ double n = (x - 0.3320) / (0.1858 - y);
+ return 449 * n * n * n + 3525 * n * n + 6823.3 * n + 5520.33;
+}
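
This is McCamy's cubic approximation, CCT(n) = 449n^3 + 3525n^2 + 6823.3n + 5520.33 with n = (x - 0.3320) / (0.1858 - y). A self-contained sketch with the same coefficients, useful as a sanity check:

#include <cstdio>

static double estimateCctSketch(double r, double g, double b)
{
	/* The RGB-to-XYZ matrix is the one hard-coded in estimateCCT(). */
	double X = -0.14282 * r + 1.54924 * g - 0.95641 * b;
	double Y = -0.32466 * r + 1.57837 * g - 0.73191 * b;
	double Z = -0.68202 * r + 0.77073 * g + 0.56332 * b;

	double x = X / (X + Y + Z);
	double y = Y / (X + Y + Z);

	double n = (x - 0.3320) / (0.1858 - y);
	return 449 * n * n * n + 3525 * n * n + 6823.3 * n + 5520.33;
}

int main()
{
	/* Equal RGB comes out at roughly 8900 K with this matrix. */
	printf("%.0f K\n", estimateCctSketch(1.0, 1.0, 1.0));
	return 0;
}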
+
+/* Generate an RGB vector with the average values for each region */
+void IPU3Awb::generateZones(std::vector<RGB> &zones)
+{
+ for (unsigned int i = 0; i < kAwbStatsSizeX * kAwbStatsSizeY; i++) {
+ RGB zone;
+ double counted = awbStats_[i].counted;
+ if (counted >= kMinZonesCounted) {
+ zone.G = awbStats_[i].gSum / counted;
+ if (zone.G >= kMinGreenLevelInZone) {
+ zone.R = awbStats_[i].rSum / counted;
+ zone.B = awbStats_[i].bSum / counted;
+ zones.push_back(zone);
+ }
+ }
+ }
+}
+
+/* Translate the IPU3 statistics into the default statistics region array */
+void IPU3Awb::generateAwbStats(const ipu3_uapi_stats_3a *stats)
+{
+ uint32_t regionWidth = round(awbGrid_.width / static_cast<double>(kAwbStatsSizeX));
+ uint32_t regionHeight = round(awbGrid_.height / static_cast<double>(kAwbStatsSizeY));
+
+ /*
+ * Generate a (kAwbStatsSizeX x kAwbStatsSizeY) array from the IPU3 grid which is
+ * (awbGrid_.width x awbGrid_.height).
+ */
+ for (unsigned int j = 0; j < kAwbStatsSizeY * regionHeight; j++) {
+ for (unsigned int i = 0; i < kAwbStatsSizeX * regionWidth; i++) {
+ uint32_t cellPosition = j * awbGrid_.width + i;
+ uint32_t cellX = (cellPosition / regionWidth) % kAwbStatsSizeX;
+ uint32_t cellY = ((cellPosition / awbGrid_.width) / regionHeight) % kAwbStatsSizeY;
+
+ uint32_t awbRegionPosition = cellY * kAwbStatsSizeX + cellX;
+ cellPosition *= 8;
+
+ /* Cast the initial IPU3 structure to simplify reading it */
+ Ipu3AwbCell *currentCell = reinterpret_cast<Ipu3AwbCell *>(const_cast<uint8_t *>(&stats->awb_raw_buffer.meta_data[cellPosition]));
+ if (currentCell->satRatio == 0) {
+ /* The cell is not saturated, use the current cell */
+ awbStats_[awbRegionPosition].counted++;
+ uint32_t greenValue = currentCell->greenRedAvg + currentCell->greenBlueAvg;
+ awbStats_[awbRegionPosition].gSum += greenValue / 2;
+ awbStats_[awbRegionPosition].rSum += currentCell->redAvg;
+ awbStats_[awbRegionPosition].bSum += currentCell->blueAvg;
+ }
+ }
+ }
+}
+
+void IPU3Awb::clearAwbStats()
+{
+ for (unsigned int i = 0; i < kAwbStatsSizeX * kAwbStatsSizeY; i++) {
+ awbStats_[i].bSum = 0;
+ awbStats_[i].rSum = 0;
+ awbStats_[i].gSum = 0;
+ awbStats_[i].counted = 0;
+ awbStats_[i].uncounted = 0;
+ }
+}
+
+void IPU3Awb::awbGreyWorld()
+{
+ LOG(IPU3Awb, Debug) << "Grey world AWB";
+ /*
+ * Make a separate list of the derivatives for each of red and blue, so
+ * that we can sort them to exclude the extreme gains. We could
+ * consider some variations, such as normalising all the zones first, or
+ * doing an L2 average etc.
+ */
+ std::vector<RGB> &redDerivative(zones_);
+ std::vector<RGB> blueDerivative(redDerivative);
+ std::sort(redDerivative.begin(), redDerivative.end(),
+ [](RGB const &a, RGB const &b) {
+ return a.G * b.R < b.G * a.R;
+ });
+ std::sort(blueDerivative.begin(), blueDerivative.end(),
+ [](RGB const &a, RGB const &b) {
+ return a.G * b.B < b.G * a.B;
+ });
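+
+ /*
+ * Note: the comparator a.G * b.R < b.G * a.R orders zones by their
+ * G/R (respectively G/B) ratio without performing any division, which
+ * keeps zones with a zero red or blue average well-defined.
+ */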
+
+ /* Average the middle half of the values. */
+ int discard = redDerivative.size() / 4;
+
+ RGB sumRed(0, 0, 0);
+ RGB sumBlue(0, 0, 0);
+ for (auto ri = redDerivative.begin() + discard,
+ bi = blueDerivative.begin() + discard;
+ ri != redDerivative.end() - discard; ri++, bi++)
+ sumRed += *ri, sumBlue += *bi;
+
+ double redGain = sumRed.G / (sumRed.R + 1),
+ blueGain = sumBlue.G / (sumBlue.B + 1);
+
+ /* The color temperature is not relevant to grey world, but it is still useful to estimate it :-) */
+ asyncResults_.temperatureK = estimateCCT(sumRed.R, sumRed.G, sumBlue.B);
+ asyncResults_.redGain = redGain;
+ asyncResults_.greenGain = 1.0;
+ asyncResults_.blueGain = blueGain;
+}
+
+void IPU3Awb::calculateWBGains(const ipu3_uapi_stats_3a *stats)
+{
+ ASSERT(stats->stats_3a_status.awb_en);
+ zones_.clear();
+ clearAwbStats();
+ generateAwbStats(stats);
+ generateZones(zones_);
+ LOG(IPU3Awb, Debug) << "Valid zones: " << zones_.size();
+ if (zones_.size() > 10) {
+ awbGreyWorld();
+ LOG(IPU3Awb, Debug) << "Gain found for red: " << asyncResults_.redGain
+ << " and for blue: " << asyncResults_.blueGain;
+ }
+}
+
+void IPU3Awb::updateWbParameters(ipu3_uapi_params &params, double agcGamma)
+{
+ /*
+ * Green gains should not be touched and are considered to be 1.0.
+ * Their default code is 16, so do not change it at all.
+ * 4096 is the code corresponding to a gain of 1.0.
+ */
+ params.acc_param.bnr.wb_gains.gr = 16;
+ params.acc_param.bnr.wb_gains.r = 4096 * asyncResults_.redGain;
+ params.acc_param.bnr.wb_gains.b = 4096 * asyncResults_.blueGain;
+ params.acc_param.bnr.wb_gains.gb = 16;
+
+ LOG(IPU3Awb, Debug) << "Color temperature estimated: " << asyncResults_.temperatureK
+ << " and gamma calculated: " << agcGamma;
+
+ /* The CCM matrix may change once the color temperature is actually used */
+ params.acc_param.ccm = imguCssCcmDefault;
+
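+ /*
+ * As an illustration with an assumed gamma: for agcGamma = 2.2, the
+ * LUT midpoint i = 128 stores pow(128 / 255.0, 1 / 2.2) * 8191, which
+ * is about 5989.
+ */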
+ for (uint32_t i = 0; i < 256; i++) {
+ double j = i / 255.0;
+ double gamma = std::pow(j, 1.0 / agcGamma);
+ /* The maximum input value of 255 maps to 8191, as the IPU3 uses 13 bits */
+ params.acc_param.gamma.gc_lut.lut[i] = gamma * 8191;
+ }
+}
+
+} /* namespace ipa::ipu3 */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/ipu3_awb.h b/src/ipa/ipu3/ipu3_awb.h
new file mode 100644
index 00000000..122cf68c
--- /dev/null
+++ b/src/ipa/ipu3/ipu3_awb.h
@@ -0,0 +1,91 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * ipu3_awb.h - IPU3 AWB control algorithm
+ */
+#ifndef __LIBCAMERA_IPU3_AWB_H__
+#define __LIBCAMERA_IPU3_AWB_H__
+
+#include <vector>
+
+#include <linux/intel-ipu3.h>
+
+#include <libcamera/geometry.h>
+
+#include "libipa/algorithm.h"
+
+namespace libcamera {
+
+namespace ipa::ipu3 {
+
+/* Region size for the statistics generation algorithm */
+static constexpr uint32_t kAwbStatsSizeX = 16;
+static constexpr uint32_t kAwbStatsSizeY = 12;
+
+class IPU3Awb : public Algorithm
+{
+public:
+ IPU3Awb();
+ ~IPU3Awb();
+
+ void initialise(ipu3_uapi_params &params, const Size &bdsOutputSize, struct ipu3_uapi_grid_config &bdsGrid);
+ void calculateWBGains(const ipu3_uapi_stats_3a *stats);
+ void updateWbParameters(ipu3_uapi_params &params, double agcGamma);
+
+ struct Ipu3AwbCell {
+ unsigned char greenRedAvg;
+ unsigned char redAvg;
+ unsigned char blueAvg;
+ unsigned char greenBlueAvg;
+ unsigned char satRatio;
+ unsigned char padding[3];
+ } __attribute__((packed));
+
+ /* \todo Make these three structs available to all the ISPs? */
+ struct RGB {
+ RGB(double _R = 0, double _G = 0, double _B = 0)
+ : R(_R), G(_G), B(_B)
+ {
+ }
+ double R, G, B;
+ RGB &operator+=(RGB const &other)
+ {
+ R += other.R, G += other.G, B += other.B;
+ return *this;
+ }
+ };
+
+ struct IspStatsRegion {
+ unsigned int counted;
+ unsigned int uncounted;
+ unsigned long long rSum;
+ unsigned long long gSum;
+ unsigned long long bSum;
+ };
+
+ struct AwbStatus {
+ double temperatureK;
+ double redGain;
+ double greenGain;
+ double blueGain;
+ };
+
+private:
+ void generateZones(std::vector<RGB> &zones);
+ void generateAwbStats(const ipu3_uapi_stats_3a *stats);
+ void clearAwbStats();
+ void awbGreyWorld();
+ uint32_t estimateCCT(double red, double green, double blue);
+
+ struct ipu3_uapi_grid_config awbGrid_;
+
+ std::vector<RGB> zones_;
+ IspStatsRegion awbStats_[kAwbStatsSizeX * kAwbStatsSizeY];
+ AwbStatus asyncResults_;
+};
+
+} /* namespace ipa::ipu3 */
+
+} /* namespace libcamera */
+#endif /* __LIBCAMERA_IPU3_AWB_H__ */
diff --git a/src/ipa/ipu3/meson.build b/src/ipa/ipu3/meson.build
new file mode 100644
index 00000000..b6364190
--- /dev/null
+++ b/src/ipa/ipu3/meson.build
@@ -0,0 +1,27 @@
+# SPDX-License-Identifier: CC0-1.0
+
+ipa_name = 'ipa_ipu3'
+
+ipu3_ipa_sources = files([
+ 'ipu3.cpp',
+ 'ipu3_agc.cpp',
+ 'ipu3_awb.cpp',
+])
+
+mod = shared_module(ipa_name,
+ [ipu3_ipa_sources, libcamera_generated_ipa_headers],
+ name_prefix : '',
+ include_directories : [ipa_includes, libipa_includes],
+ dependencies : libcamera_private,
+ link_with : libipa,
+ install : true,
+ install_dir : ipa_install_dir)
+
+if ipa_sign_module
+ custom_target(ipa_name + '.so.sign',
+ input : mod,
+ output : ipa_name + '.so.sign',
+ command : [ipa_sign, ipa_priv_key, '@INPUT@', '@OUTPUT@'],
+ install : false,
+ build_by_default : true)
+endif
diff --git a/src/ipa/libipa/algorithm.cpp b/src/ipa/libipa/algorithm.cpp
new file mode 100644
index 00000000..930f9353
--- /dev/null
+++ b/src/ipa/libipa/algorithm.cpp
@@ -0,0 +1,39 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * algorithm.cpp - ISP control algorithms
+ */
+
+#include "algorithm.h"
+
+/**
+ * \file algorithm.h
+ * \brief Algorithm common interface
+ */
+
+namespace libcamera {
+
+/**
+ * \brief The IPA namespace
+ *
+ * The IPA namespace groups all types specific to IPA modules. It serves as the
+ * top-level namespace for the IPA library libipa, and also contains
+ * module-specific namespaces for IPA modules.
+ */
+namespace ipa {
+
+/**
+ * \class Algorithm
+ * \brief The base class for all IPA algorithms
+ *
+ * The Algorithm class defines a standard interface for IPA algorithms. By
+ * abstracting algorithms, it makes possible the implementation of generic code
+ * to manage algorithms regardless of their specific type.
+ */
+
+Algorithm::~Algorithm() = default;
+
+} /* namespace ipa */
+
+} /* namespace libcamera */
diff --git a/src/ipa/libipa/algorithm.h b/src/ipa/libipa/algorithm.h
new file mode 100644
index 00000000..89cee4c4
--- /dev/null
+++ b/src/ipa/libipa/algorithm.h
@@ -0,0 +1,24 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * algorithm.h - ISP control algorithm interface
+ */
+#ifndef __LIBCAMERA_IPA_LIBIPA_ALGORITHM_H__
+#define __LIBCAMERA_IPA_LIBIPA_ALGORITHM_H__
+
+namespace libcamera {
+
+namespace ipa {
+
+class Algorithm
+{
+public:
+ virtual ~Algorithm();
+};
+
+} /* namespace ipa */
+
+} /* namespace libcamera */
+
+#endif /* __LIBCAMERA_IPA_LIBIPA_ALGORITHM_H__ */
diff --git a/src/ipa/libipa/camera_sensor_helper.cpp b/src/ipa/libipa/camera_sensor_helper.cpp
new file mode 100644
index 00000000..84d8ccf7
--- /dev/null
+++ b/src/ipa/libipa/camera_sensor_helper.cpp
@@ -0,0 +1,322 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_sensor_helper.cpp - Helper class that performs sensor-specific
+ * parameter computations
+ */
+#include "camera_sensor_helper.h"
+
+#include <libcamera/base/log.h>
+
+/**
+ * \file camera_sensor_helper.h
+ * \brief Helper class that performs sensor-specific parameter computations
+ *
+ * Computation of sensor configuration parameters is a sensor-specific
+ * operation. Each CameraSensorHelper derived class computes the value of
+ * configuration parameters, for example the analogue gain value, using
+ * sensor-specific functions and constants.
+ *
+ * Every subclass of CameraSensorHelper shall be registered with libipa using
+ * the REGISTER_CAMERA_SENSOR_HELPER() macro.
+ */
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(CameraSensorHelper)
+
+namespace ipa {
+
+/**
+ * \class CameraSensorHelper
+ * \brief Base class for computing sensor tuning parameters using
+ * sensor-specific constants
+ *
+ * Instances derived from CameraSensorHelper class are sensor-specific.
+ * Each supported sensor will have an associated helper class defined.
+ */
+
+/**
+ * \brief Construct a CameraSensorHelper instance
+ *
+ * CameraSensorHelper derived class instances shall never be constructed
+ * manually but always through the CameraSensorHelperFactory::create() method.
+ */
+
+/**
+ * \brief Compute gain code from the analogue gain absolute value
+ * \param[in] gain The real gain to pass
+ *
+ * This function abstracts the calculation of the gain code, letting the IPA
+ * use the real gain for its estimations.
+ *
+ * The parameters come from the MIPI Alliance Camera Specification for
+ * Camera Command Set (CCS).
+ *
+ * \return The gain code to pass to V4L2
+ */
+uint32_t CameraSensorHelper::gainCode(double gain) const
+{
+ ASSERT(analogueGainConstants_.m0 == 0 || analogueGainConstants_.m1 == 0);
+ ASSERT(analogueGainConstants_.type == AnalogueGainLinear);
+
+ return (analogueGainConstants_.c0 - analogueGainConstants_.c1 * gain) /
+ (analogueGainConstants_.m1 * gain - analogueGainConstants_.m0);
+}
+
+/**
+ * \brief Compute the real gain from the V4L2 subdev control gain code
+ * \param[in] gainCode The V4L2 subdev control gain
+ *
+ * This function abstracts the calculation of the real gain, letting the IPA
+ * use the real gain for its estimations. It is the counterpart of the function
+ * CameraSensorHelper::gainCode.
+ *
+ * The parameters come from the MIPI Alliance Camera Specification for
+ * Camera Command Set (CCS).
+ *
+ * \return The real gain
+ */
+double CameraSensorHelper::gain(uint32_t gainCode) const
+{
+ ASSERT(analogueGainConstants_.m0 == 0 || analogueGainConstants_.m1 == 0);
+ ASSERT(analogueGainConstants_.type == AnalogueGainLinear);
+
+ return (analogueGainConstants_.m0 * static_cast<double>(gainCode) + analogueGainConstants_.c0) /
+ (analogueGainConstants_.m1 * static_cast<double>(gainCode) + analogueGainConstants_.c1);
+}
+
+/**
+ * \enum CameraSensorHelper::AnalogueGainType
+ * \brief The gain calculation modes as defined by the MIPI CCS
+ *
+ * Describes the image sensor analogue gain capabilities.
+ * Two modes are possible, depending on the sensor: Linear and Exponential.
+ */
+
+/**
+ * \var CameraSensorHelper::AnalogueGainLinear
+ * \brief Gain is computed using linear gain estimation
+ *
+ * The relationship between the integer gain parameter and the resulting gain
+ * multiplier is given by the following equation:
+ *
+ * \f$gain=\frac{m0x+c0}{m1x+c1}\f$
+ *
+ * where 'x' is the gain control parameter and m0, m1, c0 and c1 are
+ * image-sensor-specific constants.
+ * These constants are static parameters, and for any given image sensor either
+ * m0 or m1 shall be zero.
+ *
+ * The full gain equation therefore reduces to either:
+ *
+ * \f$gain=\frac{c0}{m1x+c1}\f$ or \f$\frac{m0x+c0}{c1}\f$
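+ *
+ * As a worked example with the imx219 constants registered below in this
+ * file (m0 = 0, c0 = 256, m1 = -1, c1 = 256), the model reduces to
+ * \f$gain=\frac{256}{256-x}\f$, so a gain code of 224 corresponds to a
+ * gain of 8.0.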
+ */
+
+/**
+ * \var CameraSensorHelper::AnalogueGainExponential
+ * \brief Gain is computed using exponential gain estimation
+ * (introduced in CCS v1.1)
+ *
+ * Starting with CCS v1.1, Alternate Global Analogue Gain is also available.
+ * If the image sensor supports it, then the global analogue gain can be
+ * controlled by a combined linear and exponential gain formula:
+ *
+ * \f$gain = analogLinearGainGlobal * 2^{analogExponentialGainGlobal}\f$
+ * \todo not implemented in libipa
+ */
+
+/**
+ * \struct CameraSensorHelper::AnalogueGainConstants
+ * \brief Analogue gain constants used for gain calculation
+ */
+
+/**
+ * \var CameraSensorHelper::AnalogueGainConstants::type
+ * \brief Analogue gain calculation mode
+ */
+
+/**
+ * \var CameraSensorHelper::AnalogueGainConstants::m0
+ * \brief Constant used in the analogue gain coding/decoding
+ *
+ * \note Either m0 or m1 shall be zero.
+ */
+
+/**
+ * \var CameraSensorHelper::AnalogueGainConstants::c0
+ * \brief Constant used in the analogue gain coding/decoding
+ */
+
+/**
+ * \var CameraSensorHelper::AnalogueGainConstants::m1
+ * \brief Constant used in the analogue gain coding/decoding
+ *
+ * \note Either m0 or m1 shall be zero.
+ */
+
+/**
+ * \var CameraSensorHelper::AnalogueGainConstants::c1
+ * \brief Constant used in the analogue gain coding/decoding
+ */
+
+/**
+ * \var CameraSensorHelper::analogueGainConstants_
+ * \brief The analogue gain parameters used for calculation
+ *
+ * The analogue gain is calculated through a formula, and its parameters are
+ * sensor specific. Use this variable to store the values at init time.
+ */
+
+/**
+ * \class CameraSensorHelperFactory
+ * \brief Registration of CameraSensorHelperFactory classes and creation of instances
+ *
+ * To facilitate discovery and instantiation of CameraSensorHelper classes, the
+ * CameraSensorHelperFactory class maintains a registry of camera sensor helper
+ * sub-classes. Each CameraSensorHelper subclass shall register itself using the
+ * REGISTER_CAMERA_SENSOR_HELPER() macro, which will create a corresponding
+ * instance of a CameraSensorHelperFactory subclass and register it with the
+ * static list of factories.
+ */
+
+/**
+ * \brief Construct a camera sensor helper factory
+ * \param[in] name Name of the camera sensor helper class
+ *
+ * Creating an instance of the factory registers it with the global list of
+ * factories, accessible through the factories() function.
+ *
+ * The factory \a name is used for debug purposes and shall be unique.
+ */
+CameraSensorHelperFactory::CameraSensorHelperFactory(const std::string name)
+ : name_(name)
+{
+ registerType(this);
+}
+
+/**
+ * \brief Create an instance of the CameraSensorHelper corresponding to
+ * a named factory
+ * \param[in] name Name of the factory
+ *
+ * \return A unique pointer to a new instance of the CameraSensorHelper subclass
+ * corresponding to the named factory or a null pointer if no such factory
+ * exists
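+ *
+ * A minimal usage sketch, assuming the imx219 helper registered in this
+ * file:
+ *
+ * \code{.cpp}
+ * std::unique_ptr<CameraSensorHelper> helper =
+ * 	CameraSensorHelperFactory::create("imx219");
+ * uint32_t code = helper ? helper->gainCode(8.0) : 0;	/* 224 */
+ * \endcode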
+ */
+std::unique_ptr<CameraSensorHelper> CameraSensorHelperFactory::create(const std::string &name)
+{
+ std::vector<CameraSensorHelperFactory *> &factories =
+ CameraSensorHelperFactory::factories();
+
+ for (CameraSensorHelperFactory *factory : factories) {
+ if (name != factory->name_)
+ continue;
+
+ CameraSensorHelper *helper = factory->createInstance();
+ return std::unique_ptr<CameraSensorHelper>(helper);
+ }
+
+ return nullptr;
+}
+
+/**
+ * \brief Add a camera sensor helper class to the registry
+ * \param[in] factory Factory to use to construct the camera sensor helper
+ *
+ * The caller is responsible for guaranteeing the uniqueness of the camera
+ * sensor helper name.
+ */
+void CameraSensorHelperFactory::registerType(CameraSensorHelperFactory *factory)
+{
+ std::vector<CameraSensorHelperFactory *> &factories =
+ CameraSensorHelperFactory::factories();
+
+ factories.push_back(factory);
+}
+
+/**
+ * \brief Retrieve the list of all camera sensor helper factories
+ * \return The list of camera sensor helper factories
+ */
+std::vector<CameraSensorHelperFactory *> &CameraSensorHelperFactory::factories()
+{
+ /*
+ * The static factories list is defined inside the function to ensure
+ * it gets initialized on first use, without any dependency on link
+ * order.
+ */
+ static std::vector<CameraSensorHelperFactory *> factories;
+ return factories;
+}
+
+/**
+ * \fn CameraSensorHelperFactory::createInstance()
+ * \brief Create an instance of the CameraSensorHelper corresponding to the
+ * factory
+ *
+ * This virtual function is implemented by the REGISTER_CAMERA_SENSOR_HELPER()
+ * macro. It creates a camera sensor helper instance associated with the camera
+ * sensor model.
+ *
+ * \return A pointer to a newly constructed instance of the CameraSensorHelper
+ * subclass corresponding to the factory
+ */
+
+/**
+ * \var CameraSensorHelperFactory::name_
+ * \brief The name of the factory
+ */
+
+/**
+ * \def REGISTER_CAMERA_SENSOR_HELPER
+ * \brief Register a camera sensor helper with the camera sensor helper factory
+ * \param[in] name Sensor model name used to register the class
+ * \param[in] helper Class name of CameraSensorHelper derived class to register
+ *
+ * Register a CameraSensorHelper subclass with the factory and make it
+ * available for matching sensors.
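+ *
+ * A registration sketch for a hypothetical sensor (name and constants
+ * purely illustrative):
+ *
+ * \code{.cpp}
+ * class CameraSensorHelperMySensor : public CameraSensorHelper
+ * {
+ * public:
+ * 	CameraSensorHelperMySensor()
+ * 	{
+ * 		analogueGainConstants_ = { AnalogueGainLinear, 1, 0, 0, 16 };
+ * 	}
+ * };
+ * REGISTER_CAMERA_SENSOR_HELPER("mysensor", CameraSensorHelperMySensor)
+ * \endcode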
+ */
+
+/* -----------------------------------------------------------------------------
+ * Sensor-specific subclasses
+ */
+
+#ifndef __DOXYGEN__
+
+class CameraSensorHelperImx219 : public CameraSensorHelper
+{
+public:
+ CameraSensorHelperImx219()
+ {
+ analogueGainConstants_ = { AnalogueGainLinear, 0, 256, -1, 256 };
+ }
+};
+REGISTER_CAMERA_SENSOR_HELPER("imx219", CameraSensorHelperImx219)
+
+class CameraSensorHelperOv5670 : public CameraSensorHelper
+{
+public:
+ CameraSensorHelperOv5670()
+ {
+ analogueGainConstants_ = { AnalogueGainLinear, 1, 0, 0, 256 };
+ }
+};
+REGISTER_CAMERA_SENSOR_HELPER("ov5670", CameraSensorHelperOv5670)
+
+class CameraSensorHelperOv5693 : public CameraSensorHelper
+{
+public:
+ CameraSensorHelperOv5693()
+ {
+ analogueGainConstants_ = { AnalogueGainLinear, 1, 0, 0, 16 };
+ }
+};
+REGISTER_CAMERA_SENSOR_HELPER("ov5693", CameraSensorHelperOv5693)
+
+#endif /* __DOXYGEN__ */
+
+} /* namespace ipa */
+
+} /* namespace libcamera */
diff --git a/src/ipa/libipa/camera_sensor_helper.h b/src/ipa/libipa/camera_sensor_helper.h
new file mode 100644
index 00000000..a7e4ab3b
--- /dev/null
+++ b/src/ipa/libipa/camera_sensor_helper.h
@@ -0,0 +1,89 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_sensor_helper.h - Helper class that performs sensor-specific parameter computations
+ */
+#ifndef __LIBCAMERA_IPA_LIBIPA_CAMERA_SENSOR_HELPER_H__
+#define __LIBCAMERA_IPA_LIBIPA_CAMERA_SENSOR_HELPER_H__
+
+#include <stdint.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include <libcamera/base/class.h>
+
+namespace libcamera {
+
+namespace ipa {
+
+class CameraSensorHelper
+{
+public:
+ CameraSensorHelper() = default;
+ virtual ~CameraSensorHelper() = default;
+
+ virtual uint32_t gainCode(double gain) const;
+ virtual double gain(uint32_t gainCode) const;
+
+protected:
+ enum AnalogueGainType {
+ AnalogueGainLinear,
+ AnalogueGainExponential,
+ };
+
+ struct AnalogueGainConstants {
+ AnalogueGainType type;
+ int16_t m0;
+ int16_t c0;
+ int16_t m1;
+ int16_t c1;
+ };
+
+ AnalogueGainConstants analogueGainConstants_;
+
+private:
+ LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraSensorHelper)
+};
+
+class CameraSensorHelperFactory
+{
+public:
+ CameraSensorHelperFactory(const std::string name);
+ virtual ~CameraSensorHelperFactory() = default;
+
+ static std::unique_ptr<CameraSensorHelper> create(const std::string &name);
+
+ static void registerType(CameraSensorHelperFactory *factory);
+ static std::vector<CameraSensorHelperFactory *> &factories();
+
+protected:
+ virtual CameraSensorHelper *createInstance() = 0;
+
+private:
+ LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraSensorHelperFactory)
+
+ std::string name_;
+};
+
+#define REGISTER_CAMERA_SENSOR_HELPER(name, helper) \
+class helper##Factory final : public CameraSensorHelperFactory \
+{ \
+public: \
+ helper##Factory() : CameraSensorHelperFactory(name) {} \
+ \
+private: \
+ CameraSensorHelper *createInstance() \
+ { \
+ return new helper(); \
+ } \
+}; \
+static helper##Factory global_##helper##Factory;
+
+} /* namespace ipa */
+
+} /* namespace libcamera */
+
+#endif /* __LIBCAMERA_IPA_LIBIPA_CAMERA_SENSOR_HELPER_H__ */
diff --git a/src/ipa/libipa/histogram.cpp b/src/ipa/libipa/histogram.cpp
new file mode 100644
index 00000000..fb1eb2a2
--- /dev/null
+++ b/src/ipa/libipa/histogram.cpp
@@ -0,0 +1,153 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ *
+ * histogram.cpp - histogram calculations
+ */
+#include "histogram.h"
+
+#include <cmath>
+
+#include <libcamera/base/log.h>
+
+/**
+ * \file histogram.h
+ * \brief Class to represent Histograms and manipulate them
+ */
+
+namespace libcamera {
+
+namespace ipa {
+
+/**
+ * \class Histogram
+ * \brief The base class for creating histograms
+ *
+ * This class stores a cumulative frequency histogram, which is a mapping that
+ * counts the cumulative number of observations in all of the bins up to the
+ * specified bin. It can be used to find quantiles and averages between quantiles.
+ */
+
+/**
+ * \brief Create a cumulative histogram
+ * \param[in] data The histogram bin values, ordered by bin
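+ *
+ * For example, purely illustrative data of { 1, 2, 3 } produces the
+ * cumulative vector { 0, 1, 3, 6 }, so bins() returns 3 and total()
+ * returns 6:
+ *
+ * \code{.cpp}
+ * uint32_t data[] = { 1, 2, 3 };
+ * Histogram histogram{ Span<uint32_t>(data) };
+ * \endcode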
+ */
+Histogram::Histogram(Span<uint32_t> data)
+{
+ cumulative_.reserve(data.size() + 1);
+ cumulative_.push_back(0);
+ for (const uint32_t &value : data)
+ cumulative_.push_back(cumulative_.back() + value);
+}
+
+/**
+ * \fn Histogram::bins()
+ * \brief Retrieve the number of bins currently used by the Histogram
+ * \return Number of bins
+ */
+
+/**
+ * \fn Histogram::total()
+ * \brief Retrieve the total number of values in the data set
+ * \return Number of values
+ */
+
+/**
+ * \brief Cumulative frequency up to a (fractional) point in a bin.
+ * \param[in] bin The bin up to which to cumulate
+ *
+ * With F(p) the cumulative frequency of the histogram, the value is 0 at
+ * the bottom of the histogram, and the maximum is the total number of pixels.
+ * The pixels are spread evenly throughout the “bin” in which they lie, so that
+ * F(p) is a continuous (monotonically increasing) function.
+ *
+ * \return The cumulative frequency from 0 up to the specified bin
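+ *
+ * For the illustrative { 1, 2, 3 } histogram above,
+ * cumulativeFrequency(1.5) returns 1 + 0.5 * (3 - 1) = 2.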
+ */
+uint64_t Histogram::cumulativeFrequency(double bin) const
+{
+ if (bin <= 0)
+ return 0;
+ else if (bin >= bins())
+ return total();
+ int b = static_cast<int32_t>(bin);
+ return cumulative_[b] +
+ (bin - b) * (cumulative_[b + 1] - cumulative_[b]);
+}
+
+/**
+ * \brief Return the (fractional) bin of the point \a q in the histogram
+ * \param[in] q The desired point (0 <= q <= 1)
+ * \param[in] first Low limit of the search range (default: 0)
+ * \param[in] last High limit of the search range (default: UINT_MAX)
+ *
+ * A quantile gives us the point p = Q(q) in the range such that a proportion
+ * q of the pixels lie below p. A familiar quantile is Q(0.5) which is the median
+ * of a distribution.
+ *
+ * \return The fractional bin of the point
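+ *
+ * For the illustrative { 1, 2, 3 } histogram above, quantile(0.5) returns
+ * 2.0: half of the six pixels lie below bin 2.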
+ */
+double Histogram::quantile(double q, uint32_t first, uint32_t last) const
+{
+ if (last == UINT_MAX)
+ last = cumulative_.size() - 2;
+ ASSERT(first <= last);
+
+ uint64_t item = q * total();
+ /* Binary search to find the right bin */
+ while (first < last) {
+ int middle = (first + last) / 2;
+ /* Is it between first and middle? */
+ if (cumulative_[middle + 1] > item)
+ last = middle;
+ else
+ first = middle + 1;
+ }
+ ASSERT(item >= cumulative_[first] && item <= cumulative_[last + 1]);
+
+ double frac;
+ if (cumulative_[first + 1] == cumulative_[first])
+ frac = 0;
+ else
+ frac = static_cast<double>(item - cumulative_[first]) / (cumulative_[first + 1] - cumulative_[first]);
+ return first + frac;
+}
+
+/**
+ * \brief Calculate the mean between two quantiles
+ * \param[in] lowQuantile low Quantile
+ * \param[in] highQuantile high Quantile
+ *
+ * Quantiles are not ideal for metering as they suffer several limitations.
+ * Instead, a concept is introduced here: inter-quantile mean.
+ * It returns the mean of all pixels between lowQuantile and highQuantile.
+ *
+ * \return The mean histogram bin value between the two quantiles
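+ *
+ * For the illustrative { 1, 2, 3 } histogram above,
+ * interQuantileMean(0.0, 1.0) returns 11 / 6, or about 1.83, the mean
+ * position of all six pixels.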
+ */
+double Histogram::interQuantileMean(double lowQuantile, double highQuantile) const
+{
+ ASSERT(highQuantile > lowQuantile);
+ /* Proportion of pixels which lie below lowQuantile */
+ double lowPoint = quantile(lowQuantile);
+ /* Proportion of pixels which lie below highQuantile */
+ double highPoint = quantile(highQuantile, static_cast<uint32_t>(lowPoint));
+ double sumBinFreq = 0, cumulFreq = 0;
+
+ for (double p_next = floor(lowPoint) + 1.0;
+ p_next <= ceil(highPoint);
+ lowPoint = p_next, p_next += 1.0) {
+ int bin = floor(lowPoint);
+ double freq = (cumulative_[bin + 1] - cumulative_[bin])
+ * (std::min(p_next, highPoint) - lowPoint);
+
+ /* Accumulate the weighted bin */
+ sumBinFreq += bin * freq;
+ /* Accumulate weights */
+ cumulFreq += freq;
+ }
+ /* Add 0.5 to give an average for bin mid-points */
+ return sumBinFreq / cumulFreq + 0.5;
+}
+
+} /* namespace ipa */
+
+} /* namespace libcamera */
diff --git a/src/ipa/libipa/histogram.h b/src/ipa/libipa/histogram.h
new file mode 100644
index 00000000..c2761cb2
--- /dev/null
+++ b/src/ipa/libipa/histogram.h
@@ -0,0 +1,40 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ *
+ * histogram.h - histogram calculation interface
+ */
+#ifndef __LIBCAMERA_IPA_LIBIPA_HISTOGRAM_H__
+#define __LIBCAMERA_IPA_LIBIPA_HISTOGRAM_H__
+
+#include <assert.h>
+#include <limits.h>
+#include <stdint.h>
+
+#include <vector>
+
+#include <libcamera/base/span.h>
+
+namespace libcamera {
+
+namespace ipa {
+
+class Histogram
+{
+public:
+ Histogram(Span<uint32_t> data);
+ size_t bins() const { return cumulative_.size() - 1; }
+ uint64_t total() const { return cumulative_[cumulative_.size() - 1]; }
+ uint64_t cumulativeFrequency(double bin) const;
+ double quantile(double q, uint32_t first = 0, uint32_t last = UINT_MAX) const;
+ double interQuantileMean(double lowQuantile, double highQuantile) const;
+
+private:
+ std::vector<uint64_t> cumulative_;
+};
+
+} /* namespace ipa */
+
+} /* namespace libcamera */
+
+#endif /* __LIBCAMERA_IPA_LIBIPA_HISTOGRAM_H__ */
diff --git a/src/ipa/libipa/ipa_interface_wrapper.cpp b/src/ipa/libipa/ipa_interface_wrapper.cpp
deleted file mode 100644
index cee532e3..00000000
--- a/src/ipa/libipa/ipa_interface_wrapper.cpp
+++ /dev/null
@@ -1,285 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2019, Google Inc.
- *
- * ipa_interface_wrapper.cpp - Image Processing Algorithm interface wrapper
- */
-
-#include "ipa_interface_wrapper.h"
-
-#include <map>
-#include <string.h>
-#include <unistd.h>
-#include <vector>
-
-#include <libcamera/ipa/ipa_interface.h>
-
-#include "libcamera/internal/byte_stream_buffer.h"
-#include "libcamera/internal/camera_sensor.h"
-
-/**
- * \file ipa_interface_wrapper.h
- * \brief Image Processing Algorithm interface wrapper
- */
-
-namespace libcamera {
-
-/**
- * \class IPAInterfaceWrapper
- * \brief Wrap an IPAInterface and expose it as an ipa_context
- *
- * This class implements the ipa_context API based on a provided IPAInterface.
- * It helps IPAs that implement the IPAInterface API to provide the external
- * ipa_context API.
- *
- * To use the wrapper, an IPA module simple creates a new instance of its
- * IPAInterface implementation, and passes it to the constructor of the
- * IPAInterfaceWrapper. As IPAInterfaceWrapper inherits from ipa_context, the
- * constructed wrapper can then be directly returned from the IPA module's
- * ipaCreate() function.
- *
- * \code{.cpp}
- * class MyIPA : public IPAInterface
- * {
- * ...
- * };
- *
- * struct ipa_context *ipaCreate()
- * {
- * return new IPAInterfaceWrapper(std::make_unique<MyIPA>());
- * }
- * \endcode
- *
- * The wrapper takes ownership of the IPAInterface and will automatically
- * delete it when the wrapper is destroyed.
- */
-
-/**
- * \brief Construct an IPAInterfaceWrapper wrapping \a interface
- * \param[in] interface The interface to wrap
- */
-IPAInterfaceWrapper::IPAInterfaceWrapper(std::unique_ptr<IPAInterface> interface)
- : ipa_(std::move(interface)), callbacks_(nullptr), cb_ctx_(nullptr)
-{
- ops = &operations_;
-
- ipa_->queueFrameAction.connect(this, &IPAInterfaceWrapper::queueFrameAction);
-}
-
-void IPAInterfaceWrapper::destroy(struct ipa_context *_ctx)
-{
- IPAInterfaceWrapper *ctx = static_cast<IPAInterfaceWrapper *>(_ctx);
-
- delete ctx;
-}
-
-void *IPAInterfaceWrapper::get_interface(struct ipa_context *_ctx)
-{
- IPAInterfaceWrapper *ctx = static_cast<IPAInterfaceWrapper *>(_ctx);
-
- return ctx->ipa_.get();
-}
-
-void IPAInterfaceWrapper::init(struct ipa_context *_ctx,
- const struct ipa_settings *settings)
-{
- IPAInterfaceWrapper *ctx = static_cast<IPAInterfaceWrapper *>(_ctx);
-
- IPASettings ipaSettings{
- .configurationFile = settings->configuration_file
- };
- ctx->ipa_->init(ipaSettings);
-}
-
-int IPAInterfaceWrapper::start(struct ipa_context *_ctx)
-{
- IPAInterfaceWrapper *ctx = static_cast<IPAInterfaceWrapper *>(_ctx);
-
- return ctx->ipa_->start();
-}
-
-void IPAInterfaceWrapper::stop(struct ipa_context *_ctx)
-{
- IPAInterfaceWrapper *ctx = static_cast<IPAInterfaceWrapper *>(_ctx);
-
- ctx->ipa_->stop();
-}
-
-void IPAInterfaceWrapper::register_callbacks(struct ipa_context *_ctx,
- const struct ipa_callback_ops *callbacks,
- void *cb_ctx)
-{
- IPAInterfaceWrapper *ctx = static_cast<IPAInterfaceWrapper *>(_ctx);
-
- ctx->callbacks_ = callbacks;
- ctx->cb_ctx_ = cb_ctx;
-}
-
-void IPAInterfaceWrapper::configure(struct ipa_context *_ctx,
- const struct ipa_sensor_info *sensor_info,
- const struct ipa_stream *streams,
- unsigned int num_streams,
- const struct ipa_control_info_map *maps,
- unsigned int num_maps)
-{
- IPAInterfaceWrapper *ctx = static_cast<IPAInterfaceWrapper *>(_ctx);
-
- ctx->serializer_.reset();
-
- /* Translate the IPA sensor info. */
- CameraSensorInfo sensorInfo{};
- sensorInfo.model = sensor_info->model;
- sensorInfo.bitsPerPixel = sensor_info->bits_per_pixel;
- sensorInfo.activeAreaSize = { sensor_info->active_area.width,
- sensor_info->active_area.height };
- sensorInfo.analogCrop = { sensor_info->analog_crop.left,
- sensor_info->analog_crop.top,
- sensor_info->analog_crop.width,
- sensor_info->analog_crop.height };
- sensorInfo.outputSize = { sensor_info->output_size.width,
- sensor_info->output_size.height };
- sensorInfo.pixelRate = sensor_info->pixel_rate;
- sensorInfo.lineLength = sensor_info->line_length;
-
- /* Translate the IPA stream configurations map. */
- std::map<unsigned int, IPAStream> ipaStreams;
-
- for (unsigned int i = 0; i < num_streams; ++i) {
- const struct ipa_stream &stream = streams[i];
-
- ipaStreams[stream.id] = {
- stream.pixel_format,
- Size(stream.width, stream.height),
- };
- }
-
- /* Translate the IPA entity controls map. */
- std::map<unsigned int, const ControlInfoMap &> entityControls;
- std::map<unsigned int, ControlInfoMap> infoMaps;
-
- for (unsigned int i = 0; i < num_maps; ++i) {
- const struct ipa_control_info_map &ipa_map = maps[i];
- ByteStreamBuffer byteStream(ipa_map.data, ipa_map.size);
- unsigned int id = ipa_map.id;
-
- infoMaps[id] = ctx->serializer_.deserialize<ControlInfoMap>(byteStream);
- entityControls.emplace(id, infoMaps[id]);
- }
-
- /* \todo Translate the ipaConfig and result. */
- IPAOperationData ipaConfig;
- ctx->ipa_->configure(sensorInfo, ipaStreams, entityControls, ipaConfig,
- nullptr);
-}
-
-void IPAInterfaceWrapper::map_buffers(struct ipa_context *_ctx,
- const struct ipa_buffer *_buffers,
- size_t num_buffers)
-{
- IPAInterfaceWrapper *ctx = static_cast<IPAInterfaceWrapper *>(_ctx);
- std::vector<IPABuffer> buffers(num_buffers);
-
- for (unsigned int i = 0; i < num_buffers; ++i) {
- const struct ipa_buffer &_buffer = _buffers[i];
- IPABuffer &buffer = buffers[i];
- std::vector<FrameBuffer::Plane> &planes = buffer.planes;
-
- buffer.id = _buffer.id;
-
- planes.resize(_buffer.num_planes);
- for (unsigned int j = 0; j < _buffer.num_planes; ++j) {
- planes[j].fd = FileDescriptor(_buffer.planes[j].dmabuf);
- planes[j].length = _buffer.planes[j].length;
- }
- }
-
- ctx->ipa_->mapBuffers(buffers);
-}
-
-void IPAInterfaceWrapper::unmap_buffers(struct ipa_context *_ctx,
- const unsigned int *_ids,
- size_t num_buffers)
-{
- IPAInterfaceWrapper *ctx = static_cast<IPAInterfaceWrapper *>(_ctx);
- std::vector<unsigned int> ids(_ids, _ids + num_buffers);
- ctx->ipa_->unmapBuffers(ids);
-}
-
-void IPAInterfaceWrapper::process_event(struct ipa_context *_ctx,
- const struct ipa_operation_data *data)
-{
- IPAInterfaceWrapper *ctx = static_cast<IPAInterfaceWrapper *>(_ctx);
- IPAOperationData opData;
-
- opData.operation = data->operation;
-
- opData.data.resize(data->num_data);
- memcpy(opData.data.data(), data->data,
- data->num_data * sizeof(*data->data));
-
- opData.controls.resize(data->num_lists);
- for (unsigned int i = 0; i < data->num_lists; ++i) {
- const struct ipa_control_list *c_list = &data->lists[i];
- ByteStreamBuffer byteStream(c_list->data, c_list->size);
- opData.controls[i] = ctx->serializer_.deserialize<ControlList>(byteStream);
- }
-
- ctx->ipa_->processEvent(opData);
-}
-
-void IPAInterfaceWrapper::queueFrameAction(unsigned int frame,
- const IPAOperationData &data)
-{
- if (!callbacks_)
- return;
-
- struct ipa_operation_data c_data;
- c_data.operation = data.operation;
- c_data.data = data.data.data();
- c_data.num_data = data.data.size();
-
- struct ipa_control_list control_lists[data.controls.size()];
- c_data.lists = control_lists;
- c_data.num_lists = data.controls.size();
-
- std::size_t listsSize = 0;
- for (const auto &list : data.controls)
- listsSize += serializer_.binarySize(list);
-
- std::vector<uint8_t> binaryData(listsSize);
- ByteStreamBuffer byteStreamBuffer(binaryData.data(), listsSize);
-
- unsigned int i = 0;
- for (const auto &list : data.controls) {
- struct ipa_control_list &c_list = control_lists[i];
- c_list.size = serializer_.binarySize(list);
-
- ByteStreamBuffer b = byteStreamBuffer.carveOut(c_list.size);
- serializer_.serialize(list, b);
-
- c_list.data = b.base();
- }
-
- callbacks_->queue_frame_action(cb_ctx_, frame, c_data);
-}
-
-#ifndef __DOXYGEN__
-/*
- * This construct confuses Doygen and makes it believe that all members of the
- * operations is a member of IPAInterfaceWrapper. It must thus be hidden.
- */
-const struct ipa_context_ops IPAInterfaceWrapper::operations_ = {
- .destroy = &IPAInterfaceWrapper::destroy,
- .get_interface = &IPAInterfaceWrapper::get_interface,
- .init = &IPAInterfaceWrapper::init,
- .start = &IPAInterfaceWrapper::start,
- .stop = &IPAInterfaceWrapper::stop,
- .register_callbacks = &IPAInterfaceWrapper::register_callbacks,
- .configure = &IPAInterfaceWrapper::configure,
- .map_buffers = &IPAInterfaceWrapper::map_buffers,
- .unmap_buffers = &IPAInterfaceWrapper::unmap_buffers,
- .process_event = &IPAInterfaceWrapper::process_event,
-};
-#endif
-
-} /* namespace libcamera */
diff --git a/src/ipa/libipa/ipa_interface_wrapper.h b/src/ipa/libipa/ipa_interface_wrapper.h
deleted file mode 100644
index a1c70159..00000000
--- a/src/ipa/libipa/ipa_interface_wrapper.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2019, Google Inc.
- *
- * ipa_interface_wrapper.h - Image Processing Algorithm interface wrapper
- */
-#ifndef __LIBCAMERA_IPA_INTERFACE_WRAPPER_H__
-#define __LIBCAMERA_IPA_INTERFACE_WRAPPER_H__
-
-#include <memory>
-
-#include <libcamera/ipa/ipa_interface.h>
-
-#include "libcamera/internal/control_serializer.h"
-
-namespace libcamera {
-
-class IPAInterfaceWrapper : public ipa_context
-{
-public:
- IPAInterfaceWrapper(std::unique_ptr<IPAInterface> interface);
-
-private:
- static void destroy(struct ipa_context *ctx);
- static void *get_interface(struct ipa_context *ctx);
- static void init(struct ipa_context *ctx,
- const struct ipa_settings *settings);
- static int start(struct ipa_context *ctx);
- static void stop(struct ipa_context *ctx);
- static void register_callbacks(struct ipa_context *ctx,
- const struct ipa_callback_ops *callbacks,
- void *cb_ctx);
- static void configure(struct ipa_context *ctx,
- const struct ipa_sensor_info *sensor_info,
- const struct ipa_stream *streams,
- unsigned int num_streams,
- const struct ipa_control_info_map *maps,
- unsigned int num_maps);
- static void map_buffers(struct ipa_context *ctx,
- const struct ipa_buffer *c_buffers,
- size_t num_buffers);
- static void unmap_buffers(struct ipa_context *ctx,
- const unsigned int *ids,
- size_t num_buffers);
- static void process_event(struct ipa_context *ctx,
- const struct ipa_operation_data *data);
-
- static const struct ipa_context_ops operations_;
-
- void queueFrameAction(unsigned int frame, const IPAOperationData &data);
-
- std::unique_ptr<IPAInterface> ipa_;
- const struct ipa_callback_ops *callbacks_;
- void *cb_ctx_;
-
- ControlSerializer serializer_;
-};
-
-} /* namespace libcamera */
-
-#endif /* __LIBCAMERA_IPA_INTERFACE_WRAPPER_H__ */
diff --git a/src/ipa/libipa/meson.build b/src/ipa/libipa/meson.build
index 22626405..3fda7c00 100644
--- a/src/ipa/libipa/meson.build
+++ b/src/ipa/libipa/meson.build
@@ -1,15 +1,19 @@
# SPDX-License-Identifier: CC0-1.0
libipa_headers = files([
- 'ipa_interface_wrapper.h',
+ 'algorithm.h',
+ 'camera_sensor_helper.h',
+ 'histogram.h'
])
libipa_sources = files([
- 'ipa_interface_wrapper.cpp',
+ 'algorithm.cpp',
+ 'camera_sensor_helper.cpp',
+ 'histogram.cpp'
])
libipa_includes = include_directories('..')
-libipa = static_library('ipa', libipa_sources,
+libipa = static_library('ipa', [libipa_sources, libipa_headers],
include_directories : ipa_includes,
- dependencies : libcamera_dep)
+ dependencies : libcamera_private)
diff --git a/src/ipa/meson.build b/src/ipa/meson.build
index 5a5de267..e15a8a06 100644
--- a/src/ipa/meson.build
+++ b/src/ipa/meson.build
@@ -1,31 +1,40 @@
# SPDX-License-Identifier: CC0-1.0
-ipa_install_dir = join_paths(get_option('libdir'), 'libcamera')
-ipa_data_dir = join_paths(get_option('datadir'), 'libcamera', 'ipa')
-ipa_sysconf_dir = join_paths(get_option('sysconfdir'), 'libcamera', 'ipa')
-
ipa_includes = [
libcamera_includes,
]
+ipa_install_dir = libcamera_libdir
+ipa_data_dir = libcamera_datadir / 'ipa'
+ipa_sysconf_dir = libcamera_sysconfdir / 'ipa'
+
config_h.set('IPA_CONFIG_DIR',
- '"' + join_paths(get_option('prefix'), ipa_sysconf_dir) +
- ':' + join_paths(get_option('prefix'), ipa_data_dir) + '"')
+ '"' + get_option('prefix') / ipa_sysconf_dir +
+ ':' + get_option('prefix') / ipa_data_dir + '"')
config_h.set('IPA_MODULE_DIR',
- '"' + join_paths(get_option('prefix'), ipa_install_dir) + '"')
+ '"' + get_option('prefix') / ipa_install_dir + '"')
+
+summary({
+ 'IPA_CONFIG_DIR' : config_h.get('IPA_CONFIG_DIR'),
+ 'IPA_MODULE_DIR' : config_h.get('IPA_MODULE_DIR'),
+ }, section : 'Paths')
subdir('libipa')
ipa_sign = files('ipa-sign.sh')
-ipas = ['raspberrypi', 'rkisp1', 'vimc']
ipa_names = []
-foreach pipeline : get_option('pipelines')
- if ipas.contains(pipeline)
+ipa_modules = get_option('ipas')
+
+# The ipa-sign-install.sh script, which uses the ipa_names variable, will
+# itself prepend MESON_INSTALL_DESTDIR_PREFIX to each IPA module name;
+# therefore we must not include the prefix string here.
+foreach pipeline : pipelines
+ if ipa_modules.contains(pipeline)
subdir(pipeline)
- ipa_names += join_paths(ipa_install_dir, ipa_name + '.so')
+ ipa_names += ipa_install_dir / ipa_name + '.so'
endif
endforeach
diff --git a/src/ipa/raspberrypi/cam_helper.cpp b/src/ipa/raspberrypi/cam_helper.cpp
index c8ac3232..3c6afce7 100644
--- a/src/ipa/raspberrypi/cam_helper.cpp
+++ b/src/ipa/raspberrypi/cam_helper.cpp
@@ -17,6 +17,12 @@
#include "md_parser.hpp"
using namespace RPiController;
+using namespace libcamera;
+using libcamera::utils::Duration;
+
+namespace libcamera {
+LOG_DECLARE_CATEGORY(IPARPI)
+}
static std::map<std::string, CamHelperCreateFunc> cam_helpers;
@@ -34,37 +40,80 @@ CamHelper *CamHelper::Create(std::string const &cam_name)
return nullptr;
}
-CamHelper::CamHelper(MdParser *parser)
- : parser_(parser), initialized_(false)
+CamHelper::CamHelper(std::unique_ptr<MdParser> parser, unsigned int frameIntegrationDiff)
+ : parser_(std::move(parser)), initialized_(false),
+ frameIntegrationDiff_(frameIntegrationDiff)
{
}
CamHelper::~CamHelper()
{
- delete parser_;
}
-uint32_t CamHelper::ExposureLines(double exposure_us) const
+void CamHelper::Prepare(Span<const uint8_t> buffer,
+ Metadata &metadata)
+{
+ parseEmbeddedData(buffer, metadata);
+}
+
+void CamHelper::Process([[maybe_unused]] StatisticsPtr &stats,
+ [[maybe_unused]] Metadata &metadata)
+{
+}
+
+uint32_t CamHelper::ExposureLines(const Duration exposure) const
+{
+ assert(initialized_);
+ return exposure / mode_.line_length;
+}
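+
+/*
+ * Illustration with an assumed line length: at 33.33us per line,
+ * ExposureLines(10ms) returns 300 and, conversely, Exposure(300) returns
+ * approximately 10ms.
+ */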
+
+Duration CamHelper::Exposure(uint32_t exposure_lines) const
{
assert(initialized_);
- return exposure_us * 1000.0 / mode_.line_length;
+ return exposure_lines * mode_.line_length;
}
-double CamHelper::Exposure(uint32_t exposure_lines) const
+uint32_t CamHelper::GetVBlanking(Duration &exposure,
+ Duration minFrameDuration,
+ Duration maxFrameDuration) const
{
+ uint32_t frameLengthMin, frameLengthMax, vblank;
+ uint32_t exposureLines = ExposureLines(exposure);
+
assert(initialized_);
- return exposure_lines * mode_.line_length / 1000.0;
+
+ /*
+ * minFrameDuration and maxFrameDuration are clamped by the caller
+ * based on the limits for the active sensor mode.
+ */
+ frameLengthMin = minFrameDuration / mode_.line_length;
+ frameLengthMax = maxFrameDuration / mode_.line_length;
+
+ /*
+ * Limit the exposure to the maximum frame duration requested, and
+ * re-calculate if it has been clipped.
+ */
+ exposureLines = std::min(frameLengthMax - frameIntegrationDiff_, exposureLines);
+ exposure = Exposure(exposureLines);
+
+ /* Limit the vblank to the range allowed by the frame length limits. */
+ vblank = std::clamp(exposureLines + frameIntegrationDiff_,
+ frameLengthMin, frameLengthMax) - mode_.height;
+ return vblank;
}
void CamHelper::SetCameraMode(const CameraMode &mode)
{
mode_ = mode;
- parser_->SetBitsPerPixel(mode.bitdepth);
- parser_->SetLineLengthBytes(0); /* We use SetBufferSize. */
+ if (parser_) {
+ parser_->SetBitsPerPixel(mode.bitdepth);
+ parser_->SetLineLengthBytes(0); /* We use SetBufferSize. */
+ }
initialized_ = true;
}
-void CamHelper::GetDelays(int &exposure_delay, int &gain_delay) const
+void CamHelper::GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const
{
/*
* These values are correct for many sensors. Other sensors will
@@ -72,6 +121,7 @@ void CamHelper::GetDelays(int &exposure_delay, int &gain_delay) const
*/
exposure_delay = 2;
gain_delay = 1;
+ vblank_delay = 2;
}
bool CamHelper::SensorEmbeddedDataPresent() const
@@ -82,10 +132,10 @@ bool CamHelper::SensorEmbeddedDataPresent() const
unsigned int CamHelper::HideFramesStartup() const
{
/*
- * By default, hide 6 frames completely at start-up while AGC etc. sort
- * themselves out (converge).
+ * The number of frames to hide when a camera first starts, as they
+ * are invalid in some way.
*/
- return 6;
+ return 0;
}
unsigned int CamHelper::HideFramesModeSwitch() const
@@ -106,6 +156,49 @@ unsigned int CamHelper::MistrustFramesModeSwitch() const
return 0;
}
+void CamHelper::parseEmbeddedData(Span<const uint8_t> buffer,
+ Metadata &metadata)
+{
+ MdParser::RegisterMap registers;
+ Metadata parsedMetadata;
+
+ if (buffer.empty())
+ return;
+
+ if (parser_->Parse(buffer, registers) != MdParser::Status::OK) {
+ LOG(IPARPI, Error) << "Embedded data buffer parsing failed";
+ return;
+ }
+
+ PopulateMetadata(registers, parsedMetadata);
+ metadata.Merge(parsedMetadata);
+
+ /*
+ * Overwrite the exposure/gain values in the existing DeviceStatus with
+ * values from the parsed embedded buffer. Fetch it first in case any
+ * other fields were set meaningfully.
+ */
+ DeviceStatus deviceStatus, parsedDeviceStatus;
+ if (metadata.Get("device.status", deviceStatus) ||
+ parsedMetadata.Get("device.status", parsedDeviceStatus)) {
+ LOG(IPARPI, Error) << "DeviceStatus not found";
+ return;
+ }
+
+ deviceStatus.shutter_speed = parsedDeviceStatus.shutter_speed;
+ deviceStatus.analogue_gain = parsedDeviceStatus.analogue_gain;
+ deviceStatus.frame_length = parsedDeviceStatus.frame_length;
+
+ LOG(IPARPI, Debug) << "Metadata updated - " << deviceStatus;
+
+ metadata.Set("device.status", deviceStatus);
+}
+
+void CamHelper::PopulateMetadata([[maybe_unused]] const MdParser::RegisterMap &registers,
+ [[maybe_unused]] Metadata &metadata) const
+{
+}
+
RegisterCamHelper::RegisterCamHelper(char const *cam_name,
CamHelperCreateFunc create_func)
{
diff --git a/src/ipa/raspberrypi/cam_helper.hpp b/src/ipa/raspberrypi/cam_helper.hpp
index 044c2866..200cc83f 100644
--- a/src/ipa/raspberrypi/cam_helper.hpp
+++ b/src/ipa/raspberrypi/cam_helper.hpp
@@ -6,9 +6,15 @@
*/
#pragma once
+#include <memory>
#include <string>
+#include <libcamera/base/span.h>
+#include <libcamera/base/utils.h>
+
#include "camera_mode.h"
+#include "controller/controller.hpp"
+#include "controller/metadata.hpp"
#include "md_parser.hpp"
#include "libcamera/internal/v4l2_videodevice.h"
@@ -16,8 +22,8 @@
namespace RPiController {
// The CamHelper class provides a number of facilities that anyone trying
-// trying to drive a camera will need to know, but which are not provided by
-// by the standard driver framework. Specifically, it provides:
+// to drive a camera will need to know, but which are not provided by the
+// standard driver framework. Specifically, it provides:
//
// A "CameraMode" structure to describe extra information about the chosen
// mode of the driver. For example, how it is cropped from the full sensor
@@ -28,14 +34,14 @@ namespace RPiController {
// exposure time, and to convert between the sensor's gain codes and actual
// gains.
//
-// A method to return the number of frames of delay between updating exposure
-// and analogue gain and the changes taking effect. For many sensors these
-// take the values 2 and 1 respectively, but sensors that are different will
-// need to over-ride the default method provided.
+// A method to return the number of frames of delay between updating exposure,
+// analogue gain and vblanking, and for the changes to take effect. For many
+// sensors these take the values 2, 1 and 2 respectively, but sensors that are
+// different will need to over-ride the default method provided.
//
// A method to query if the sensor outputs embedded data that can be parsed.
//
-// A parser to parse the metadata buffers provided by some sensors (for
+// A parser to parse the embedded data buffers provided by some sensors (for
// example, the imx219 does; the ov5647 doesn't). This allows us to know for
// sure the exposure and gain of the frame we're looking at. CamHelper
// provides methods for converting analogue gains to and from the sensor's
@@ -62,24 +68,43 @@ class CamHelper
{
public:
static CamHelper *Create(std::string const &cam_name);
- CamHelper(MdParser *parser);
+ CamHelper(std::unique_ptr<MdParser> parser, unsigned int frameIntegrationDiff);
virtual ~CamHelper();
void SetCameraMode(const CameraMode &mode);
- MdParser &Parser() const { return *parser_; }
- uint32_t ExposureLines(double exposure_us) const;
- double Exposure(uint32_t exposure_lines) const; // in us
+ virtual void Prepare(libcamera::Span<const uint8_t> buffer,
+ Metadata &metadata);
+ virtual void Process(StatisticsPtr &stats, Metadata &metadata);
+ uint32_t ExposureLines(libcamera::utils::Duration exposure) const;
+ libcamera::utils::Duration Exposure(uint32_t exposure_lines) const;
+ virtual uint32_t GetVBlanking(libcamera::utils::Duration &exposure,
+ libcamera::utils::Duration minFrameDuration,
+ libcamera::utils::Duration maxFrameDuration) const;
virtual uint32_t GainCode(double gain) const = 0;
virtual double Gain(uint32_t gain_code) const = 0;
- virtual void GetDelays(int &exposure_delay, int &gain_delay) const;
+ virtual void GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const;
virtual bool SensorEmbeddedDataPresent() const;
virtual unsigned int HideFramesStartup() const;
virtual unsigned int HideFramesModeSwitch() const;
virtual unsigned int MistrustFramesStartup() const;
virtual unsigned int MistrustFramesModeSwitch() const;
+
protected:
- MdParser *parser_;
+ void parseEmbeddedData(libcamera::Span<const uint8_t> buffer,
+ Metadata &metadata);
+ virtual void PopulateMetadata(const MdParser::RegisterMap &registers,
+ Metadata &metadata) const;
+
+ std::unique_ptr<MdParser> parser_;
CameraMode mode_;
+
+private:
bool initialized_;
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ unsigned int frameIntegrationDiff_;
};
// This is for registering camera helpers with the system, so that the
diff --git a/src/ipa/raspberrypi/cam_helper_imx219.cpp b/src/ipa/raspberrypi/cam_helper_imx219.cpp
index db8ab879..a3caab71 100644
--- a/src/ipa/raspberrypi/cam_helper_imx219.cpp
+++ b/src/ipa/raspberrypi/cam_helper_imx219.cpp
@@ -11,35 +11,29 @@
#include <stdlib.h>
/*
- * We have observed the imx219 embedded data stream randomly return junk
- * reister values. Do not rely on embedded data until this has been resolved.
+ * We have observed that the imx219 embedded data stream randomly returns junk
+ * register values. Do not rely on embedded data until this has been resolved.
*/
#define ENABLE_EMBEDDED_DATA 0
#include "cam_helper.hpp"
#if ENABLE_EMBEDDED_DATA
#include "md_parser.hpp"
-#else
-#include "md_parser_rpi.hpp"
#endif
using namespace RPiController;
-/* Metadata parser implementation specific to Sony IMX219 sensors. */
-
-class MdParserImx219 : public MdParserSmia
-{
-public:
- MdParserImx219();
- Status Parse(void *data) override;
- Status GetExposureLines(unsigned int &lines) override;
- Status GetGainCode(unsigned int &gain_code) override;
-private:
- /* Offset of the register's value in the metadata block. */
- int reg_offsets_[3];
- /* Value of the register, once read from the metadata block. */
- int reg_values_[3];
-};
+/*
+ * We care about one gain register, a pair of exposure registers and a pair
+ * of frame length registers. Their I2C addresses from the Sony IMX219
+ * datasheet:
+ */
+constexpr uint32_t gainReg = 0x157;
+constexpr uint32_t expHiReg = 0x15a;
+constexpr uint32_t expLoReg = 0x15b;
+constexpr uint32_t frameLengthHiReg = 0x160;
+constexpr uint32_t frameLengthLoReg = 0x161;
+constexpr std::initializer_list<uint32_t> registerList [[maybe_unused]]
+ = { expHiReg, expLoReg, gainReg, frameLengthHiReg, frameLengthLoReg };
class CamHelperImx219 : public CamHelper
{
@@ -49,13 +43,23 @@ public:
double Gain(uint32_t gain_code) const override;
unsigned int MistrustFramesModeSwitch() const override;
bool SensorEmbeddedDataPresent() const override;
+
+private:
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ static constexpr int frameIntegrationDiff = 4;
+
+ void PopulateMetadata(const MdParser::RegisterMap &registers,
+ Metadata &metadata) const override;
};
CamHelperImx219::CamHelperImx219()
#if ENABLE_EMBEDDED_DATA
- : CamHelper(new MdParserImx219())
+ : CamHelper(std::make_unique<MdParserSmia>(registerList), frameIntegrationDiff)
#else
- : CamHelper(new MdParserRPi())
+ : CamHelper({}, frameIntegrationDiff)
#endif
{
}
@@ -85,89 +89,21 @@ bool CamHelperImx219::SensorEmbeddedDataPresent() const
return ENABLE_EMBEDDED_DATA;
}
-static CamHelper *Create()
+void CamHelperImx219::PopulateMetadata(const MdParser::RegisterMap &registers,
+ Metadata &metadata) const
{
- return new CamHelperImx219();
-}
-
-static RegisterCamHelper reg("imx219", &Create);
+ DeviceStatus deviceStatus;
-/*
- * We care about one gain register and a pair of exposure registers. Their I2C
- * addresses from the Sony IMX219 datasheet:
- */
-#define GAIN_REG 0x157
-#define EXPHI_REG 0x15A
-#define EXPLO_REG 0x15B
-
-/*
- * Index of each into the reg_offsets and reg_values arrays. Must be in
- * register address order.
- */
-#define GAIN_INDEX 0
-#define EXPHI_INDEX 1
-#define EXPLO_INDEX 2
-
-MdParserImx219::MdParserImx219()
-{
- reg_offsets_[0] = reg_offsets_[1] = reg_offsets_[2] = -1;
-}
+ deviceStatus.shutter_speed = Exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
+ deviceStatus.analogue_gain = Gain(registers.at(gainReg));
+ deviceStatus.frame_length = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
-MdParser::Status MdParserImx219::Parse(void *data)
-{
- bool try_again = false;
-
- if (reset_) {
- /*
- * Search again through the metadata for the gain and exposure
- * registers.
- */
- assert(bits_per_pixel_);
- assert(num_lines_ || buffer_size_bytes_);
- /* Need to be ordered */
- uint32_t regs[3] = { GAIN_REG, EXPHI_REG, EXPLO_REG };
- reg_offsets_[0] = reg_offsets_[1] = reg_offsets_[2] = -1;
- int ret = static_cast<int>(findRegs(static_cast<uint8_t *>(data),
- regs, reg_offsets_, 3));
- /*
- * > 0 means "worked partially but parse again next time",
- * < 0 means "hard error".
- */
- if (ret > 0)
- try_again = true;
- else if (ret < 0)
- return ERROR;
- }
-
- for (int i = 0; i < 3; i++) {
- if (reg_offsets_[i] == -1)
- continue;
-
- reg_values_[i] = static_cast<uint8_t *>(data)[reg_offsets_[i]];
- }
-
- /* Re-parse next time if we were unhappy in some way. */
- reset_ = try_again;
-
- return OK;
+ metadata.Set("device.status", deviceStatus);
}
-MdParser::Status MdParserImx219::GetExposureLines(unsigned int &lines)
+static CamHelper *Create()
{
- if (reg_offsets_[EXPHI_INDEX] == -1 || reg_offsets_[EXPLO_INDEX] == -1)
- return NOTFOUND;
-
- lines = reg_values_[EXPHI_INDEX] * 256 + reg_values_[EXPLO_INDEX];
-
- return OK;
+ return new CamHelperImx219();
}
-MdParser::Status MdParserImx219::GetGainCode(unsigned int &gain_code)
-{
- if (reg_offsets_[GAIN_INDEX] == -1)
- return NOTFOUND;
-
- gain_code = reg_values_[GAIN_INDEX];
-
- return OK;
-}
+static RegisterCamHelper reg("imx219", &Create);
diff --git a/src/ipa/raspberrypi/cam_helper_imx290.cpp b/src/ipa/raspberrypi/cam_helper_imx290.cpp
new file mode 100644
index 00000000..871c1f8e
--- /dev/null
+++ b/src/ipa/raspberrypi/cam_helper_imx290.cpp
@@ -0,0 +1,67 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2021, Raspberry Pi (Trading) Limited
+ *
+ * cam_helper_imx290.cpp - camera helper for imx290 sensor
+ */
+
+#include <math.h>
+
+#include "cam_helper.hpp"
+
+using namespace RPiController;
+
+class CamHelperImx290 : public CamHelper
+{
+public:
+ CamHelperImx290();
+ uint32_t GainCode(double gain) const override;
+ double Gain(uint32_t gain_code) const override;
+ void GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const override;
+ unsigned int HideFramesModeSwitch() const override;
+
+private:
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ static constexpr int frameIntegrationDiff = 2;
+};
+
+CamHelperImx290::CamHelperImx290()
+ : CamHelper({}, frameIntegrationDiff)
+{
+}
+
+uint32_t CamHelperImx290::GainCode(double gain) const
+{
+ int code = 66.6667 * log10(gain);
+ return std::max(0, std::min(code, 0xf0));
+}
+
+double CamHelperImx290::Gain(uint32_t gain_code) const
+{
+ return pow(10, 0.015 * gain_code);
+}
+
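+/*
+ * Note that the pair above is only approximately inverse (illustrative
+ * check): GainCode(2.0) computes 66.6667 * log10(2.0) and truncates it
+ * to 20, while Gain(20) returns 10^(0.015 * 20), approximately 1.995.
+ */
+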
+void CamHelperImx290::GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const
+{
+ exposure_delay = 2;
+ gain_delay = 2;
+ vblank_delay = 2;
+}
+
+unsigned int CamHelperImx290::HideFramesModeSwitch() const
+{
+ /* After a mode switch, we seem to get 1 bad frame. */
+ return 1;
+}
+
+static CamHelper *Create()
+{
+ return new CamHelperImx290();
+}
+
+static RegisterCamHelper reg("imx290", &Create);
diff --git a/src/ipa/raspberrypi/cam_helper_imx477.cpp b/src/ipa/raspberrypi/cam_helper_imx477.cpp
index 0e896ac7..338fdc0c 100644
--- a/src/ipa/raspberrypi/cam_helper_imx477.cpp
+++ b/src/ipa/raspberrypi/cam_helper_imx477.cpp
@@ -6,30 +6,36 @@
*/
#include <assert.h>
+#include <cmath>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
+#include <libcamera/base/log.h>
+
#include "cam_helper.hpp"
#include "md_parser.hpp"
using namespace RPiController;
+using namespace libcamera;
+using libcamera::utils::Duration;
-/* Metadata parser implementation specific to Sony IMX477 sensors. */
+namespace libcamera {
+LOG_DECLARE_CATEGORY(IPARPI)
+}
-class MdParserImx477 : public MdParserSmia
-{
-public:
- MdParserImx477();
- Status Parse(void *data) override;
- Status GetExposureLines(unsigned int &lines) override;
- Status GetGainCode(unsigned int &gain_code) override;
-private:
- /* Offset of the register's value in the metadata block. */
- int reg_offsets_[4];
- /* Value of the register, once read from the metadata block. */
- int reg_values_[4];
-};
+/*
+ * We care about a pair of exposure registers, a pair of gain registers and
+ * a pair of frame length registers. Their I2C addresses from the Sony
+ * IMX477 datasheet:
+ */
+constexpr uint32_t expHiReg = 0x0202;
+constexpr uint32_t expLoReg = 0x0203;
+constexpr uint32_t gainHiReg = 0x0204;
+constexpr uint32_t gainLoReg = 0x0205;
+constexpr uint32_t frameLengthHiReg = 0x0340;
+constexpr uint32_t frameLengthLoReg = 0x0341;
+constexpr std::initializer_list<uint32_t> registerList =
+ { expHiReg, expLoReg, gainHiReg, gainLoReg, frameLengthHiReg, frameLengthLoReg };
class CamHelperImx477 : public CamHelper
{
@@ -37,11 +43,30 @@ public:
CamHelperImx477();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
+ void Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
+ uint32_t GetVBlanking(Duration &exposure, Duration minFrameDuration,
+ Duration maxFrameDuration) const override;
+ void GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const override;
bool SensorEmbeddedDataPresent() const override;
+
+private:
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ static constexpr int frameIntegrationDiff = 22;
+ /* Maximum frame length allowable for long exposure calculations. */
+ static constexpr int frameLengthMax = 0xffdc;
+ /* Largest long exposure scale factor given as a left shift on the frame length. */
+ static constexpr int longExposureShiftMax = 7;
+
+ void PopulateMetadata(const MdParser::RegisterMap &registers,
+ Metadata &metadata) const override;
};
CamHelperImx477::CamHelperImx477()
- : CamHelper(new MdParserImx477())
+ : CamHelper(std::make_unique<MdParserSmia>(registerList), frameIntegrationDiff)
{
}
@@ -55,101 +80,104 @@ double CamHelperImx477::Gain(uint32_t gain_code) const
return 1024.0 / (1024 - gain_code);
}
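Two spot checks of the mapping above, where the integer arithmetic is exact:

	static_assert(1024 / (1024 - 512) == 2, "gain code 512 gives 2x");
	static_assert(1024 / (1024 - 960) == 16, "gain code 960 gives 16x");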
-bool CamHelperImx477::SensorEmbeddedDataPresent() const
-{
- return true;
-}
-
-static CamHelper *Create()
+void CamHelperImx477::Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
{
- return new CamHelperImx477();
-}
-
-static RegisterCamHelper reg("imx477", &Create);
-
-/*
- * We care about two gain registers and a pair of exposure registers. Their
- * I2C addresses from the Sony IMX477 datasheet:
- */
-#define EXPHI_REG 0x0202
-#define EXPLO_REG 0x0203
-#define GAINHI_REG 0x0204
-#define GAINLO_REG 0x0205
+ MdParser::RegisterMap registers;
+ DeviceStatus deviceStatus;
-/*
- * Index of each into the reg_offsets and reg_values arrays. Must be in register
- * address order.
- */
-#define EXPHI_INDEX 0
-#define EXPLO_INDEX 1
-#define GAINHI_INDEX 2
-#define GAINLO_INDEX 3
+ if (metadata.Get("device.status", deviceStatus)) {
+ LOG(IPARPI, Error) << "DeviceStatus not found from DelayedControls";
+ return;
+ }
-MdParserImx477::MdParserImx477()
-{
- reg_offsets_[0] = reg_offsets_[1] = reg_offsets_[2] = reg_offsets_[3] = -1;
+ parseEmbeddedData(buffer, metadata);
+
+ /*
+	 * The DeviceStatus struct is first populated with values obtained from
+	 * DelayedControls. If the reported frame length is greater than
+	 * frameLengthMax, we are using a long exposure mode. Since the long
+	 * exposure scale factor is not returned through the embedded data, we
+	 * must rely on the exposure lines and frame length values returned by
+	 * DelayedControls.
+ *
+ * Otherwise, all values are updated with what is reported in the
+ * embedded data.
+ */
+ if (deviceStatus.frame_length > frameLengthMax) {
+ DeviceStatus parsedDeviceStatus;
+
+ metadata.Get("device.status", parsedDeviceStatus);
+ parsedDeviceStatus.shutter_speed = deviceStatus.shutter_speed;
+ parsedDeviceStatus.frame_length = deviceStatus.frame_length;
+ metadata.Set("device.status", parsedDeviceStatus);
+
+ LOG(IPARPI, Debug) << "Metadata updated for long exposure: "
+ << parsedDeviceStatus;
+ }
}
-MdParser::Status MdParserImx477::Parse(void *data)
+uint32_t CamHelperImx477::GetVBlanking(Duration &exposure,
+ Duration minFrameDuration,
+ Duration maxFrameDuration) const
{
- bool try_again = false;
-
- if (reset_) {
- /*
- * Search again through the metadata for the gain and exposure
- * registers.
- */
- assert(bits_per_pixel_);
- assert(num_lines_ || buffer_size_bytes_);
- /* Need to be ordered */
- uint32_t regs[4] = {
- EXPHI_REG,
- EXPLO_REG,
- GAINHI_REG,
- GAINLO_REG
- };
- reg_offsets_[0] = reg_offsets_[1] = reg_offsets_[2] = reg_offsets_[3] = -1;
- int ret = static_cast<int>(findRegs(static_cast<uint8_t *>(data),
- regs, reg_offsets_, 4));
- /*
- * > 0 means "worked partially but parse again next time",
- * < 0 means "hard error".
- */
- if (ret > 0)
- try_again = true;
- else if (ret < 0)
- return ERROR;
+ uint32_t frameLength, exposureLines;
+ unsigned int shift = 0;
+
+ frameLength = mode_.height + CamHelper::GetVBlanking(exposure, minFrameDuration,
+ maxFrameDuration);
+ /*
+	 * Check if the calculated frame length needs to be set up for long
+	 * exposure mode. This requires the use of a long exposure scale factor,
+	 * applied as a shift operation in the sensor.
+ */
+ while (frameLength > frameLengthMax) {
+ if (++shift > longExposureShiftMax) {
+ shift = longExposureShiftMax;
+ frameLength = frameLengthMax;
+ break;
+ }
+ frameLength >>= 1;
}
- for (int i = 0; i < 4; i++) {
- if (reg_offsets_[i] == -1)
- continue;
-
- reg_values_[i] = static_cast<uint8_t *>(data)[reg_offsets_[i]];
+ if (shift) {
+ /* Account for any rounding in the scaled frame length value. */
+ frameLength <<= shift;
+ exposureLines = ExposureLines(exposure);
+ exposureLines = std::min(exposureLines, frameLength - frameIntegrationDiff);
+ exposure = Exposure(exposureLines);
}
- /* Re-parse next time if we were unhappy in some way. */
- reset_ = try_again;
-
- return OK;
+ return frameLength - mode_.height;
}
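A standalone sketch of the shift search above, assuming the same frameLengthMax and longExposureShiftMax constants; it returns the frame length to program together with the scale shift the sensor must apply:

	#include <cstdint>
	#include <utility>

	std::pair<uint32_t, unsigned int> longExposureShift(uint32_t frameLength)
	{
		constexpr uint32_t frameLengthMax = 0xffdc;
		constexpr unsigned int longExposureShiftMax = 7;
		unsigned int shift = 0;

		while (frameLength > frameLengthMax) {
			if (++shift > longExposureShiftMax) {
				shift = longExposureShiftMax;
				frameLength = frameLengthMax;
				break;
			}
			frameLength >>= 1;
		}

		/* e.g. 200000 lines -> { 50000, 2 }: program 50000, sensor scales by 4. */
		return { frameLength, shift };
	}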
-MdParser::Status MdParserImx477::GetExposureLines(unsigned int &lines)
+void CamHelperImx477::GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const
{
- if (reg_offsets_[EXPHI_INDEX] == -1 || reg_offsets_[EXPLO_INDEX] == -1)
- return NOTFOUND;
-
- lines = reg_values_[EXPHI_INDEX] * 256 + reg_values_[EXPLO_INDEX];
+ exposure_delay = 2;
+ gain_delay = 2;
+ vblank_delay = 3;
+}
- return OK;
+bool CamHelperImx477::SensorEmbeddedDataPresent() const
+{
+ return true;
}
-MdParser::Status MdParserImx477::GetGainCode(unsigned int &gain_code)
+void CamHelperImx477::PopulateMetadata(const MdParser::RegisterMap &registers,
+ Metadata &metadata) const
{
- if (reg_offsets_[GAINHI_INDEX] == -1 || reg_offsets_[GAINLO_INDEX] == -1)
- return NOTFOUND;
+ DeviceStatus deviceStatus;
- gain_code = reg_values_[GAINHI_INDEX] * 256 + reg_values_[GAINLO_INDEX];
+ deviceStatus.shutter_speed = Exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
+ deviceStatus.analogue_gain = Gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
+ deviceStatus.frame_length = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
- return OK;
+ metadata.Set("device.status", deviceStatus);
}
+
+static CamHelper *Create()
+{
+ return new CamHelperImx477();
+}
+
+static RegisterCamHelper reg("imx477", &Create);
diff --git a/src/ipa/raspberrypi/cam_helper_ov5647.cpp b/src/ipa/raspberrypi/cam_helper_ov5647.cpp
index dc5d8275..702c2d07 100644
--- a/src/ipa/raspberrypi/cam_helper_ov5647.cpp
+++ b/src/ipa/raspberrypi/cam_helper_ov5647.cpp
@@ -8,7 +8,6 @@
#include <assert.h>
#include "cam_helper.hpp"
-#include "md_parser_rpi.hpp"
using namespace RPiController;
@@ -18,10 +17,19 @@ public:
CamHelperOv5647();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
- void GetDelays(int &exposure_delay, int &gain_delay) const override;
+ void GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const override;
+ unsigned int HideFramesStartup() const override;
unsigned int HideFramesModeSwitch() const override;
unsigned int MistrustFramesStartup() const override;
unsigned int MistrustFramesModeSwitch() const override;
+
+private:
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ static constexpr int frameIntegrationDiff = 4;
};
/*
@@ -30,7 +38,7 @@ public:
*/
CamHelperOv5647::CamHelperOv5647()
- : CamHelper(new MdParserRPi())
+ : CamHelper({}, frameIntegrationDiff)
{
}
@@ -44,7 +52,8 @@ double CamHelperOv5647::Gain(uint32_t gain_code) const
return static_cast<double>(gain_code) / 16.0;
}
-void CamHelperOv5647::GetDelays(int &exposure_delay, int &gain_delay) const
+void CamHelperOv5647::GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const
{
/*
* We run this sensor in a mode where the gain delay is bumped up to
@@ -52,6 +61,16 @@ void CamHelperOv5647::GetDelays(int &exposure_delay, int &gain_delay) const
*/
exposure_delay = 2;
gain_delay = 2;
+ vblank_delay = 2;
+}
+
+unsigned int CamHelperOv5647::HideFramesStartup() const
+{
+ /*
+ * On startup, we get a couple of under-exposed frames which
+ * we don't want shown.
+ */
+ return 2;
}
unsigned int CamHelperOv5647::HideFramesModeSwitch() const
diff --git a/src/ipa/raspberrypi/cam_helper_ov9281.cpp b/src/ipa/raspberrypi/cam_helper_ov9281.cpp
new file mode 100644
index 00000000..9de868c3
--- /dev/null
+++ b/src/ipa/raspberrypi/cam_helper_ov9281.cpp
@@ -0,0 +1,65 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2021, Raspberry Pi (Trading) Limited
+ *
+ * cam_helper_ov9281.cpp - camera helper for ov9281 sensor
+ */
+
+#include <assert.h>
+
+#include "cam_helper.hpp"
+
+using namespace RPiController;
+
+class CamHelperOv9281 : public CamHelper
+{
+public:
+ CamHelperOv9281();
+ uint32_t GainCode(double gain) const override;
+ double Gain(uint32_t gain_code) const override;
+ void GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const override;
+
+private:
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ static constexpr int frameIntegrationDiff = 4;
+};
+
+/*
+ * OV9281 doesn't output embedded metadata, so no metadata parser is passed
+ * to the base class.
+ */
+
+CamHelperOv9281::CamHelperOv9281()
+ : CamHelper({}, frameIntegrationDiff)
+{
+}
+
+uint32_t CamHelperOv9281::GainCode(double gain) const
+{
+ return static_cast<uint32_t>(gain * 16.0);
+}
+
+double CamHelperOv9281::Gain(uint32_t gain_code) const
+{
+ return static_cast<double>(gain_code) / 16.0;
+}
+
+void CamHelperOv9281::GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const
+{
+ /* The driver appears to behave as follows: */
+ exposure_delay = 2;
+ gain_delay = 2;
+ vblank_delay = 2;
+}
+
+static CamHelper *Create()
+{
+ return new CamHelperOv9281();
+}
+
+static RegisterCamHelper reg("ov9281", &Create);
diff --git a/src/ipa/raspberrypi/controller/agc_algorithm.hpp b/src/ipa/raspberrypi/controller/agc_algorithm.hpp
index b4ea54fb..61595ea2 100644
--- a/src/ipa/raspberrypi/controller/agc_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/agc_algorithm.hpp
@@ -6,6 +6,8 @@
*/
#pragma once
+#include <libcamera/base/utils.h>
+
#include "algorithm.hpp"
namespace RPiController {
@@ -15,9 +17,11 @@ class AgcAlgorithm : public Algorithm
public:
AgcAlgorithm(Controller *controller) : Algorithm(controller) {}
// An AGC algorithm must provide the following:
+ virtual unsigned int GetConvergenceFrames() const = 0;
virtual void SetEv(double ev) = 0;
- virtual void SetFlickerPeriod(double flicker_period) = 0;
- virtual void SetFixedShutter(double fixed_shutter) = 0; // microseconds
+ virtual void SetFlickerPeriod(libcamera::utils::Duration flicker_period) = 0;
+ virtual void SetFixedShutter(libcamera::utils::Duration fixed_shutter) = 0;
+ virtual void SetMaxShutter(libcamera::utils::Duration max_shutter) = 0;
virtual void SetFixedAnalogueGain(double fixed_analogue_gain) = 0;
virtual void SetMeteringMode(std::string const &metering_mode_name) = 0;
virtual void SetExposureMode(std::string const &exposure_mode_name) = 0;
diff --git a/src/ipa/raspberrypi/controller/agc_status.h b/src/ipa/raspberrypi/controller/agc_status.h
index 10381c90..20cb1b62 100644
--- a/src/ipa/raspberrypi/controller/agc_status.h
+++ b/src/ipa/raspberrypi/controller/agc_status.h
@@ -6,6 +6,8 @@
*/
#pragma once
+#include <libcamera/base/utils.h>
+
// The AGC algorithm should post the following structure into the image's
// "agc.status" metadata.
@@ -18,17 +20,17 @@ extern "C" {
// ignored until then.
struct AgcStatus {
- double total_exposure_value; // value for all exposure and gain for this image
- double target_exposure_value; // (unfiltered) target total exposure AGC is aiming for
- double shutter_time;
+ libcamera::utils::Duration total_exposure_value; // value for all exposure and gain for this image
+ libcamera::utils::Duration target_exposure_value; // (unfiltered) target total exposure AGC is aiming for
+ libcamera::utils::Duration shutter_time;
double analogue_gain;
char exposure_mode[32];
char constraint_mode[32];
char metering_mode[32];
double ev;
- double flicker_period;
+ libcamera::utils::Duration flicker_period;
int floating_region_enable;
- double fixed_shutter;
+ libcamera::utils::Duration fixed_shutter;
double fixed_analogue_gain;
double digital_gain;
int locked;
diff --git a/src/ipa/raspberrypi/controller/algorithm.hpp b/src/ipa/raspberrypi/controller/algorithm.hpp
index 6196b2f9..5123c87b 100644
--- a/src/ipa/raspberrypi/controller/algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/algorithm.hpp
@@ -12,9 +12,7 @@
#include <string>
#include <memory>
#include <map>
-#include <atomic>
-#include "logging.hpp"
#include "controller.hpp"
#include <boost/property_tree/ptree.hpp>
@@ -30,7 +28,7 @@ public:
: controller_(controller), paused_(false)
{
}
- virtual ~Algorithm() {}
+ virtual ~Algorithm() = default;
virtual char const *Name() const = 0;
virtual bool IsPaused() const { return paused_; }
virtual void Pause() { paused_ = true; }
@@ -47,7 +45,7 @@ public:
private:
Controller *controller_;
- std::atomic<bool> paused_;
+ bool paused_;
};
// This code is for automatic registration of Front End algorithms with the
diff --git a/src/ipa/raspberrypi/controller/awb_algorithm.hpp b/src/ipa/raspberrypi/controller/awb_algorithm.hpp
index 5be0c9f4..96f88afc 100644
--- a/src/ipa/raspberrypi/controller/awb_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/awb_algorithm.hpp
@@ -15,6 +15,7 @@ class AwbAlgorithm : public Algorithm
public:
AwbAlgorithm(Controller *controller) : Algorithm(controller) {}
// An AWB algorithm must provide the following:
+ virtual unsigned int GetConvergenceFrames() const = 0;
virtual void SetMode(std::string const &mode_name) = 0;
virtual void SetManualGains(double manual_r, double manual_b) = 0;
};
diff --git a/src/ipa/raspberrypi/controller/camera_mode.h b/src/ipa/raspberrypi/controller/camera_mode.h
index 920f11be..65888230 100644
--- a/src/ipa/raspberrypi/controller/camera_mode.h
+++ b/src/ipa/raspberrypi/controller/camera_mode.h
@@ -8,6 +8,8 @@
#include <libcamera/transform.h>
+#include <libcamera/base/utils.h>
+
// Description of a "camera mode", holding enough information for control
// algorithms to adapt their behaviour to the different modes of the camera,
// including binning, scaling, cropping etc.
@@ -33,10 +35,12 @@ struct CameraMode {
double scale_x, scale_y;
// scaling of the noise compared to the native sensor mode
double noise_factor;
- // line time in nanoseconds
- double line_length;
+ // line time
+ libcamera::utils::Duration line_length;
// any camera transform *not* reflected already in the camera tuning
libcamera::Transform transform;
+	// minimum and maximum frame lengths in units of lines
+ uint32_t min_frame_length, max_frame_length;
};
#ifdef __cplusplus
diff --git a/src/ipa/raspberrypi/controller/controller.cpp b/src/ipa/raspberrypi/controller/controller.cpp
index 22461cc4..d3433ad2 100644
--- a/src/ipa/raspberrypi/controller/controller.cpp
+++ b/src/ipa/raspberrypi/controller/controller.cpp
@@ -5,6 +5,8 @@
* controller.cpp - ISP controller
*/
+#include <libcamera/base/log.h>
+
#include "algorithm.hpp"
#include "controller.hpp"
@@ -12,6 +14,9 @@
#include <boost/property_tree/ptree.hpp>
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiController)
Controller::Controller()
: switch_mode_called_(false) {}
@@ -27,7 +32,6 @@ Controller::~Controller() {}
void Controller::Read(char const *filename)
{
- RPI_LOG("Controller starting");
boost::property_tree::ptree root;
boost::property_tree::read_json(filename, root);
for (auto const &key_and_value : root) {
@@ -36,10 +40,9 @@ void Controller::Read(char const *filename)
algo->Read(key_and_value.second);
algorithms_.push_back(AlgorithmPtr(algo));
} else
- RPI_LOG("WARNING: No algorithm found for \""
- << key_and_value.first << "\"");
+ LOG(RPiController, Warning)
+ << "No algorithm found for \"" << key_and_value.first << "\"";
}
- RPI_LOG("Controller finished");
}
Algorithm *Controller::CreateAlgorithm(char const *name)
@@ -50,39 +53,31 @@ Algorithm *Controller::CreateAlgorithm(char const *name)
void Controller::Initialise()
{
- RPI_LOG("Controller starting");
for (auto &algo : algorithms_)
algo->Initialise();
- RPI_LOG("Controller finished");
}
void Controller::SwitchMode(CameraMode const &camera_mode, Metadata *metadata)
{
- RPI_LOG("Controller starting");
for (auto &algo : algorithms_)
algo->SwitchMode(camera_mode, metadata);
switch_mode_called_ = true;
- RPI_LOG("Controller finished");
}
void Controller::Prepare(Metadata *image_metadata)
{
- RPI_LOG("Controller::Prepare starting");
assert(switch_mode_called_);
for (auto &algo : algorithms_)
if (!algo->IsPaused())
algo->Prepare(image_metadata);
- RPI_LOG("Controller::Prepare finished");
}
void Controller::Process(StatisticsPtr stats, Metadata *image_metadata)
{
- RPI_LOG("Controller::Process starting");
assert(switch_mode_called_);
for (auto &algo : algorithms_)
if (!algo->IsPaused())
algo->Process(stats, image_metadata);
- RPI_LOG("Controller::Process finished");
}
Metadata &Controller::GetGlobalMetadata()
diff --git a/src/ipa/raspberrypi/controller/denoise_algorithm.hpp b/src/ipa/raspberrypi/controller/denoise_algorithm.hpp
new file mode 100644
index 00000000..39fcd7e9
--- /dev/null
+++ b/src/ipa/raspberrypi/controller/denoise_algorithm.hpp
@@ -0,0 +1,23 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2021, Raspberry Pi (Trading) Limited
+ *
+ * denoise_algorithm.hpp - Denoise control algorithm interface
+ */
+#pragma once
+
+#include "algorithm.hpp"
+
+namespace RPiController {
+
+enum class DenoiseMode { Off, ColourOff, ColourFast, ColourHighQuality };
+
+class DenoiseAlgorithm : public Algorithm
+{
+public:
+ DenoiseAlgorithm(Controller *controller) : Algorithm(controller) {}
+ // A Denoise algorithm must provide the following:
+ virtual void SetMode(DenoiseMode mode) = 0;
+};
+
+} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/denoise_status.h b/src/ipa/raspberrypi/controller/denoise_status.h
new file mode 100644
index 00000000..67a3c361
--- /dev/null
+++ b/src/ipa/raspberrypi/controller/denoise_status.h
@@ -0,0 +1,24 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
+ *
+ * denoise_status.h - Denoise control algorithm status
+ */
+#pragma once
+
+// This stores the parameters required for Denoise.
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+struct DenoiseStatus {
+ double noise_constant;
+ double noise_slope;
+ double strength;
+ unsigned int mode;
+};
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/src/ipa/raspberrypi/controller/device_status.cpp b/src/ipa/raspberrypi/controller/device_status.cpp
new file mode 100644
index 00000000..f052ea8b
--- /dev/null
+++ b/src/ipa/raspberrypi/controller/device_status.cpp
@@ -0,0 +1,21 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2021, Raspberry Pi (Trading) Limited
+ *
+ * device_status.cpp - device (image sensor) status
+ */
+#include "device_status.h"
+
+using namespace libcamera; /* for the Duration operator<< overload */
+
+std::ostream &operator<<(std::ostream &out, const DeviceStatus &d)
+{
+ out << "Exposure: " << d.shutter_speed
+ << " Frame length: " << d.frame_length
+ << " Gain: " << d.analogue_gain
+ << " Aperture: " << d.aperture
+ << " Lens: " << d.lens_position
+ << " Flash: " << d.flash_intensity;
+
+ return out;
+}
diff --git a/src/ipa/raspberrypi/controller/device_status.h b/src/ipa/raspberrypi/controller/device_status.h
index aa08608b..c4a5d9c8 100644
--- a/src/ipa/raspberrypi/controller/device_status.h
+++ b/src/ipa/raspberrypi/controller/device_status.h
@@ -1,30 +1,39 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
*
* device_status.h - device (image sensor) status
*/
#pragma once
-// Definition of "device metadata" which stores things like shutter time and
-// analogue gain that downstream control algorithms will want to know.
+#include <iostream>
-#ifdef __cplusplus
-extern "C" {
-#endif
+#include <libcamera/base/utils.h>
+
+/*
+ * Definition of "device metadata" which stores things like shutter time and
+ * analogue gain that downstream control algorithms will want to know.
+ */
struct DeviceStatus {
- // time shutter is open, in microseconds
- double shutter_speed;
+ DeviceStatus()
+ : shutter_speed(std::chrono::seconds(0)), frame_length(0),
+ analogue_gain(0.0), lens_position(0.0), aperture(0.0),
+ flash_intensity(0.0)
+ {
+ }
+
+ friend std::ostream &operator<<(std::ostream &out, const DeviceStatus &d);
+
+ /* time shutter is open */
+ libcamera::utils::Duration shutter_speed;
+ /* frame length given in number of lines */
+ uint32_t frame_length;
double analogue_gain;
- // 1.0/distance-in-metres, or 0 if unknown
+ /* 1.0/distance-in-metres, or 0 if unknown */
double lens_position;
- // 1/f so that brightness quadruples when this doubles, or 0 if unknown
+ /* 1/f so that brightness quadruples when this doubles, or 0 if unknown */
double aperture;
- // proportional to brightness with 0 = no flash, 1 = maximum flash
+ /* proportional to brightness with 0 = no flash, 1 = maximum flash */
double flash_intensity;
};
-
-#ifdef __cplusplus
-}
-#endif
diff --git a/src/ipa/raspberrypi/controller/logging.hpp b/src/ipa/raspberrypi/controller/logging.hpp
deleted file mode 100644
index f0d306b6..00000000
--- a/src/ipa/raspberrypi/controller/logging.hpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Limited
- *
- * logging.hpp - logging macros
- */
-#pragma once
-
-#include <iostream>
-
-#ifndef RPI_LOGGING_ENABLE
-#define RPI_LOGGING_ENABLE 0
-#endif
-
-#ifndef RPI_WARNING_ENABLE
-#define RPI_WARNING_ENABLE 1
-#endif
-
-#define RPI_LOG(stuff) \
- do { \
- if (RPI_LOGGING_ENABLE) \
- std::cout << __FUNCTION__ << ": " << stuff << "\n"; \
- } while (0)
-
-#define RPI_WARN(stuff) \
- do { \
- if (RPI_WARNING_ENABLE) \
- std::cout << __FUNCTION__ << " ***WARNING*** " \
- << stuff << "\n"; \
- } while (0)
diff --git a/src/ipa/raspberrypi/controller/metadata.hpp b/src/ipa/raspberrypi/controller/metadata.hpp
index f3a8dfab..fd6aac88 100644
--- a/src/ipa/raspberrypi/controller/metadata.hpp
+++ b/src/ipa/raspberrypi/controller/metadata.hpp
@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
*
* metadata.hpp - general metadata class
*/
@@ -8,68 +8,104 @@
// A simple class for carrying arbitrary metadata, for example about an image.
-#include <string>
-#include <mutex>
+#include <any>
#include <map>
#include <memory>
-
-#include <boost/any.hpp>
+#include <mutex>
+#include <string>
namespace RPiController {
class Metadata
{
public:
- template<typename T> void Set(std::string const &tag, T const &value)
+ Metadata() = default;
+
+ Metadata(Metadata const &other)
{
- std::lock_guard<std::mutex> lock(mutex_);
+ std::scoped_lock other_lock(other.mutex_);
+ data_ = other.data_;
+ }
+
+ Metadata(Metadata &&other)
+ {
+ std::scoped_lock other_lock(other.mutex_);
+ data_ = std::move(other.data_);
+ other.data_.clear();
+ }
+
+ template<typename T>
+ void Set(std::string const &tag, T const &value)
+ {
+ std::scoped_lock lock(mutex_);
data_[tag] = value;
}
- template<typename T> int Get(std::string const &tag, T &value) const
+
+ template<typename T>
+ int Get(std::string const &tag, T &value) const
{
- std::lock_guard<std::mutex> lock(mutex_);
+ std::scoped_lock lock(mutex_);
auto it = data_.find(tag);
if (it == data_.end())
return -1;
- value = boost::any_cast<T>(it->second);
+ value = std::any_cast<T>(it->second);
return 0;
}
+
void Clear()
{
- std::lock_guard<std::mutex> lock(mutex_);
+ std::scoped_lock lock(mutex_);
data_.clear();
}
+
Metadata &operator=(Metadata const &other)
{
- std::lock_guard<std::mutex> lock(mutex_);
- std::lock_guard<std::mutex> other_lock(other.mutex_);
+ std::scoped_lock lock(mutex_, other.mutex_);
data_ = other.data_;
return *this;
}
- template<typename T> T *GetLocked(std::string const &tag)
+
+ Metadata &operator=(Metadata &&other)
+ {
+ std::scoped_lock lock(mutex_, other.mutex_);
+ data_ = std::move(other.data_);
+ other.data_.clear();
+ return *this;
+ }
+
+ void Merge(Metadata &other)
+ {
+ std::scoped_lock lock(mutex_, other.mutex_);
+ data_.merge(other.data_);
+ }
+
+ template<typename T>
+ T *GetLocked(std::string const &tag)
{
// This allows in-place access to the Metadata contents,
// for which you should be holding the lock.
auto it = data_.find(tag);
if (it == data_.end())
return nullptr;
- return boost::any_cast<T>(&it->second);
+ return std::any_cast<T>(&it->second);
}
+
template<typename T>
void SetLocked(std::string const &tag, T const &value)
{
// Use this only if you're holding the lock yourself.
data_[tag] = value;
}
+
// Note: use of (lowercase) lock and unlock means you can create scoped
// locks with the standard lock classes.
- // e.g. std::lock_guard<PisP::Metadata> lock(metadata)
+ // e.g. std::lock_guard<RPiController::Metadata> lock(metadata)
void lock() { mutex_.lock(); }
void unlock() { mutex_.unlock(); }
private:
mutable std::mutex mutex_;
- std::map<std::string, boost::any> data_;
+ std::map<std::string, std::any> data_;
};
typedef std::shared_ptr<Metadata> MetadataPtr;
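A minimal usage sketch for the class above, showing the copying Set()/Get() path next to the in-place GetLocked() path (the tag and payload are just examples):

	#include <mutex>

	#include "device_status.h"
	#include "metadata.hpp"

	void example()
	{
		RPiController::Metadata metadata;
		metadata.Set("device.status", DeviceStatus{});

		{
			/* lock()/unlock() make Metadata usable with standard scoped locks. */
			std::lock_guard<RPiController::Metadata> lock(metadata);
			DeviceStatus *status = metadata.GetLocked<DeviceStatus>("device.status");
			if (status)
				status->analogue_gain = 2.0;
		}

		DeviceStatus copy;
		metadata.Get("device.status", copy); /* returns 0 and copies on success */
	}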
diff --git a/src/ipa/raspberrypi/controller/pwl.cpp b/src/ipa/raspberrypi/controller/pwl.cpp
index aa134a1f..130c820b 100644
--- a/src/ipa/raspberrypi/controller/pwl.cpp
+++ b/src/ipa/raspberrypi/controller/pwl.cpp
@@ -114,6 +114,36 @@ Pwl::PerpType Pwl::Invert(Point const &xy, Point &perp, int &span,
return PerpType::None;
}
+Pwl Pwl::Inverse(bool *true_inverse, const double eps) const
+{
+ bool appended = false, prepended = false, neither = false;
+ Pwl inverse;
+
+ for (Point const &p : points_) {
+ if (inverse.Empty())
+ inverse.Append(p.y, p.x, eps);
+ else if (std::abs(inverse.points_.back().x - p.y) <= eps ||
+ std::abs(inverse.points_.front().x - p.y) <= eps)
+ /* do nothing */;
+ else if (p.y > inverse.points_.back().x) {
+ inverse.Append(p.y, p.x, eps);
+ appended = true;
+ } else if (p.y < inverse.points_.front().x) {
+ inverse.Prepend(p.y, p.x, eps);
+ prepended = true;
+ } else
+ neither = true;
+ }
+
+ // This is not a proper inverse if we found ourselves putting points
+ // onto both ends of the inverse, or if there were points that couldn't
+ // go on either.
+ if (true_inverse)
+ *true_inverse = !(neither || (appended && prepended));
+
+ return inverse;
+}
+
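A usage sketch, assuming Pwl's existing Append() and Eval() interface (the control points are illustrative):

	#include "pwl.hpp"

	void example()
	{
		RPiController::Pwl gamma;
		gamma.Append(0.0, 0.0);
		gamma.Append(0.5, 0.7);
		gamma.Append(1.0, 1.0);

		bool trueInverse;
		RPiController::Pwl degamma = gamma.Inverse(&trueInverse);
		/*
		 * gamma is strictly monotonic, so trueInverse should be true and
		 * degamma.Eval(gamma.Eval(x)) recovers x to within eps.
		 */
	}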
Pwl Pwl::Compose(Pwl const &other, const double eps) const
{
double this_x = points_[0].x, this_y = points_[0].y;
diff --git a/src/ipa/raspberrypi/controller/pwl.hpp b/src/ipa/raspberrypi/controller/pwl.hpp
index 4f168551..484672f6 100644
--- a/src/ipa/raspberrypi/controller/pwl.hpp
+++ b/src/ipa/raspberrypi/controller/pwl.hpp
@@ -80,6 +80,9 @@ public:
};
PerpType Invert(Point const &xy, Point &perp, int &span,
const double eps = 1e-6) const;
+ // Compute the inverse function. Indicate if it is a proper (true)
+ // inverse, or only a best effort (e.g. input was non-monotonic).
+ Pwl Inverse(bool *true_inverse = nullptr, const double eps = 1e-6) const;
// Compose two Pwls together, doing "this" first and "other" after.
Pwl Compose(Pwl const &other, const double eps = 1e-6) const;
// Apply function to (x,y) values at every control point.
diff --git a/src/ipa/raspberrypi/controller/rpi/agc.cpp b/src/ipa/raspberrypi/controller/rpi/agc.cpp
index df4d3647..f57783f8 100644
--- a/src/ipa/raspberrypi/controller/rpi/agc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/agc.cpp
@@ -9,16 +9,21 @@
#include "linux/bcm2835-isp.h"
+#include <libcamera/base/log.h>
+
#include "../awb_status.h"
#include "../device_status.h"
#include "../histogram.hpp"
-#include "../logging.hpp"
#include "../lux_status.h"
#include "../metadata.hpp"
#include "agc.hpp"
using namespace RPiController;
+using namespace libcamera;
+using libcamera::utils::Duration;
+
+LOG_DEFINE_CATEGORY(RPiAgc)
#define NAME "rpi.agc"
@@ -51,19 +56,26 @@ read_metering_modes(std::map<std::string, AgcMeteringMode> &metering_modes,
return first;
}
-static int read_double_list(std::vector<double> &list,
- boost::property_tree::ptree const &params)
+static int read_list(std::vector<double> &list,
+ boost::property_tree::ptree const &params)
{
for (auto &p : params)
list.push_back(p.second.get_value<double>());
return list.size();
}
+static int read_list(std::vector<Duration> &list,
+ boost::property_tree::ptree const &params)
+{
+ for (auto &p : params)
+ list.push_back(p.second.get_value<double>() * 1us);
+ return list.size();
+}
+
void AgcExposureMode::Read(boost::property_tree::ptree const &params)
{
- int num_shutters =
- read_double_list(shutter, params.get_child("shutter"));
- int num_ags = read_double_list(gain, params.get_child("gain"));
+ int num_shutters = read_list(shutter, params.get_child("shutter"));
+ int num_ags = read_list(gain, params.get_child("gain"));
if (num_shutters < 2 || num_ags < 2)
throw std::runtime_error(
"AgcConfig: must have at least two entries in exposure profile");
@@ -128,7 +140,7 @@ static std::string read_constraint_modes(
void AgcConfig::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("AgcConfig");
+ LOG(RPiAgc, Debug) << "AgcConfig";
default_metering_mode = read_metering_modes(
metering_modes, params.get_child("metering_modes"));
default_exposure_mode = read_exposure_modes(
@@ -138,25 +150,28 @@ void AgcConfig::Read(boost::property_tree::ptree const &params)
Y_target.Read(params.get_child("y_target"));
speed = params.get<double>("speed", 0.2);
startup_frames = params.get<uint16_t>("startup_frames", 10);
+ convergence_frames = params.get<unsigned int>("convergence_frames", 6);
fast_reduce_threshold =
params.get<double>("fast_reduce_threshold", 0.4);
base_ev = params.get<double>("base_ev", 1.0);
+ // Start with quite a low value as ramping up is easier than ramping down.
+ default_exposure_time = params.get<double>("default_exposure_time", 1000) * 1us;
+ default_analogue_gain = params.get<double>("default_analogue_gain", 1.0);
}
Agc::Agc(Controller *controller)
: AgcAlgorithm(controller), metering_mode_(nullptr),
exposure_mode_(nullptr), constraint_mode_(nullptr),
- frame_count_(0), lock_count_(0)
+ frame_count_(0), lock_count_(0),
+ last_target_exposure_(0s),
+ ev_(1.0), flicker_period_(0s),
+ max_shutter_(0s), fixed_shutter_(0s), fixed_analogue_gain_(0.0)
{
- ev_ = status_.ev = 1.0;
- flicker_period_ = status_.flicker_period = 0.0;
- fixed_shutter_ = status_.fixed_shutter = 0;
- fixed_analogue_gain_ = status_.fixed_analogue_gain = 0.0;
- // set to zero initially, so we can tell it's not been calculated
- status_.total_exposure_value = 0.0;
- status_.target_exposure_value = 0.0;
- status_.locked = false;
- output_status_ = status_;
+ memset(&awb_, 0, sizeof(awb_));
+	// Setting status_.total_exposure_value to zero initially tells us
+ // it's not been calculated yet (i.e. Process hasn't yet run).
+ memset(&status_, 0, sizeof(status_));
+ status_.ev = ev_;
}
char const *Agc::Name() const
@@ -166,7 +181,7 @@ char const *Agc::Name() const
void Agc::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("Agc");
+ LOG(RPiAgc, Debug) << "Agc";
config_.Read(params);
// Set the config's defaults (which are the first ones it read) as our
// current modes, until someone changes them. (they're all known to
@@ -177,122 +192,155 @@ void Agc::Read(boost::property_tree::ptree const &params)
exposure_mode_ = &config_.exposure_modes[exposure_mode_name_];
constraint_mode_name_ = config_.default_constraint_mode;
constraint_mode_ = &config_.constraint_modes[constraint_mode_name_];
+ // Set up the "last shutter/gain" values, in case AGC starts "disabled".
+ status_.shutter_time = config_.default_exposure_time;
+ status_.analogue_gain = config_.default_analogue_gain;
+}
+
+bool Agc::IsPaused() const
+{
+ return false;
+}
+
+void Agc::Pause()
+{
+ fixed_shutter_ = status_.shutter_time;
+ fixed_analogue_gain_ = status_.analogue_gain;
+}
+
+void Agc::Resume()
+{
+ fixed_shutter_ = 0s;
+ fixed_analogue_gain_ = 0;
+}
+
+unsigned int Agc::GetConvergenceFrames() const
+{
+ // If shutter and gain have been explicitly set, there is no
+ // convergence to happen, so no need to drop any frames - return zero.
+ if (fixed_shutter_ && fixed_analogue_gain_)
+ return 0;
+ else
+ return config_.convergence_frames;
}
void Agc::SetEv(double ev)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
ev_ = ev;
}
-void Agc::SetFlickerPeriod(double flicker_period)
+void Agc::SetFlickerPeriod(Duration flicker_period)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
flicker_period_ = flicker_period;
}
-void Agc::SetFixedShutter(double fixed_shutter)
+void Agc::SetMaxShutter(Duration max_shutter)
+{
+ max_shutter_ = max_shutter;
+}
+
+void Agc::SetFixedShutter(Duration fixed_shutter)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
fixed_shutter_ = fixed_shutter;
+ // Set this in case someone calls Pause() straight after.
+ status_.shutter_time = clipShutter(fixed_shutter_);
}
void Agc::SetFixedAnalogueGain(double fixed_analogue_gain)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
fixed_analogue_gain_ = fixed_analogue_gain;
+ // Set this in case someone calls Pause() straight after.
+ status_.analogue_gain = fixed_analogue_gain;
}
void Agc::SetMeteringMode(std::string const &metering_mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
metering_mode_name_ = metering_mode_name;
}
void Agc::SetExposureMode(std::string const &exposure_mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
exposure_mode_name_ = exposure_mode_name;
}
void Agc::SetConstraintMode(std::string const &constraint_mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
constraint_mode_name_ = constraint_mode_name;
}
void Agc::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
Metadata *metadata)
{
- // On a mode switch, it's possible the exposure profile could change,
- // so we run through the dividing up of exposure/gain again and
- // write the results into the metadata we've been given.
- if (status_.total_exposure_value) {
- housekeepConfig();
- divvyupExposure();
- writeAndFinish(metadata, false);
+ housekeepConfig();
+
+ Duration fixed_shutter = clipShutter(fixed_shutter_);
+ if (fixed_shutter && fixed_analogue_gain_) {
+ // We're going to reset the algorithm here with these fixed values.
+
+ fetchAwbStatus(metadata);
+ double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
+ ASSERT(min_colour_gain != 0.0);
+
+ // This is the equivalent of computeTargetExposure and applyDigitalGain.
+ target_.total_exposure_no_dg = fixed_shutter * fixed_analogue_gain_;
+ target_.total_exposure = target_.total_exposure_no_dg / min_colour_gain;
+
+ // Equivalent of filterExposure. This resets any "history".
+ filtered_ = target_;
+
+ // Equivalent of divideUpExposure.
+ filtered_.shutter = fixed_shutter;
+ filtered_.analogue_gain = fixed_analogue_gain_;
+ } else if (status_.total_exposure_value) {
+ // On a mode switch, it's possible the exposure profile could change,
+		// or a fixed exposure/gain might be set, so we divide up the
+		// exposure/gain again, but we don't change any target values.
+ divideUpExposure();
+ } else {
+ // We come through here on startup, when at least one of the shutter
+ // or gain has not been fixed. We must still write those values out so
+ // that they will be applied immediately. We supply some arbitrary defaults
+ // for any that weren't set.
+
+ // Equivalent of divideUpExposure.
+ filtered_.shutter = fixed_shutter ? fixed_shutter : config_.default_exposure_time;
+ filtered_.analogue_gain = fixed_analogue_gain_ ? fixed_analogue_gain_ : config_.default_analogue_gain;
}
+
+ writeAndFinish(metadata, false);
}
void Agc::Prepare(Metadata *image_metadata)
{
- AgcStatus status;
- {
- std::unique_lock<std::mutex> lock(output_mutex_);
- status = output_status_;
- }
- int lock_count = lock_count_;
- lock_count_ = 0;
- status.digital_gain = 1.0;
+ status_.digital_gain = 1.0;
+ fetchAwbStatus(image_metadata); // always fetch it so that Process knows it's been done
+
if (status_.total_exposure_value) {
// Process has run, so we have meaningful values.
DeviceStatus device_status;
if (image_metadata->Get("device.status", device_status) == 0) {
- double actual_exposure = device_status.shutter_speed *
- device_status.analogue_gain;
+ Duration actual_exposure = device_status.shutter_speed *
+ device_status.analogue_gain;
if (actual_exposure) {
- status.digital_gain =
+ status_.digital_gain =
status_.total_exposure_value /
actual_exposure;
- RPI_LOG("Want total exposure " << status_.total_exposure_value);
+ LOG(RPiAgc, Debug) << "Want total exposure " << status_.total_exposure_value;
// Never ask for a gain < 1.0, and also impose
// some upper limit. Make it customisable?
- status.digital_gain = std::max(
+ status_.digital_gain = std::max(
1.0,
- std::min(status.digital_gain, 4.0));
- RPI_LOG("Actual exposure " << actual_exposure);
- RPI_LOG("Use digital_gain " << status.digital_gain);
- RPI_LOG("Effective exposure " << actual_exposure * status.digital_gain);
+ std::min(status_.digital_gain, 4.0));
+ LOG(RPiAgc, Debug) << "Actual exposure " << actual_exposure;
+ LOG(RPiAgc, Debug) << "Use digital_gain " << status_.digital_gain;
+ LOG(RPiAgc, Debug) << "Effective exposure "
+ << actual_exposure * status_.digital_gain;
// Decide whether AEC/AGC has converged.
- // Insist AGC is steady for MAX_LOCK_COUNT
- // frames before we say we are "locked".
- // (The hard-coded constants may need to
- // become customisable.)
- if (status.target_exposure_value) {
-#define MAX_LOCK_COUNT 3
- double err = 0.10 * status.target_exposure_value + 200;
- if (actual_exposure <
- status.target_exposure_value + err
- && actual_exposure >
- status.target_exposure_value - err)
- lock_count_ =
- std::min(lock_count + 1,
- MAX_LOCK_COUNT);
- else if (actual_exposure <
- status.target_exposure_value
- + 1.5 * err &&
- actual_exposure >
- status.target_exposure_value
- - 1.5 * err)
- lock_count_ = lock_count;
- RPI_LOG("Lock count: " << lock_count_);
- }
+ updateLockStatus(device_status);
}
} else
- RPI_LOG(Name() << ": no device metadata");
- status.locked = lock_count_ >= MAX_LOCK_COUNT;
- //printf("%s\n", status.locked ? "+++++++++" : "-");
- image_metadata->Set("agc.status", status);
+ LOG(RPiAgc, Warning) << Name() << ": no device metadata";
+ image_metadata->Set("agc.status", status_);
}
}
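The clamp above, pulled out as a worked example: if AGC wants a total exposure of 33ms-equivalent but the sensor delivered a shutter and gain product of 16.5ms, the ratio is 2.0 and 2.0x digital gain is applied; the result always lands in [1.0, 4.0]:

	#include <algorithm>

	double digitalGain(double wantedExposure, double actualExposure)
	{
		/* Never ask for a gain < 1.0, and impose an upper limit of 4.0. */
		return std::max(1.0, std::min(wantedExposure / actualExposure, 4.0));
	}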
@@ -312,16 +360,53 @@ void Agc::Process(StatisticsPtr &stats, Metadata *image_metadata)
// Some of the exposure has to be applied as digital gain, so work out
// what that is. This function also tells us whether it's decided to
// "desaturate" the image more quickly.
- bool desaturate = applyDigitalGain(image_metadata, gain, target_Y);
+ bool desaturate = applyDigitalGain(gain, target_Y);
// The results have to be filtered so as not to change too rapidly.
filterExposure(desaturate);
- // The last thing is to divvy up the exposure value into a shutter time
+ // The last thing is to divide up the exposure value into a shutter time
// and analogue_gain, according to the current exposure mode.
- divvyupExposure();
+ divideUpExposure();
// Finally advertise what we've done.
writeAndFinish(image_metadata, desaturate);
}
+void Agc::updateLockStatus(DeviceStatus const &device_status)
+{
+ const double ERROR_FACTOR = 0.10; // make these customisable?
+ const int MAX_LOCK_COUNT = 5;
+ // Reset "lock count" when we exceed this multiple of ERROR_FACTOR
+ const double RESET_MARGIN = 1.5;
+
+ // Add 200us to the exposure time error to allow for line quantisation.
+ Duration exposure_error = last_device_status_.shutter_speed * ERROR_FACTOR + 200us;
+ double gain_error = last_device_status_.analogue_gain * ERROR_FACTOR;
+ Duration target_error = last_target_exposure_ * ERROR_FACTOR;
+
+ // Note that we don't know the exposure/gain limits of the sensor, so
+ // the values we keep requesting may be unachievable. For this reason
+ // we only insist that we're close to values in the past few frames.
+ if (device_status.shutter_speed > last_device_status_.shutter_speed - exposure_error &&
+ device_status.shutter_speed < last_device_status_.shutter_speed + exposure_error &&
+ device_status.analogue_gain > last_device_status_.analogue_gain - gain_error &&
+ device_status.analogue_gain < last_device_status_.analogue_gain + gain_error &&
+ status_.target_exposure_value > last_target_exposure_ - target_error &&
+ status_.target_exposure_value < last_target_exposure_ + target_error)
+ lock_count_ = std::min(lock_count_ + 1, MAX_LOCK_COUNT);
+ else if (device_status.shutter_speed < last_device_status_.shutter_speed - RESET_MARGIN * exposure_error ||
+ device_status.shutter_speed > last_device_status_.shutter_speed + RESET_MARGIN * exposure_error ||
+ device_status.analogue_gain < last_device_status_.analogue_gain - RESET_MARGIN * gain_error ||
+ device_status.analogue_gain > last_device_status_.analogue_gain + RESET_MARGIN * gain_error ||
+ status_.target_exposure_value < last_target_exposure_ - RESET_MARGIN * target_error ||
+ status_.target_exposure_value > last_target_exposure_ + RESET_MARGIN * target_error)
+ lock_count_ = 0;
+
+ last_device_status_ = device_status;
+ last_target_exposure_ = status_.target_exposure_value;
+
+ LOG(RPiAgc, Debug) << "Lock count updated to " << lock_count_;
+ status_.locked = lock_count_ == MAX_LOCK_COUNT;
+}
+
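The same three-band hysteresis, sketched for a single scalar quantity (above, shutter, gain and target exposure must all sit in band before the count goes up, and any one of them straying beyond RESET_MARGIN times its band resets it):

	#include <algorithm>

	int updateLock(int lockCount, double value, double last, double error)
	{
		const int maxLockCount = 5;
		const double resetMargin = 1.5;

		if (value > last - error && value < last + error)
			return std::min(lockCount + 1, maxLockCount); /* in band: count up */
		if (value < last - resetMargin * error ||
		    value > last + resetMargin * error)
			return 0; /* far out of band: reset */
		return lockCount; /* in between: hold */
	}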
static void copy_string(std::string const &s, char *d, size_t size)
{
size_t length = s.copy(d, size - 1);
@@ -331,55 +416,47 @@ static void copy_string(std::string const &s, char *d, size_t size)
void Agc::housekeepConfig()
{
// First fetch all the up-to-date settings, so no one else has to do it.
- std::string new_exposure_mode_name, new_constraint_mode_name,
- new_metering_mode_name;
- {
- std::unique_lock<std::mutex> lock(settings_mutex_);
- new_metering_mode_name = metering_mode_name_;
- new_exposure_mode_name = exposure_mode_name_;
- new_constraint_mode_name = constraint_mode_name_;
- status_.ev = ev_;
- status_.fixed_shutter = fixed_shutter_;
- status_.fixed_analogue_gain = fixed_analogue_gain_;
- status_.flicker_period = flicker_period_;
- }
- RPI_LOG("ev " << status_.ev << " fixed_shutter "
- << status_.fixed_shutter << " fixed_analogue_gain "
- << status_.fixed_analogue_gain);
+ status_.ev = ev_;
+ status_.fixed_shutter = clipShutter(fixed_shutter_);
+ status_.fixed_analogue_gain = fixed_analogue_gain_;
+ status_.flicker_period = flicker_period_;
+ LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixed_shutter "
+ << status_.fixed_shutter << " fixed_analogue_gain "
+ << status_.fixed_analogue_gain;
// Make sure the "mode" pointers point to the up-to-date things, if
// they've changed.
- if (strcmp(new_metering_mode_name.c_str(), status_.metering_mode)) {
- auto it = config_.metering_modes.find(new_metering_mode_name);
+ if (strcmp(metering_mode_name_.c_str(), status_.metering_mode)) {
+ auto it = config_.metering_modes.find(metering_mode_name_);
if (it == config_.metering_modes.end())
throw std::runtime_error("Agc: no metering mode " +
- new_metering_mode_name);
+ metering_mode_name_);
metering_mode_ = &it->second;
- copy_string(new_metering_mode_name, status_.metering_mode,
+ copy_string(metering_mode_name_, status_.metering_mode,
sizeof(status_.metering_mode));
}
- if (strcmp(new_exposure_mode_name.c_str(), status_.exposure_mode)) {
- auto it = config_.exposure_modes.find(new_exposure_mode_name);
+ if (strcmp(exposure_mode_name_.c_str(), status_.exposure_mode)) {
+ auto it = config_.exposure_modes.find(exposure_mode_name_);
if (it == config_.exposure_modes.end())
throw std::runtime_error("Agc: no exposure profile " +
- new_exposure_mode_name);
+ exposure_mode_name_);
exposure_mode_ = &it->second;
- copy_string(new_exposure_mode_name, status_.exposure_mode,
+ copy_string(exposure_mode_name_, status_.exposure_mode,
sizeof(status_.exposure_mode));
}
- if (strcmp(new_constraint_mode_name.c_str(), status_.constraint_mode)) {
+ if (strcmp(constraint_mode_name_.c_str(), status_.constraint_mode)) {
auto it =
- config_.constraint_modes.find(new_constraint_mode_name);
+ config_.constraint_modes.find(constraint_mode_name_);
if (it == config_.constraint_modes.end())
throw std::runtime_error("Agc: no constraint list " +
- new_constraint_mode_name);
+ constraint_mode_name_);
constraint_mode_ = &it->second;
- copy_string(new_constraint_mode_name, status_.constraint_mode,
+ copy_string(constraint_mode_name_, status_.constraint_mode,
sizeof(status_.constraint_mode));
}
- RPI_LOG("exposure_mode "
- << new_exposure_mode_name << " constraint_mode "
- << new_constraint_mode_name << " metering_mode "
- << new_metering_mode_name);
+ LOG(RPiAgc, Debug) << "exposure_mode "
+ << exposure_mode_name_ << " constraint_mode "
+ << constraint_mode_name_ << " metering_mode "
+ << metering_mode_name_;
}
void Agc::fetchCurrentExposure(Metadata *image_metadata)
@@ -393,30 +470,44 @@ void Agc::fetchCurrentExposure(Metadata *image_metadata)
current_.analogue_gain = device_status->analogue_gain;
AgcStatus *agc_status =
image_metadata->GetLocked<AgcStatus>("agc.status");
- current_.total_exposure = agc_status ? agc_status->total_exposure_value : 0;
+ current_.total_exposure = agc_status ? agc_status->total_exposure_value : 0s;
current_.total_exposure_no_dg = current_.shutter * current_.analogue_gain;
}
-static double compute_initial_Y(bcm2835_isp_stats *stats, Metadata *image_metadata,
- double weights[])
+void Agc::fetchAwbStatus(Metadata *image_metadata)
+{
+ awb_.gain_r = 1.0; // in case not found in metadata
+ awb_.gain_g = 1.0;
+ awb_.gain_b = 1.0;
+ if (image_metadata->Get("awb.status", awb_) != 0)
+ LOG(RPiAgc, Debug) << "Agc: no AWB status found";
+}
+
+static double compute_initial_Y(bcm2835_isp_stats *stats, AwbStatus const &awb,
+ double weights[], double gain)
{
bcm2835_isp_stats_region *regions = stats->agc_stats;
- struct AwbStatus awb;
- awb.gain_r = awb.gain_g = awb.gain_b = 1.0; // in case no metadata
- if (image_metadata->Get("awb.status", awb) != 0)
- RPI_WARN("Agc: no AWB status found");
- double Y_sum = 0, weight_sum = 0;
+ // Note how the calculation below means that equal weights give you
+ // "average" metering (i.e. all pixels equally important).
+ double R_sum = 0, G_sum = 0, B_sum = 0, pixel_sum = 0;
for (int i = 0; i < AGC_STATS_SIZE; i++) {
- if (regions[i].counted == 0)
- continue;
- weight_sum += weights[i];
- double Y = regions[i].r_sum * awb.gain_r * .299 +
- regions[i].g_sum * awb.gain_g * .587 +
- regions[i].b_sum * awb.gain_b * .114;
- Y /= regions[i].counted;
- Y_sum += Y * weights[i];
+ double counted = regions[i].counted;
+ double r_sum = std::min(regions[i].r_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ double g_sum = std::min(regions[i].g_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ double b_sum = std::min(regions[i].b_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ R_sum += r_sum * weights[i];
+ G_sum += g_sum * weights[i];
+ B_sum += b_sum * weights[i];
+ pixel_sum += counted * weights[i];
}
- return Y_sum / weight_sum / (1 << PIPELINE_BITS);
+ if (pixel_sum == 0.0) {
+ LOG(RPiAgc, Warning) << "compute_initial_Y: pixel_sum is zero";
+ return 0;
+ }
+ double Y_sum = R_sum * awb.gain_r * .299 +
+ G_sum * awb.gain_g * .587 +
+ B_sum * awb.gain_b * .114;
+ return Y_sum / pixel_sum / (1 << PIPELINE_BITS);
}
// We handle extra gain through EV by adjusting our Y targets. However, you
@@ -443,7 +534,7 @@ void Agc::computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
struct LuxStatus lux = {};
lux.lux = 400; // default lux level to 400 in case no metadata found
if (image_metadata->Get("lux.status", lux) != 0)
- RPI_WARN("Agc: no lux level found");
+ LOG(RPiAgc, Warning) << "Agc: no lux level found";
Histogram h(statistics->hist[0].g_hist, NUM_HISTOGRAM_BINS);
double ev_gain = status_.ev * config_.base_ev;
// The initial gain and target_Y come from some of the regions. After
@@ -451,67 +542,84 @@ void Agc::computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
target_Y =
config_.Y_target.Eval(config_.Y_target.Domain().Clip(lux.lux));
target_Y = std::min(EV_GAIN_Y_TARGET_LIMIT, target_Y * ev_gain);
- double initial_Y = compute_initial_Y(statistics, image_metadata,
- metering_mode_->weights);
- gain = std::min(10.0, target_Y / (initial_Y + .001));
- RPI_LOG("Initially Y " << initial_Y << " target " << target_Y
- << " gives gain " << gain);
+
+	// Do this calculation a few times as the brightness increase can be
+	// non-linear when there are saturated regions.
+ gain = 1.0;
+ for (int i = 0; i < 8; i++) {
+ double initial_Y = compute_initial_Y(statistics, awb_,
+ metering_mode_->weights, gain);
+ double extra_gain = std::min(10.0, target_Y / (initial_Y + .001));
+ gain *= extra_gain;
+ LOG(RPiAgc, Debug) << "Initial Y " << initial_Y << " target " << target_Y
+ << " gives gain " << gain;
+ if (extra_gain < 1.01) // close enough
+ break;
+ }
+
for (auto &c : *constraint_mode_) {
double new_target_Y;
double new_gain =
constraint_compute_gain(c, h, lux.lux, ev_gain,
new_target_Y);
- RPI_LOG("Constraint has target_Y "
- << new_target_Y << " giving gain " << new_gain);
+ LOG(RPiAgc, Debug) << "Constraint has target_Y "
+ << new_target_Y << " giving gain " << new_gain;
if (c.bound == AgcConstraint::Bound::LOWER &&
new_gain > gain) {
- RPI_LOG("Lower bound constraint adopted");
+ LOG(RPiAgc, Debug) << "Lower bound constraint adopted";
gain = new_gain, target_Y = new_target_Y;
} else if (c.bound == AgcConstraint::Bound::UPPER &&
new_gain < gain) {
- RPI_LOG("Upper bound constraint adopted");
+ LOG(RPiAgc, Debug) << "Upper bound constraint adopted";
gain = new_gain, target_Y = new_target_Y;
}
}
- RPI_LOG("Final gain " << gain << " (target_Y " << target_Y << " ev "
- << status_.ev << " base_ev " << config_.base_ev
- << ")");
+ LOG(RPiAgc, Debug) << "Final gain " << gain << " (target_Y " << target_Y << " ev "
+ << status_.ev << " base_ev " << config_.base_ev
+ << ")";
}
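The gain search in computeGain() above is a small fixed-point iteration: each pass re-measures Y with the candidate gain applied (clipped to pipeline white, which models the saturated regions), then multiplies in whatever extra gain is still needed. A standalone sketch, with computeY standing in for compute_initial_Y:

	#include <algorithm>
	#include <functional>

	double estimateGain(const std::function<double(double)> &computeY, double targetY)
	{
		double gain = 1.0;
		for (int i = 0; i < 8; i++) {
			double extraGain = std::min(10.0, targetY / (computeY(gain) + .001));
			gain *= extraGain;
			if (extraGain < 1.01) /* close enough */
				break;
		}
		return gain;
	}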
void Agc::computeTargetExposure(double gain)
{
- // The statistics reflect the image without digital gain, so the final
- // total exposure we're aiming for is:
- target_.total_exposure = current_.total_exposure_no_dg * gain;
- // The final target exposure is also limited to what the exposure
- // mode allows.
- double max_total_exposure =
- (status_.fixed_shutter != 0.0
- ? status_.fixed_shutter
- : exposure_mode_->shutter.back()) *
- (status_.fixed_analogue_gain != 0.0
- ? status_.fixed_analogue_gain
- : exposure_mode_->gain.back());
- target_.total_exposure = std::min(target_.total_exposure,
- max_total_exposure);
- RPI_LOG("Target total_exposure " << target_.total_exposure);
-}
-
-bool Agc::applyDigitalGain(Metadata *image_metadata, double gain,
- double target_Y)
-{
- double dg = 1.0;
+ if (status_.fixed_shutter && status_.fixed_analogue_gain) {
+ // When ag and shutter are both fixed, we need to drive the
+ // total exposure so that we end up with a digital gain of at least
+ // 1/min_colour_gain. Otherwise we'd desaturate channels causing
+ // white to go cyan or magenta.
+ double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
+ ASSERT(min_colour_gain != 0.0);
+ target_.total_exposure =
+ status_.fixed_shutter * status_.fixed_analogue_gain / min_colour_gain;
+ } else {
+ // The statistics reflect the image without digital gain, so the final
+ // total exposure we're aiming for is:
+ target_.total_exposure = current_.total_exposure_no_dg * gain;
+ // The final target exposure is also limited to what the exposure
+ // mode allows.
+ Duration max_shutter = status_.fixed_shutter
+ ? status_.fixed_shutter
+ : exposure_mode_->shutter.back();
+ max_shutter = clipShutter(max_shutter);
+ Duration max_total_exposure =
+ max_shutter *
+ (status_.fixed_analogue_gain != 0.0
+ ? status_.fixed_analogue_gain
+ : exposure_mode_->gain.back());
+ target_.total_exposure = std::min(target_.total_exposure,
+ max_total_exposure);
+ }
+ LOG(RPiAgc, Debug) << "Target total_exposure " << target_.total_exposure;
+}
+
+bool Agc::applyDigitalGain(double gain, double target_Y)
+{
+ double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
+ ASSERT(min_colour_gain != 0.0);
+ double dg = 1.0 / min_colour_gain;
// I think this pipeline subtracts black level and rescales before we
// get the stats, so no need to worry about it.
- struct AwbStatus awb;
- if (image_metadata->Get("awb.status", awb) == 0) {
- double min_gain = std::min(awb.gain_r,
- std::min(awb.gain_g, awb.gain_b));
- dg *= std::max(1.0, 1.0 / min_gain);
- } else
- RPI_WARN("Agc: no AWB status found");
- RPI_LOG("after AWB, target dg " << dg << " gain " << gain
- << " target_Y " << target_Y);
+ LOG(RPiAgc, Debug) << "after AWB, target dg " << dg << " gain " << gain
+ << " target_Y " << target_Y;
// Finally, if we're trying to reduce exposure but the target_Y is
// "close" to 1.0, then the gain computed for that constraint will be
// only slightly less than one, because the measured Y can never be
@@ -523,16 +631,21 @@ bool Agc::applyDigitalGain(Metadata *image_metadata, double gain,
gain < sqrt(target_Y);
if (desaturate)
dg /= config_.fast_reduce_threshold;
- RPI_LOG("Digital gain " << dg << " desaturate? " << desaturate);
+ LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? " << desaturate;
target_.total_exposure_no_dg = target_.total_exposure / dg;
- RPI_LOG("Target total_exposure_no_dg " << target_.total_exposure_no_dg);
+ LOG(RPiAgc, Debug) << "Target total_exposure_no_dg " << target_.total_exposure_no_dg;
return desaturate;
}
void Agc::filterExposure(bool desaturate)
{
- double speed = frame_count_ <= config_.startup_frames ? 1.0 : config_.speed;
- if (filtered_.total_exposure == 0.0) {
+ double speed = config_.speed;
+ // AGC adapts instantly if both shutter and gain are directly specified
+ // or we're in the startup phase.
+ if ((status_.fixed_shutter && status_.fixed_analogue_gain) ||
+ frame_count_ <= config_.startup_frames)
+ speed = 1.0;
+ if (!filtered_.total_exposure) {
filtered_.total_exposure = target_.total_exposure;
filtered_.total_exposure_no_dg = target_.total_exposure_no_dg;
} else {
@@ -560,35 +673,38 @@ void Agc::filterExposure(bool desaturate)
filtered_.total_exposure * config_.fast_reduce_threshold)
filtered_.total_exposure_no_dg = filtered_.total_exposure *
config_.fast_reduce_threshold;
- RPI_LOG("After filtering, total_exposure " << filtered_.total_exposure <<
- " no dg " << filtered_.total_exposure_no_dg);
+ LOG(RPiAgc, Debug) << "After filtering, total_exposure " << filtered_.total_exposure
+ << " no dg " << filtered_.total_exposure_no_dg;
}
-void Agc::divvyupExposure()
+void Agc::divideUpExposure()
{
// Sending the fixed shutter/gain cases through the same code may seem
	// unnecessary, but it will make more sense when we extend this to cover
// variable aperture.
- double exposure_value = filtered_.total_exposure_no_dg;
- double shutter_time, analogue_gain;
- shutter_time = status_.fixed_shutter != 0.0
+ Duration exposure_value = filtered_.total_exposure_no_dg;
+ Duration shutter_time;
+ double analogue_gain;
+ shutter_time = status_.fixed_shutter
? status_.fixed_shutter
: exposure_mode_->shutter[0];
+ shutter_time = clipShutter(shutter_time);
analogue_gain = status_.fixed_analogue_gain != 0.0
? status_.fixed_analogue_gain
: exposure_mode_->gain[0];
if (shutter_time * analogue_gain < exposure_value) {
for (unsigned int stage = 1;
stage < exposure_mode_->gain.size(); stage++) {
- if (status_.fixed_shutter == 0.0) {
- if (exposure_mode_->shutter[stage] *
- analogue_gain >=
+ if (!status_.fixed_shutter) {
+ Duration stage_shutter =
+ clipShutter(exposure_mode_->shutter[stage]);
+ if (stage_shutter * analogue_gain >=
exposure_value) {
shutter_time =
exposure_value / analogue_gain;
break;
}
- shutter_time = exposure_mode_->shutter[stage];
+ shutter_time = stage_shutter;
}
if (status_.fixed_analogue_gain == 0.0) {
if (exposure_mode_->gain[stage] *
@@ -602,16 +718,15 @@ void Agc::divvyupExposure()
}
}
}
- RPI_LOG("Divided up shutter and gain are " << shutter_time << " and "
- << analogue_gain);
+ LOG(RPiAgc, Debug) << "Divided up shutter and gain are " << shutter_time << " and "
+ << analogue_gain;
	// Finally adjust shutter time for flicker avoidance (requires both
// shutter and gain not to be fixed).
- if (status_.fixed_shutter == 0.0 &&
- status_.fixed_analogue_gain == 0.0 &&
- status_.flicker_period != 0.0) {
+ if (!status_.fixed_shutter && !status_.fixed_analogue_gain &&
+ status_.flicker_period) {
int flicker_periods = shutter_time / status_.flicker_period;
- if (flicker_periods > 0) {
- double new_shutter_time = flicker_periods * status_.flicker_period;
+ if (flicker_periods) {
+ Duration new_shutter_time = flicker_periods * status_.flicker_period;
analogue_gain *= shutter_time / new_shutter_time;
// We should still not allow the ag to go over the
// largest value in the exposure mode. Note that this
@@ -621,8 +736,8 @@ void Agc::divvyupExposure()
exposure_mode_->gain.back());
shutter_time = new_shutter_time;
}
- RPI_LOG("After flicker avoidance, shutter "
- << shutter_time << " gain " << analogue_gain);
+ LOG(RPiAgc, Debug) << "After flicker avoidance, shutter "
+ << shutter_time << " gain " << analogue_gain;
}
filtered_.shutter = shutter_time;
filtered_.analogue_gain = analogue_gain;
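
The flicker-avoidance step quantises the shutter down to a whole number of flicker periods and recovers the lost exposure with analogue gain. A self-contained sketch with illustrative values (max_gain stands in for exposure_mode_->gain.back()):

#include <algorithm>
#include <chrono>
#include <iostream>

using namespace std::chrono;

int main()
{
	duration<double, std::micro> shutter(25000.0);        /* 25 ms */
	duration<double, std::micro> flicker_period(10000.0); /* 100 Hz mains */
	double analogue_gain = 2.0;
	const double max_gain = 8.0; /* assumed last entry of the gain list */

	int flicker_periods = static_cast<int>(shutter / flicker_period);
	if (flicker_periods) {
		auto new_shutter = flicker_periods * flicker_period;
		/* Compensate the shortened shutter with more gain... */
		analogue_gain *= shutter / new_shutter;
		/* ...but never beyond what the exposure mode allows. */
		analogue_gain = std::min(analogue_gain, max_gain);
		shutter = new_shutter;
	}
	std::cout << shutter.count() << "us, gain " << analogue_gain << "\n";
}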
@@ -631,20 +746,23 @@ void Agc::divvyupExposure()
void Agc::writeAndFinish(Metadata *image_metadata, bool desaturate)
{
status_.total_exposure_value = filtered_.total_exposure;
- status_.target_exposure_value = desaturate ? 0 : target_.total_exposure_no_dg;
+ status_.target_exposure_value = desaturate ? 0s : target_.total_exposure_no_dg;
status_.shutter_time = filtered_.shutter;
status_.analogue_gain = filtered_.analogue_gain;
- {
- std::unique_lock<std::mutex> lock(output_mutex_);
- output_status_ = status_;
- }
// Write to metadata as well, in case anyone wants to update the camera
// immediately.
image_metadata->Set("agc.status", status_);
- RPI_LOG("Output written, total exposure requested is "
- << filtered_.total_exposure);
- RPI_LOG("Camera exposure update: shutter time " << filtered_.shutter <<
- " analogue gain " << filtered_.analogue_gain);
+ LOG(RPiAgc, Debug) << "Output written, total exposure requested is "
+ << filtered_.total_exposure;
+ LOG(RPiAgc, Debug) << "Camera exposure update: shutter time " << filtered_.shutter
+ << " analogue gain " << filtered_.analogue_gain;
+}
+
+Duration Agc::clipShutter(Duration shutter)
+{
+ if (max_shutter_)
+ shutter = std::min(shutter, max_shutter_);
+ return shutter;
}
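
The staged split performed by divideUpExposure() pairs with the shutter/gain lists in the exposure_modes of the tuning files further down: raise the shutter stage by stage first, then the gain, until their product covers the requested exposure. A simplified sketch of that loop (not the exact code above, which also handles the fixed and clipped cases):

#include <iostream>
#include <vector>

int main()
{
	std::vector<double> shutter_us = { 100, 10000, 30000, 60000 };
	std::vector<double> gain = { 1.0, 2.0, 4.0, 6.0 };
	double exposure_value_us = 45000; /* total exposure = shutter * gain */

	double shutter = shutter_us[0], analogue_gain = gain[0];
	for (size_t stage = 1;
	     stage < gain.size() && shutter * analogue_gain < exposure_value_us;
	     stage++) {
		/* Prefer a longer shutter; stop once it alone suffices. */
		if (shutter_us[stage] * analogue_gain >= exposure_value_us) {
			shutter = exposure_value_us / analogue_gain;
			break;
		}
		shutter = shutter_us[stage];
		/* Then raise the gain; stop once the product suffices. */
		if (shutter * gain[stage] >= exposure_value_us) {
			analogue_gain = exposure_value_us / shutter;
			break;
		}
		analogue_gain = gain[stage];
	}
	std::cout << "shutter " << shutter << "us gain " << analogue_gain << "\n";
}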
// Register algorithm with the system.
diff --git a/src/ipa/raspberrypi/controller/rpi/agc.hpp b/src/ipa/raspberrypi/controller/rpi/agc.hpp
index ba7ae092..85067dc6 100644
--- a/src/ipa/raspberrypi/controller/rpi/agc.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/agc.hpp
@@ -9,6 +9,8 @@
#include <vector>
#include <mutex>
+#include <libcamera/base/utils.h>
+
#include "../agc_algorithm.hpp"
#include "../agc_status.h"
#include "../pwl.hpp"
@@ -22,13 +24,15 @@
namespace RPiController {
+using namespace std::literals::chrono_literals;
+
struct AgcMeteringMode {
double weights[AGC_STATS_SIZE];
void Read(boost::property_tree::ptree const &params);
};
struct AgcExposureMode {
- std::vector<double> shutter;
+ std::vector<libcamera::utils::Duration> shutter;
std::vector<double> gain;
void Read(boost::property_tree::ptree const &params);
};
@@ -52,6 +56,7 @@ struct AgcConfig {
Pwl Y_target;
double speed;
uint16_t startup_frames;
+ unsigned int convergence_frames;
double max_change;
double min_change;
double fast_reduce_threshold;
@@ -60,6 +65,8 @@ struct AgcConfig {
std::string default_exposure_mode;
std::string default_constraint_mode;
double base_ev;
+ libcamera::utils::Duration default_exposure_time;
+ double default_analogue_gain;
};
class Agc : public AgcAlgorithm
@@ -68,9 +75,15 @@ public:
Agc(Controller *controller);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
+ // AGC handles "pausing" for itself.
+ bool IsPaused() const override;
+ void Pause() override;
+ void Resume() override;
+ unsigned int GetConvergenceFrames() const override;
void SetEv(double ev) override;
- void SetFlickerPeriod(double flicker_period) override;
- void SetFixedShutter(double fixed_shutter) override; // microseconds
+ void SetFlickerPeriod(libcamera::utils::Duration flicker_period) override;
+ void SetMaxShutter(libcamera::utils::Duration max_shutter) override;
+ void SetFixedShutter(libcamera::utils::Duration fixed_shutter) override;
void SetFixedAnalogueGain(double fixed_analogue_gain) override;
void SetMeteringMode(std::string const &metering_mode_name) override;
void SetExposureMode(std::string const &exposure_mode_name) override;
@@ -80,44 +93,47 @@ public:
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
private:
+ void updateLockStatus(DeviceStatus const &device_status);
AgcConfig config_;
void housekeepConfig();
void fetchCurrentExposure(Metadata *image_metadata);
+ void fetchAwbStatus(Metadata *image_metadata);
void computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
double &gain, double &target_Y);
void computeTargetExposure(double gain);
- bool applyDigitalGain(Metadata *image_metadata, double gain,
- double target_Y);
+ bool applyDigitalGain(double gain, double target_Y);
void filterExposure(bool desaturate);
- void divvyupExposure();
+ void divideUpExposure();
void writeAndFinish(Metadata *image_metadata, bool desaturate);
+ libcamera::utils::Duration clipShutter(libcamera::utils::Duration shutter);
AgcMeteringMode *metering_mode_;
AgcExposureMode *exposure_mode_;
AgcConstraintMode *constraint_mode_;
uint64_t frame_count_;
+ AwbStatus awb_;
struct ExposureValues {
- ExposureValues() : shutter(0), analogue_gain(0),
- total_exposure(0), total_exposure_no_dg(0) {}
- double shutter;
+ ExposureValues() : shutter(0s), analogue_gain(0),
+ total_exposure(0s), total_exposure_no_dg(0s) {}
+ libcamera::utils::Duration shutter;
double analogue_gain;
- double total_exposure;
- double total_exposure_no_dg; // without digital gain
+ libcamera::utils::Duration total_exposure;
+ libcamera::utils::Duration total_exposure_no_dg; // without digital gain
};
ExposureValues current_; // values for the current frame
ExposureValues target_; // calculate the values we want here
ExposureValues filtered_; // these values are filtered towards target
- AgcStatus status_; // to "latch" settings so they can't change
- AgcStatus output_status_; // the status we will write out
- std::mutex output_mutex_;
+ AgcStatus status_;
int lock_count_;
+ DeviceStatus last_device_status_;
+ libcamera::utils::Duration last_target_exposure_;
// Below here the "settings" that applications can change.
- std::mutex settings_mutex_;
std::string metering_mode_name_;
std::string exposure_mode_name_;
std::string constraint_mode_name_;
double ev_;
- double flicker_period_;
- double fixed_shutter_;
+ libcamera::utils::Duration flicker_period_;
+ libcamera::utils::Duration max_shutter_;
+ libcamera::utils::Duration fixed_shutter_;
double fixed_analogue_gain_;
};
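
The header now carries shutter-related state as libcamera::utils::Duration rather than bare double microseconds. Since utils::Duration is a std::chrono-based type, a sketch with std::chrono directly shows what that buys: unit-safe arithmetic, literals like 0s, and dimensionless doubles from duration ratios:

#include <chrono>
#include <iostream>

using namespace std::chrono;
using namespace std::literals::chrono_literals;

int main()
{
	duration<double, std::micro> shutter = 0s; /* zero-initialised, like ExposureValues */
	shutter = 16666us;                         /* ~60 fps frame time */
	double analogue_gain = 4.0;

	/* "Total exposure" keeps its time dimension through the maths... */
	auto total_exposure = shutter * analogue_gain;
	/* ...and a duration divided by a duration is a plain double. */
	double ratio = total_exposure / shutter;
	std::cout << total_exposure.count() << "us, ratio " << ratio << "\n";
}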
diff --git a/src/ipa/raspberrypi/controller/rpi/alsc.cpp b/src/ipa/raspberrypi/controller/rpi/alsc.cpp
index 42fbc8a4..be3d1ae4 100644
--- a/src/ipa/raspberrypi/controller/rpi/alsc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/alsc.cpp
@@ -6,12 +6,17 @@
*/
#include <math.h>
+#include <libcamera/base/log.h>
+
#include "../awb_status.h"
#include "alsc.hpp"
// Raspberry Pi ALSC (Auto Lens Shading Correction) algorithm.
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiAlsc)
#define NAME "rpi.alsc"
@@ -110,15 +115,14 @@ static void read_calibrations(std::vector<AlscCalibration> &calibrations,
"Alsc: too few values for ct " +
std::to_string(ct) + " in " + name);
calibrations.push_back(calibration);
- RPI_LOG("Read " << name << " calibration for ct "
- << ct);
+ LOG(RPiAlsc, Debug)
+ << "Read " << name << " calibration for ct " << ct;
}
}
}
void Alsc::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("Alsc");
config_.frame_period = params.get<uint16_t>("frame_period", 12);
config_.startup_frames = params.get<uint16_t>("startup_frames", 10);
config_.speed = params.get<double>("speed", 0.05);
@@ -139,13 +143,15 @@ void Alsc::Read(boost::property_tree::ptree const &params)
read_lut(config_.luminance_lut,
params.get_child("luminance_lut"));
else
- RPI_WARN("Alsc: no luminance table - assume unity everywhere");
+ LOG(RPiAlsc, Warning)
+ << "no luminance table - assume unity everywhere";
read_calibrations(config_.calibrations_Cr, params, "calibrations_Cr");
read_calibrations(config_.calibrations_Cb, params, "calibrations_Cb");
config_.default_ct = params.get<double>("default_ct", 4500.0);
config_.threshold = params.get<double>("threshold", 1e-3);
}
+static double get_ct(Metadata *metadata, double default_ct);
static void get_cal_table(double ct,
std::vector<AlscCalibration> const &calibrations,
double cal_table[XY]);
@@ -163,7 +169,6 @@ static void add_luminance_to_tables(double results[3][Y][X],
void Alsc::Initialise()
{
- RPI_LOG("Alsc");
frame_count2_ = frame_count_ = frame_phase_ = 0;
first_time_ = true;
ct_ = config_.default_ct;
@@ -210,6 +215,9 @@ void Alsc::SwitchMode(CameraMode const &camera_mode,
// change.
bool reset_tables = first_time_ || compare_modes(camera_mode_, camera_mode);
+ // Believe the colour temperature from the AWB, if there is one.
+ ct_ = get_ct(metadata, ct_);
+
// Ensure the other thread isn't running while we do this.
waitForAysncThread();
@@ -248,22 +256,22 @@ void Alsc::SwitchMode(CameraMode const &camera_mode,
void Alsc::fetchAsyncResults()
{
- RPI_LOG("Fetch ALSC results");
+ LOG(RPiAlsc, Debug) << "Fetch ALSC results";
async_finished_ = false;
async_started_ = false;
memcpy(sync_results_, async_results_, sizeof(sync_results_));
}
-static double get_ct(Metadata *metadata, double default_ct)
+double get_ct(Metadata *metadata, double default_ct)
{
AwbStatus awb_status;
awb_status.temperature_K = default_ct; // in case nothing found
if (metadata->Get("awb.status", awb_status) != 0)
- RPI_WARN("Alsc: no AWB results found, using "
- << awb_status.temperature_K);
+ LOG(RPiAlsc, Debug) << "no AWB results found, using "
+ << awb_status.temperature_K;
else
- RPI_LOG("Alsc: AWB results found, using "
- << awb_status.temperature_K);
+ LOG(RPiAlsc, Debug) << "AWB results found, using "
+ << awb_status.temperature_K;
return awb_status.temperature_K;
}
@@ -285,7 +293,7 @@ static void copy_stats(bcm2835_isp_stats_region regions[XY], StatisticsPtr &stat
void Alsc::restartAsync(StatisticsPtr &stats, Metadata *image_metadata)
{
- RPI_LOG("Starting ALSC thread");
+ LOG(RPiAlsc, Debug) << "Starting ALSC calculation";
// Get the current colour temperature. It's all we need from the
// metadata. Default to the last CT value (which could be the default).
ct_ = get_ct(image_metadata, ct_);
@@ -293,7 +301,8 @@ void Alsc::restartAsync(StatisticsPtr &stats, Metadata *image_metadata)
// the LSC table that the pipeline applied to them.
AlscStatus alsc_status;
if (image_metadata->Get("alsc.status", alsc_status) != 0) {
- RPI_WARN("No ALSC status found for applied gains!");
+ LOG(RPiAlsc, Warning)
+ << "No ALSC status found for applied gains!";
for (int y = 0; y < Y; y++)
for (int x = 0; x < X; x++) {
alsc_status.r[y][x] = 1.0;
@@ -320,13 +329,12 @@ void Alsc::Prepare(Metadata *image_metadata)
double speed = frame_count_ < (int)config_.startup_frames
? 1.0
: config_.speed;
- RPI_LOG("Alsc: frame_count " << frame_count_ << " speed " << speed);
+ LOG(RPiAlsc, Debug)
+ << "frame_count " << frame_count_ << " speed " << speed;
{
std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ && async_finished_) {
- RPI_LOG("ALSC thread finished");
+ if (async_started_ && async_finished_)
fetchAsyncResults();
- }
}
// Apply IIR filter to results and program into the pipeline.
double *ptr = (double *)sync_results_,
@@ -350,13 +358,11 @@ void Alsc::Process(StatisticsPtr &stats, Metadata *image_metadata)
frame_phase_++;
if (frame_count2_ < (int)config_.startup_frames)
frame_count2_++;
- RPI_LOG("Alsc: frame_phase " << frame_phase_);
+ LOG(RPiAlsc, Debug) << "frame_phase " << frame_phase_;
if (frame_phase_ >= (int)config_.frame_period ||
frame_count2_ < (int)config_.startup_frames) {
- if (async_started_ == false) {
- RPI_LOG("ALSC thread starting");
+ if (async_started_ == false)
restartAsync(stats, image_metadata);
- }
}
}
@@ -387,25 +393,26 @@ void get_cal_table(double ct, std::vector<AlscCalibration> const &calibrations,
if (calibrations.empty()) {
for (int i = 0; i < XY; i++)
cal_table[i] = 1.0;
- RPI_LOG("Alsc: no calibrations found");
+ LOG(RPiAlsc, Debug) << "no calibrations found";
} else if (ct <= calibrations.front().ct) {
memcpy(cal_table, calibrations.front().table,
XY * sizeof(double));
- RPI_LOG("Alsc: using calibration for "
- << calibrations.front().ct);
+ LOG(RPiAlsc, Debug) << "using calibration for "
+ << calibrations.front().ct;
} else if (ct >= calibrations.back().ct) {
memcpy(cal_table, calibrations.back().table,
XY * sizeof(double));
- RPI_LOG("Alsc: using calibration for "
- << calibrations.front().ct);
+ LOG(RPiAlsc, Debug) << "using calibration for "
+ << calibrations.back().ct;
} else {
int idx = 0;
while (ct > calibrations[idx + 1].ct)
idx++;
double ct0 = calibrations[idx].ct,
ct1 = calibrations[idx + 1].ct;
- RPI_LOG("Alsc: ct is " << ct << ", interpolating between "
- << ct0 << " and " << ct1);
+ LOG(RPiAlsc, Debug)
+ << "ct is " << ct << ", interpolating between "
+ << ct0 << " and " << ct1;
for (int i = 0; i < XY; i++)
cal_table[i] =
(calibrations[idx].table[i] * (ct1 - ct) +
@@ -606,9 +613,9 @@ static double gauss_seidel2_SOR(double const M[XY][4], double omega,
double lambda[XY])
{
double old_lambda[XY];
- for (int i = 0; i < XY; i++)
- old_lambda[i] = lambda[i];
int i;
+ for (i = 0; i < XY; i++)
+ old_lambda[i] = lambda[i];
lambda[0] = compute_lambda_bottom_start(0, M, lambda);
for (i = 1; i < X; i++)
lambda[i] = compute_lambda_bottom(i, M, lambda);
@@ -628,7 +635,7 @@ static double gauss_seidel2_SOR(double const M[XY][4], double omega,
lambda[i] = compute_lambda_bottom(i, M, lambda);
lambda[0] = compute_lambda_bottom_start(0, M, lambda);
double max_diff = 0;
- for (int i = 0; i < XY; i++) {
+ for (i = 0; i < XY; i++) {
lambda[i] = old_lambda[i] + (lambda[i] - old_lambda[i]) * omega;
if (fabs(lambda[i] - old_lambda[i]) > fabs(max_diff))
max_diff = lambda[i] - old_lambda[i];
@@ -656,15 +663,16 @@ static void run_matrix_iterations(double const C[XY], double lambda[XY],
for (int i = 0; i < n_iter; i++) {
double max_diff = fabs(gauss_seidel2_SOR(M, omega, lambda));
if (max_diff < threshold) {
- RPI_LOG("Stop after " << i + 1 << " iterations");
+ LOG(RPiAlsc, Debug)
+ << "Stop after " << i + 1 << " iterations";
break;
}
// this happens very occasionally (so make a note), though
// doesn't seem to matter
if (max_diff > last_max_diff)
- RPI_LOG("Iteration " << i << ": max_diff gone up "
- << last_max_diff << " to "
- << max_diff);
+ LOG(RPiAlsc, Debug)
+ << "Iteration " << i << ": max_diff gone up "
+ << last_max_diff << " to " << max_diff;
last_max_diff = max_diff;
}
// We're going to normalise the lambdas so the smallest is 1. Not sure
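
run_matrix_iterations() above repeatedly sweeps gauss_seidel2_SOR() until the largest per-element change drops below the threshold. A generic sketch of that strategy on a toy 2x2 system (not the ALSC smoothing matrix), with the same omega = 1.3 default as the tuning data below and the same sweep-then-relax structure:

#include <algorithm>
#include <cmath>
#include <iostream>

int main()
{
	/* Solve: 4x + y = 9, x + 3y = 10 (solution x = 17/11, y = 31/11). */
	double x = 0.0, y = 0.0;
	const double omega = 1.3;
	const double threshold = 1e-6;

	for (int i = 0; i < 100; i++) {
		double old_x = x, old_y = y;
		/* One plain Gauss-Seidel sweep... */
		x = (9.0 - y) / 4.0;
		y = (10.0 - x) / 3.0;
		/* ...then over-relax: step past the update by factor omega. */
		x = old_x + (x - old_x) * omega;
		y = old_y + (y - old_y) * omega;
		double max_diff = std::max(std::fabs(x - old_x),
					   std::fabs(y - old_y));
		if (max_diff < threshold) {
			std::cout << "Stop after " << i + 1 << " iterations\n";
			break;
		}
	}
	std::cout << "x " << x << " y " << y << "\n";
}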
diff --git a/src/ipa/raspberrypi/controller/rpi/awb.cpp b/src/ipa/raspberrypi/controller/rpi/awb.cpp
index a5536e47..5cfd33a3 100644
--- a/src/ipa/raspberrypi/controller/rpi/awb.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/awb.cpp
@@ -5,19 +5,24 @@
* awb.cpp - AWB control algorithm
*/
-#include "../logging.hpp"
+#include <libcamera/base/log.h>
+
#include "../lux_status.h"
#include "awb.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiAwb)
#define NAME "rpi.awb"
#define AWB_STATS_SIZE_X DEFAULT_AWB_REGIONS_X
#define AWB_STATS_SIZE_Y DEFAULT_AWB_REGIONS_Y
-const double Awb::RGB::INVALID = -1.0;
+// todo - the locking in this algorithm needs some tidying up as has been done
+// elsewhere (ALSC and AGC).
void AwbMode::Read(boost::property_tree::ptree const &params)
{
@@ -55,10 +60,10 @@ static void read_ct_curve(Pwl &ct_r, Pwl &ct_b,
void AwbConfig::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("AwbConfig");
bayes = params.get<int>("bayes", 1);
frame_period = params.get<uint16_t>("frame_period", 10);
startup_frames = params.get<uint16_t>("startup_frames", 10);
+ convergence_frames = params.get<unsigned int>("convergence_frames", 3);
speed = params.get<double>("speed", 0.05);
if (params.get_child_optional("ct_curve"))
read_ct_curve(ct_r, ct_b, params.get_child("ct_curve"));
@@ -100,8 +105,8 @@ void AwbConfig::Read(boost::property_tree::ptree const &params)
if (bayes) {
if (ct_r.Empty() || ct_b.Empty() || priors.empty() ||
default_mode == nullptr) {
- RPI_WARN(
- "Bayesian AWB mis-configured - switch to Grey method");
+ LOG(RPiAwb, Warning)
+ << "Bayesian AWB mis-configured - switch to Grey method";
bayes = false;
}
}
@@ -120,6 +125,7 @@ Awb::Awb(Controller *controller)
async_abort_ = async_start_ = async_started_ = async_finished_ = false;
mode_ = nullptr;
manual_r_ = manual_b_ = 0.0;
+ first_switch_mode_ = true;
async_thread_ = std::thread(std::bind(&Awb::asyncFunc, this));
}
@@ -128,8 +134,8 @@ Awb::~Awb()
{
std::lock_guard<std::mutex> lock(mutex_);
async_abort_ = true;
- async_signal_.notify_one();
}
+ async_signal_.notify_one();
async_thread_.join();
}
@@ -145,7 +151,7 @@ void Awb::Read(boost::property_tree::ptree const &params)
void Awb::Initialise()
{
- frame_count2_ = frame_count_ = frame_phase_ = 0;
+ frame_count_ = frame_phase_ = 0;
// Put something sane into the status that we are filtering towards,
// just in case the first few frames don't have anything meaningful in
// them.
@@ -163,48 +169,92 @@ void Awb::Initialise()
sync_results_.gain_b = 1.0;
}
prev_sync_results_ = sync_results_;
+ async_results_ = sync_results_;
+}
+
+unsigned int Awb::GetConvergenceFrames() const
+{
+ // If not in auto mode, there is no convergence
+ // to happen, so no need to drop any frames - return zero.
+ if (!isAutoEnabled())
+ return 0;
+ else
+ return config_.convergence_frames;
}
void Awb::SetMode(std::string const &mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
mode_name_ = mode_name;
}
void Awb::SetManualGains(double manual_r, double manual_b)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
	// If any of these are 0.0, we switch back to auto.
manual_r_ = manual_r;
manual_b_ = manual_b;
+ // If not in auto mode, set these values into the sync_results which
+ // means that Prepare() will adopt them immediately.
+ if (!isAutoEnabled()) {
+ sync_results_.gain_r = prev_sync_results_.gain_r = manual_r_;
+ sync_results_.gain_g = prev_sync_results_.gain_g = 1.0;
+ sync_results_.gain_b = prev_sync_results_.gain_b = manual_b_;
+ }
+}
+
+void Awb::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
+ Metadata *metadata)
+{
+ // On the first mode switch we'll have no meaningful colour
+ // temperature, so try to dead reckon one if in manual mode.
+ if (!isAutoEnabled() && first_switch_mode_ && config_.bayes) {
+ Pwl ct_r_inverse = config_.ct_r.Inverse();
+ Pwl ct_b_inverse = config_.ct_b.Inverse();
+ double ct_r = ct_r_inverse.Eval(ct_r_inverse.Domain().Clip(1 / manual_r_));
+ double ct_b = ct_b_inverse.Eval(ct_b_inverse.Domain().Clip(1 / manual_b_));
+ prev_sync_results_.temperature_K = (ct_r + ct_b) / 2;
+ sync_results_.temperature_K = prev_sync_results_.temperature_K;
+ }
+ // Let other algorithms know the current white balance values.
+ metadata->Set("awb.status", prev_sync_results_);
+ first_switch_mode_ = false;
+}
+
+bool Awb::isAutoEnabled() const
+{
+ return manual_r_ == 0.0 || manual_b_ == 0.0;
}
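
The dead-reckoning in SwitchMode() inverts the tuned ct_r/ct_b curves to turn manual gains back into a colour temperature estimate. A sketch with a toy linear r(T) curve standing in for the piecewise-linear Pwl (the real code averages the r and b estimates):

#include <algorithm>
#include <iostream>

int main()
{
	/* Assume r(T) = 2.0 - T / 4000 over T in [2500, 6500] (illustrative),
	 * so its inverse is T(r) = (2.0 - r) * 4000. */
	auto ct_r_inverse = [](double r) { return (2.0 - r) * 4000.0; };

	double manual_r = 1.6;     /* user-supplied red gain */
	double r = 1.0 / manual_r; /* gain -> canonical r = R/G */
	/* Clip to the inverse curve's domain, as Domain().Clip() does. */
	r = std::clamp(r, 2.0 - 6500.0 / 4000.0, 2.0 - 2500.0 / 4000.0);
	std::cout << "estimated CT " << ct_r_inverse(r) << "K\n";
}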
void Awb::fetchAsyncResults()
{
- RPI_LOG("Fetch AWB results");
+ LOG(RPiAwb, Debug) << "Fetch AWB results";
async_finished_ = false;
async_started_ = false;
- sync_results_ = async_results_;
+ // It's possible manual gains could be set even while the async
+ // thread was running, so only copy the results if still in auto mode.
+ if (isAutoEnabled())
+ sync_results_ = async_results_;
}
-void Awb::restartAsync(StatisticsPtr &stats, std::string const &mode_name,
- double lux)
+void Awb::restartAsync(StatisticsPtr &stats, double lux)
{
- RPI_LOG("Starting AWB thread");
+ LOG(RPiAwb, Debug) << "Starting AWB calculation";
// this makes a new reference which belongs to the asynchronous thread
statistics_ = stats;
// store the mode as it could technically change
- auto m = config_.modes.find(mode_name);
+ auto m = config_.modes.find(mode_name_);
mode_ = m != config_.modes.end()
? &m->second
: (mode_ == nullptr ? config_.default_mode : mode_);
lux_ = lux;
frame_phase_ = 0;
- async_start_ = true;
async_started_ = true;
- size_t len = mode_name.copy(async_results_.mode,
- sizeof(async_results_.mode) - 1);
+ size_t len = mode_name_.copy(async_results_.mode,
+ sizeof(async_results_.mode) - 1);
async_results_.mode[len] = '\0';
+ {
+ std::lock_guard<std::mutex> lock(mutex_);
+ async_start_ = true;
+ }
async_signal_.notify_one();
}
@@ -215,13 +265,12 @@ void Awb::Prepare(Metadata *image_metadata)
double speed = frame_count_ < (int)config_.startup_frames
? 1.0
: config_.speed;
- RPI_LOG("Awb: frame_count " << frame_count_ << " speed " << speed);
+ LOG(RPiAwb, Debug)
+ << "frame_count " << frame_count_ << " speed " << speed;
{
std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ && async_finished_) {
- RPI_LOG("AWB thread finished");
+ if (async_started_ && async_finished_)
fetchAsyncResults();
- }
}
// Finally apply IIR filter to results and put into metadata.
memcpy(prev_sync_results_.mode, sync_results_.mode,
@@ -236,9 +285,10 @@ void Awb::Prepare(Metadata *image_metadata)
prev_sync_results_.gain_b = speed * sync_results_.gain_b +
(1.0 - speed) * prev_sync_results_.gain_b;
image_metadata->Set("awb.status", prev_sync_results_);
- RPI_LOG("Using AWB gains r " << prev_sync_results_.gain_r << " g "
- << prev_sync_results_.gain_g << " b "
- << prev_sync_results_.gain_b);
+ LOG(RPiAwb, Debug)
+ << "Using AWB gains r " << prev_sync_results_.gain_r << " g "
+ << prev_sync_results_.gain_g << " b "
+ << prev_sync_results_.gain_b;
}
void Awb::Process(StatisticsPtr &stats, Metadata *image_metadata)
@@ -246,28 +296,20 @@ void Awb::Process(StatisticsPtr &stats, Metadata *image_metadata)
// Count frames since we last poked the async thread.
if (frame_phase_ < (int)config_.frame_period)
frame_phase_++;
- if (frame_count2_ < (int)config_.startup_frames)
- frame_count2_++;
- RPI_LOG("Awb: frame_phase " << frame_phase_);
- if (frame_phase_ >= (int)config_.frame_period ||
- frame_count2_ < (int)config_.startup_frames) {
+ LOG(RPiAwb, Debug) << "frame_phase " << frame_phase_;
+ // We do not restart the async thread if we're not in auto mode.
+ if (isAutoEnabled() &&
+ (frame_phase_ >= (int)config_.frame_period ||
+ frame_count_ < (int)config_.startup_frames)) {
// Update any settings and any image metadata that we need.
- std::string mode_name;
- {
- std::unique_lock<std::mutex> lock(settings_mutex_);
- mode_name = mode_name_;
- }
struct LuxStatus lux_status = {};
lux_status.lux = 400; // in case no metadata
if (image_metadata->Get("lux.status", lux_status) != 0)
- RPI_LOG("No lux metadata found");
- RPI_LOG("Awb lux value is " << lux_status.lux);
+ LOG(RPiAwb, Debug) << "No lux metadata found";
+ LOG(RPiAwb, Debug) << "Awb lux value is " << lux_status.lux;
- std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ == false) {
- RPI_LOG("AWB thread starting");
- restartAsync(stats, mode_name, lux_status.lux);
- }
+ if (async_started_ == false)
+ restartAsync(stats, lux_status.lux);
}
}
@@ -287,8 +329,8 @@ void Awb::asyncFunc()
{
std::lock_guard<std::mutex> lock(mutex_);
async_finished_ = true;
- sync_signal_.notify_one();
}
+ sync_signal_.notify_one();
}
}
@@ -297,16 +339,16 @@ static void generate_stats(std::vector<Awb::RGB> &zones,
double min_G)
{
for (int i = 0; i < AWB_STATS_SIZE_X * AWB_STATS_SIZE_Y; i++) {
- Awb::RGB zone; // this is "invalid", unless R gets overwritten later
+ Awb::RGB zone;
double counted = stats[i].counted;
if (counted >= min_pixels) {
zone.G = stats[i].g_sum / counted;
if (zone.G >= min_G) {
zone.R = stats[i].r_sum / counted;
zone.B = stats[i].b_sum / counted;
+ zones.push_back(zone);
}
}
- zones.push_back(zone);
}
}
@@ -336,7 +378,7 @@ double Awb::computeDelta2Sum(double gain_r, double gain_b)
double delta_r = gain_r * z.R - 1 - config_.whitepoint_r;
double delta_b = gain_b * z.B - 1 - config_.whitepoint_b;
double delta2 = delta_r * delta_r + delta_b * delta_b;
- //RPI_LOG("delta_r " << delta_r << " delta_b " << delta_b << " delta2 " << delta2);
+ //LOG(RPiAwb, Debug) << "delta_r " << delta_r << " delta_b " << delta_b << " delta2 " << delta2;
delta2 = std::min(delta2, config_.delta_limit);
delta2_sum += delta2;
}
@@ -399,10 +441,11 @@ double Awb::coarseSearch(Pwl const &prior)
double prior_log_likelihood =
prior.Eval(prior.Domain().Clip(t));
double final_log_likelihood = delta2_sum - prior_log_likelihood;
- RPI_LOG("t: " << t << " gain_r " << gain_r << " gain_b "
- << gain_b << " delta2_sum " << delta2_sum
- << " prior " << prior_log_likelihood << " final "
- << final_log_likelihood);
+ LOG(RPiAwb, Debug)
+ << "t: " << t << " gain_r " << gain_r << " gain_b "
+ << gain_b << " delta2_sum " << delta2_sum
+ << " prior " << prior_log_likelihood << " final "
+ << final_log_likelihood;
points_.push_back(Pwl::Point(t, final_log_likelihood));
if (points_.back().y < points_[best_point].y)
best_point = points_.size() - 1;
@@ -413,7 +456,7 @@ double Awb::coarseSearch(Pwl const &prior)
mode_->ct_hi);
}
t = points_[best_point].x;
- RPI_LOG("Coarse search found CT " << t);
+ LOG(RPiAwb, Debug) << "Coarse search found CT " << t;
// We have the best point of the search, but refine it with a quadratic
// interpolation around its neighbours.
if (points_.size() > 2) {
@@ -422,15 +465,16 @@ double Awb::coarseSearch(Pwl const &prior)
t = interpolate_quadatric(points_[best_point - 1],
points_[best_point],
points_[best_point + 1]);
- RPI_LOG("After quadratic refinement, coarse search has CT "
- << t);
+ LOG(RPiAwb, Debug)
+ << "After quadratic refinement, coarse search has CT "
+ << t;
}
return t;
}
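
The refinement after the coarse search fits a parabola through the best sample and its two neighbours and takes the parabola's minimum. A sketch using the standard parabolic-interpolation formula (not necessarily the exact interpolate_quadatric() implementation):

#include <iostream>

struct Point {
	double x, y;
};

/* x of the extremum of the parabola through three points. */
static double parabolaMinX(Point a, Point b, Point c)
{
	double num = (b.x - a.x) * (b.x - a.x) * (b.y - c.y) -
		     (b.x - c.x) * (b.x - c.x) * (b.y - a.y);
	double den = (b.x - a.x) * (b.y - c.y) -
		     (b.x - c.x) * (b.y - a.y);
	return b.x - 0.5 * num / den;
}

int main()
{
	/* Coarse-search samples of the cost around its minimum: */
	Point left{ 4000, 2.1 }, best{ 4500, 1.7 }, right{ 5000, 1.9 };
	std::cout << "refined CT " << parabolaMinX(left, best, right) << "K\n";
}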
void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
{
- int span_r, span_b;
+ int span_r = -1, span_b = -1;
config_.ct_r.Eval(t, &span_r);
config_.ct_b.Eval(t, &span_b);
double step = t / 10 * config_.coarse_step * 0.1;
@@ -475,8 +519,9 @@ void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
double gain_r = 1 / r_test, gain_b = 1 / b_test;
double delta2_sum = computeDelta2Sum(gain_r, gain_b);
points[j].y = delta2_sum - prior_log_likelihood;
- RPI_LOG("At t " << t_test << " r " << r_test << " b "
- << b_test << ": " << points[j].y);
+ LOG(RPiAwb, Debug)
+ << "At t " << t_test << " r " << r_test << " b "
+ << b_test << ": " << points[j].y;
if (points[j].y < points[best_point].y)
best_point = j;
}
@@ -493,17 +538,18 @@ void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
double gain_r = 1 / r_test, gain_b = 1 / b_test;
double delta2_sum = computeDelta2Sum(gain_r, gain_b);
double final_log_likelihood = delta2_sum - prior_log_likelihood;
- RPI_LOG("Finally "
+ LOG(RPiAwb, Debug)
+ << "Finally "
<< t_test << " r " << r_test << " b " << b_test << ": "
<< final_log_likelihood
- << (final_log_likelihood < best_log_likelihood ? " BEST"
- : ""));
+ << (final_log_likelihood < best_log_likelihood ? " BEST" : "");
if (best_t == 0 || final_log_likelihood < best_log_likelihood)
best_log_likelihood = final_log_likelihood,
best_t = t_test, best_r = r_test, best_b = b_test;
}
t = best_t, r = best_r, b = best_b;
- RPI_LOG("Fine search found t " << t << " r " << r << " b " << b);
+ LOG(RPiAwb, Debug)
+ << "Fine search found t " << t << " r " << r << " b " << b;
}
void Awb::awbBayes()
@@ -517,13 +563,14 @@ void Awb::awbBayes()
Pwl prior = interpolatePrior();
prior *= zones_.size() / (double)(AWB_STATS_SIZE_X * AWB_STATS_SIZE_Y);
prior.Map([](double x, double y) {
- RPI_LOG("(" << x << "," << y << ")");
+ LOG(RPiAwb, Debug) << "(" << x << "," << y << ")";
});
double t = coarseSearch(prior);
double r = config_.ct_r.Eval(t);
double b = config_.ct_b.Eval(t);
- RPI_LOG("After coarse search: r " << r << " b " << b << " (gains r "
- << 1 / r << " b " << 1 / b << ")");
+ LOG(RPiAwb, Debug)
+ << "After coarse search: r " << r << " b " << b << " (gains r "
+ << 1 / r << " b " << 1 / b << ")";
// Not entirely sure how to handle the fine search yet. Mostly the
// estimated CT is already good enough, but the fine search allows us to
	// wander transversely off the CT curve. Under some illuminants, where
@@ -531,8 +578,9 @@ void Awb::awbBayes()
// though I probably need more real datasets before deciding exactly how
// this should be controlled and tuned.
fineSearch(t, r, b, prior);
- RPI_LOG("After fine search: r " << r << " b " << b << " (gains r "
- << 1 / r << " b " << 1 / b << ")");
+ LOG(RPiAwb, Debug)
+ << "After fine search: r " << r << " b " << b << " (gains r "
+ << 1 / r << " b " << 1 / b << ")";
// Write results out for the main thread to pick up. Remember to adjust
// the gains from the ones that the "canonical sensor" would require to
// the ones needed by *this* sensor.
@@ -544,7 +592,7 @@ void Awb::awbBayes()
void Awb::awbGrey()
{
- RPI_LOG("Grey world AWB");
+ LOG(RPiAwb, Debug) << "Grey world AWB";
// Make a separate list of the derivatives for each of red and blue, so
// that we can sort them to exclude the extreme gains. We could
// consider some variations, such as normalising all the zones first, or
@@ -576,27 +624,18 @@ void Awb::awbGrey()
void Awb::doAwb()
{
- if (manual_r_ != 0.0 && manual_b_ != 0.0) {
- async_results_.temperature_K = 4500; // don't know what it is
- async_results_.gain_r = manual_r_;
- async_results_.gain_g = 1.0;
- async_results_.gain_b = manual_b_;
- RPI_LOG("Using manual white balance: gain_r "
- << async_results_.gain_r << " gain_b "
- << async_results_.gain_b);
- } else {
- prepareStats();
- RPI_LOG("Valid zones: " << zones_.size());
- if (zones_.size() > config_.min_regions) {
- if (config_.bayes)
- awbBayes();
- else
- awbGrey();
- RPI_LOG("CT found is "
- << async_results_.temperature_K
- << " with gains r " << async_results_.gain_r
- << " and b " << async_results_.gain_b);
- }
+ prepareStats();
+ LOG(RPiAwb, Debug) << "Valid zones: " << zones_.size();
+ if (zones_.size() > config_.min_regions) {
+ if (config_.bayes)
+ awbBayes();
+ else
+ awbGrey();
+ LOG(RPiAwb, Debug)
+ << "CT found is "
+ << async_results_.temperature_K
+ << " with gains r " << async_results_.gain_r
+ << " and b " << async_results_.gain_b;
}
}
diff --git a/src/ipa/raspberrypi/controller/rpi/awb.hpp b/src/ipa/raspberrypi/controller/rpi/awb.hpp
index 9124d042..8af1f27c 100644
--- a/src/ipa/raspberrypi/controller/rpi/awb.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/awb.hpp
@@ -37,6 +37,7 @@ struct AwbConfig {
uint16_t frame_period;
// number of initial frames for which speed taken as 1.0 (maximum)
uint16_t startup_frames;
+ unsigned int convergence_frames; // approx number of frames to converge
double speed; // IIR filter speed applied to algorithm results
bool fast; // "fast" mode uses a 16x16 rather than 32x32 grid
Pwl ct_r; // function maps CT to r (= R/G)
@@ -82,29 +83,27 @@ public:
char const *Name() const override;
void Initialise() override;
void Read(boost::property_tree::ptree const &params) override;
+ unsigned int GetConvergenceFrames() const override;
void SetMode(std::string const &name) override;
void SetManualGains(double manual_r, double manual_b) override;
+ void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
void Prepare(Metadata *image_metadata) override;
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
struct RGB {
- RGB(double _R = INVALID, double _G = INVALID,
- double _B = INVALID)
+ RGB(double _R = 0, double _G = 0, double _B = 0)
: R(_R), G(_G), B(_B)
{
}
double R, G, B;
- static const double INVALID;
- bool Valid() const { return G != INVALID; }
- bool Invalid() const { return G == INVALID; }
RGB &operator+=(RGB const &other)
{
R += other.R, G += other.G, B += other.B;
return *this;
}
- RGB Square() const { return RGB(R * R, G * G, B * B); }
};
private:
+ bool isAutoEnabled() const;
// configuration is read-only, and available to both threads
AwbConfig config_;
std::thread async_thread_;
@@ -127,15 +126,12 @@ private:
// counts up to frame_period before restarting the async thread
int frame_phase_;
int frame_count_; // counts up to startup_frames
- int frame_count2_; // counts up to startup_frames for Process method
AwbStatus sync_results_;
AwbStatus prev_sync_results_;
std::string mode_name_;
- std::mutex settings_mutex_;
// The following are for the asynchronous thread to use, though the main
// thread can set/reset them if the async thread is known to be idle:
- void restartAsync(StatisticsPtr &stats, std::string const &mode_name,
- double lux);
+ void restartAsync(StatisticsPtr &stats, double lux);
// copy out the results from the async thread so that it can be restarted
void fetchAsyncResults();
StatisticsPtr statistics_;
@@ -156,6 +152,7 @@ private:
double manual_r_;
// manual b setting
double manual_b_;
+ bool first_switch_mode_; // is this the first call to SwitchMode?
};
static inline Awb::RGB operator+(Awb::RGB const &a, Awb::RGB const &b)
diff --git a/src/ipa/raspberrypi/controller/rpi/black_level.cpp b/src/ipa/raspberrypi/controller/rpi/black_level.cpp
index 0629b77c..6b3497f1 100644
--- a/src/ipa/raspberrypi/controller/rpi/black_level.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/black_level.cpp
@@ -8,12 +8,16 @@
#include <math.h>
#include <stdint.h>
+#include <libcamera/base/log.h>
+
#include "../black_level_status.h"
-#include "../logging.hpp"
#include "black_level.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiBlackLevel)
#define NAME "rpi.black_level"
@@ -29,12 +33,15 @@ char const *BlackLevel::Name() const
void BlackLevel::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
uint16_t black_level = params.get<uint16_t>(
"black_level", 4096); // 64 in 10 bits scaled to 16 bits
black_level_r_ = params.get<uint16_t>("black_level_r", black_level);
black_level_g_ = params.get<uint16_t>("black_level_g", black_level);
black_level_b_ = params.get<uint16_t>("black_level_b", black_level);
+ LOG(RPiBlackLevel, Debug)
+ << " Read black levels red " << black_level_r_
+ << " green " << black_level_g_
+ << " blue " << black_level_b_;
}
void BlackLevel::Prepare(Metadata *image_metadata)
diff --git a/src/ipa/raspberrypi/controller/rpi/ccm.cpp b/src/ipa/raspberrypi/controller/rpi/ccm.cpp
index a8a2caff..821a4c7c 100644
--- a/src/ipa/raspberrypi/controller/rpi/ccm.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/ccm.cpp
@@ -5,15 +5,19 @@
* ccm.cpp - CCM (colour correction matrix) control algorithm
*/
+#include <libcamera/base/log.h>
+
#include "../awb_status.h"
#include "../ccm_status.h"
-#include "../logging.hpp"
#include "../lux_status.h"
#include "../metadata.hpp"
#include "ccm.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiCcm)
// This algorithm selects a CCM (Colour Correction Matrix) according to the
// colour temperature estimated by AWB (interpolating between known matrices as
@@ -129,9 +133,9 @@ void Ccm::Prepare(Metadata *image_metadata)
lux_ok = get_locked(image_metadata, "lux.status", lux);
}
if (!awb_ok)
- RPI_WARN("Ccm: no colour temperature found");
+ LOG(RPiCcm, Warning) << "no colour temperature found";
if (!lux_ok)
- RPI_WARN("Ccm: no lux value found");
+ LOG(RPiCcm, Warning) << "no lux value found";
Matrix ccm = calculate_ccm(config_.ccms, awb.temperature_K);
double saturation = saturation_;
struct CcmStatus ccm_status;
@@ -144,13 +148,15 @@ void Ccm::Prepare(Metadata *image_metadata)
for (int i = 0; i < 3; i++)
ccm_status.matrix[j * 3 + i] =
std::max(-8.0, std::min(7.9999, ccm.m[j][i]));
- RPI_LOG("CCM: colour temperature " << awb.temperature_K << "K");
- RPI_LOG("CCM: " << ccm_status.matrix[0] << " " << ccm_status.matrix[1]
- << " " << ccm_status.matrix[2] << " "
- << ccm_status.matrix[3] << " " << ccm_status.matrix[4]
- << " " << ccm_status.matrix[5] << " "
- << ccm_status.matrix[6] << " " << ccm_status.matrix[7]
- << " " << ccm_status.matrix[8]);
+ LOG(RPiCcm, Debug)
+ << "colour temperature " << awb.temperature_K << "K";
+ LOG(RPiCcm, Debug)
+ << "CCM: " << ccm_status.matrix[0] << " " << ccm_status.matrix[1]
+ << " " << ccm_status.matrix[2] << " "
+ << ccm_status.matrix[3] << " " << ccm_status.matrix[4]
+ << " " << ccm_status.matrix[5] << " "
+ << ccm_status.matrix[6] << " " << ccm_status.matrix[7]
+ << " " << ccm_status.matrix[8];
image_metadata->Set("ccm.status", ccm_status);
}
diff --git a/src/ipa/raspberrypi/controller/rpi/ccm.hpp b/src/ipa/raspberrypi/controller/rpi/ccm.hpp
index fcf077e7..330ed51f 100644
--- a/src/ipa/raspberrypi/controller/rpi/ccm.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/ccm.hpp
@@ -7,7 +7,6 @@
#pragma once
#include <vector>
-#include <atomic>
#include "../ccm_algorithm.hpp"
#include "../pwl.hpp"
@@ -70,7 +69,7 @@ public:
private:
CcmConfig config_;
- std::atomic<double> saturation_;
+ double saturation_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/contrast.cpp b/src/ipa/raspberrypi/controller/rpi/contrast.cpp
index 103153db..ae55aad5 100644
--- a/src/ipa/raspberrypi/controller/rpi/contrast.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/contrast.cpp
@@ -6,12 +6,17 @@
*/
#include <stdint.h>
+#include <libcamera/base/log.h>
+
#include "../contrast_status.h"
#include "../histogram.hpp"
#include "contrast.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiContrast)
// This is a very simple control algorithm which simply retrieves the results of
// AGC and AWB via their "status" metadata, and applies digital gain to the
@@ -97,11 +102,13 @@ Pwl compute_stretch_curve(Histogram const &histogram,
double hist_lo = histogram.Quantile(config.lo_histogram) *
(65536 / NUM_HISTOGRAM_BINS);
double level_lo = config.lo_level * 65536;
- RPI_LOG("Move histogram point " << hist_lo << " to " << level_lo);
+ LOG(RPiContrast, Debug)
+ << "Move histogram point " << hist_lo << " to " << level_lo;
hist_lo = std::max(
level_lo,
std::min(65535.0, std::min(hist_lo, level_lo + config.lo_max)));
- RPI_LOG("Final values " << hist_lo << " -> " << level_lo);
+ LOG(RPiContrast, Debug)
+ << "Final values " << hist_lo << " -> " << level_lo;
enhance.Append(hist_lo, level_lo);
// Keep the mid-point (median) in the same place, though, to limit the
// apparent amount of global brightness shift.
@@ -113,11 +120,13 @@ Pwl compute_stretch_curve(Histogram const &histogram,
double hist_hi = histogram.Quantile(config.hi_histogram) *
(65536 / NUM_HISTOGRAM_BINS);
double level_hi = config.hi_level * 65536;
- RPI_LOG("Move histogram point " << hist_hi << " to " << level_hi);
+ LOG(RPiContrast, Debug)
+ << "Move histogram point " << hist_hi << " to " << level_hi;
hist_hi = std::min(
level_hi,
std::max(0.0, std::max(hist_hi, level_hi - config.hi_max)));
- RPI_LOG("Final values " << hist_hi << " -> " << level_hi);
+ LOG(RPiContrast, Debug)
+ << "Final values " << hist_hi << " -> " << level_hi;
enhance.Append(hist_hi, level_hi);
enhance.Append(65535, 65535);
return enhance;
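
The clamping in compute_stretch_curve() reads more easily in isolation: move the histogram's low quantile down to level_lo, but never below level_lo itself and never by more than lo_max. A sketch with illustrative values:

#include <algorithm>
#include <iostream>

int main()
{
	double hist_lo = 9000.0;  /* e.g. low quantile, scaled to 16 bits */
	double level_lo = 4000.0; /* where we would like that quantile */
	double lo_max = 2000.0;   /* cap on how far the point may move */

	hist_lo = std::max(level_lo,
			   std::min(65535.0,
				    std::min(hist_lo, level_lo + lo_max)));
	/* The gamma curve then maps hist_lo -> level_lo; here 6000 -> 4000. */
	std::cout << "map " << hist_lo << " -> " << level_lo << "\n";
}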
@@ -127,7 +136,8 @@ Pwl apply_manual_contrast(Pwl const &gamma_curve, double brightness,
double contrast)
{
Pwl new_gamma_curve;
- RPI_LOG("Manual brightness " << brightness << " contrast " << contrast);
+ LOG(RPiContrast, Debug)
+ << "Manual brightness " << brightness << " contrast " << contrast;
gamma_curve.Map([&](double x, double y) {
new_gamma_curve.Append(
x, std::max(0.0, std::min(65535.0,
@@ -140,7 +150,6 @@ Pwl apply_manual_contrast(Pwl const &gamma_curve, double brightness,
void Contrast::Process(StatisticsPtr &stats,
[[maybe_unused]] Metadata *image_metadata)
{
- double brightness = brightness_, contrast = contrast_;
Histogram histogram(stats->hist[0].g_hist, NUM_HISTOGRAM_BINS);
// We look at the histogram and adjust the gamma curve in the following
// ways: 1. Adjust the gamma curve so as to pull the start of the
@@ -155,13 +164,13 @@ void Contrast::Process(StatisticsPtr &stats,
}
// 2. Finally apply any manually selected brightness/contrast
// adjustment.
- if (brightness != 0 || contrast != 1.0)
- gamma_curve = apply_manual_contrast(gamma_curve, brightness,
- contrast);
+ if (brightness_ != 0 || contrast_ != 1.0)
+ gamma_curve = apply_manual_contrast(gamma_curve, brightness_,
+ contrast_);
// And fill in the status for output. Use more points towards the bottom
// of the curve.
ContrastStatus status;
- fill_in_status(status, brightness, contrast, gamma_curve);
+ fill_in_status(status, brightness_, contrast_, gamma_curve);
{
std::unique_lock<std::mutex> lock(mutex_);
status_ = status;
diff --git a/src/ipa/raspberrypi/controller/rpi/contrast.hpp b/src/ipa/raspberrypi/controller/rpi/contrast.hpp
index 6836f181..85624539 100644
--- a/src/ipa/raspberrypi/controller/rpi/contrast.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/contrast.hpp
@@ -6,7 +6,6 @@
*/
#pragma once
-#include <atomic>
#include <mutex>
#include "../contrast_algorithm.hpp"
@@ -42,8 +41,8 @@ public:
private:
ContrastConfig config_;
- std::atomic<double> brightness_;
- std::atomic<double> contrast_;
+ double brightness_;
+ double contrast_;
ContrastStatus status_;
std::mutex mutex_;
};
diff --git a/src/ipa/raspberrypi/controller/rpi/dpc.cpp b/src/ipa/raspberrypi/controller/rpi/dpc.cpp
index 348e1609..110f5056 100644
--- a/src/ipa/raspberrypi/controller/rpi/dpc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/dpc.cpp
@@ -5,10 +5,14 @@
* dpc.cpp - DPC (defective pixel correction) control algorithm
*/
-#include "../logging.hpp"
+#include <libcamera/base/log.h>
+
#include "dpc.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiDpc)
// We use the lux status so that we can apply stronger settings in darkness (if
// necessary).
@@ -37,7 +41,7 @@ void Dpc::Prepare(Metadata *image_metadata)
DpcStatus dpc_status = {};
// Should we vary this with lux level or analogue gain? TBD.
dpc_status.strength = config_.strength;
- RPI_LOG("Dpc: strength " << dpc_status.strength);
+ LOG(RPiDpc, Debug) << "strength " << dpc_status.strength;
image_metadata->Set("dpc.status", dpc_status);
}
diff --git a/src/ipa/raspberrypi/controller/rpi/focus.cpp b/src/ipa/raspberrypi/controller/rpi/focus.cpp
index bab4406f..a87ec802 100644
--- a/src/ipa/raspberrypi/controller/rpi/focus.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/focus.cpp
@@ -6,7 +6,7 @@
*/
#include <stdint.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
#include "../focus_status.h"
#include "focus.hpp"
diff --git a/src/ipa/raspberrypi/controller/rpi/geq.cpp b/src/ipa/raspberrypi/controller/rpi/geq.cpp
index b6c98414..4530cb75 100644
--- a/src/ipa/raspberrypi/controller/rpi/geq.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/geq.cpp
@@ -5,14 +5,18 @@
* geq.cpp - GEQ (green equalisation) control algorithm
*/
+#include <libcamera/base/log.h>
+
#include "../device_status.h"
-#include "../logging.hpp"
#include "../lux_status.h"
#include "../pwl.hpp"
#include "geq.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiGeq)
// We use the lux status so that we can apply stronger settings in darkness (if
// necessary).
@@ -44,11 +48,12 @@ void Geq::Prepare(Metadata *image_metadata)
LuxStatus lux_status = {};
lux_status.lux = 400;
if (image_metadata->Get("lux.status", lux_status))
- RPI_WARN("Geq: no lux data found");
- DeviceStatus device_status = {};
+ LOG(RPiGeq, Warning) << "no lux data found";
+ DeviceStatus device_status;
device_status.analogue_gain = 1.0; // in case not found
if (image_metadata->Get("device.status", device_status))
- RPI_WARN("Geq: no device metadata - use analogue gain of 1x");
+ LOG(RPiGeq, Warning)
+ << "no device metadata - use analogue gain of 1x";
GeqStatus geq_status = {};
double strength =
config_.strength.Empty()
@@ -60,10 +65,11 @@ void Geq::Prepare(Metadata *image_metadata)
double slope = config_.slope * strength;
geq_status.offset = std::min(65535.0, std::max(0.0, offset));
geq_status.slope = std::min(.99999, std::max(0.0, slope));
- RPI_LOG("Geq: offset " << geq_status.offset << " slope "
- << geq_status.slope << " (analogue gain "
- << device_status.analogue_gain << " lux "
- << lux_status.lux << ")");
+ LOG(RPiGeq, Debug)
+ << "offset " << geq_status.offset << " slope "
+ << geq_status.slope << " (analogue gain "
+ << device_status.analogue_gain << " lux "
+ << lux_status.lux << ")";
image_metadata->Set("geq.status", geq_status);
}
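
A sketch of the GEQ scaling and clamping above, using the offset/slope values from the imx290 tuning file added later in this change (strength = 1.5 is an assumed low-light boost from the lux lookup):

#include <algorithm>
#include <iostream>

int main()
{
	double config_offset = 187, config_slope = 0.00842; /* imx290 tuning */
	double strength = 1.5; /* assumed boost applied in darkness */

	double offset = std::min(65535.0,
				 std::max(0.0, config_offset * strength));
	double slope = std::min(.99999,
				std::max(0.0, config_slope * strength));
	std::cout << "offset " << offset << " slope " << slope << "\n";
}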
diff --git a/src/ipa/raspberrypi/controller/rpi/lux.cpp b/src/ipa/raspberrypi/controller/rpi/lux.cpp
index 5acd49a0..6367b17d 100644
--- a/src/ipa/raspberrypi/controller/rpi/lux.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/lux.cpp
@@ -8,12 +8,17 @@
#include "linux/bcm2835-isp.h"
+#include <libcamera/base/log.h>
+
#include "../device_status.h"
-#include "../logging.hpp"
#include "lux.hpp"
using namespace RPiController;
+using namespace libcamera;
+using namespace std::literals::chrono_literals;
+
+LOG_DEFINE_CATEGORY(RPiLux)
#define NAME "rpi.lux"
@@ -33,9 +38,8 @@ char const *Lux::Name() const
void Lux::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
reference_shutter_speed_ =
- params.get<double>("reference_shutter_speed");
+ params.get<double>("reference_shutter_speed") * 1.0us;
reference_gain_ = params.get<double>("reference_gain");
reference_aperture_ = params.get<double>("reference_aperture", 1.0);
reference_Y_ = params.get<double>("reference_Y");
@@ -43,6 +47,11 @@ void Lux::Read(boost::property_tree::ptree const &params)
current_aperture_ = reference_aperture_;
}
+void Lux::SetCurrentAperture(double aperture)
+{
+ current_aperture_ = aperture;
+}
+
void Lux::Prepare(Metadata *image_metadata)
{
std::unique_lock<std::mutex> lock(mutex_);
@@ -51,16 +60,9 @@ void Lux::Prepare(Metadata *image_metadata)
void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
{
- // set some initial values to shut the compiler up
- DeviceStatus device_status =
- { .shutter_speed = 1.0,
- .analogue_gain = 1.0,
- .lens_position = 0.0,
- .aperture = 0.0,
- .flash_intensity = 0.0 };
+ DeviceStatus device_status;
if (image_metadata->Get("device.status", device_status) == 0) {
double current_gain = device_status.analogue_gain;
- double current_shutter_speed = device_status.shutter_speed;
double current_aperture = device_status.aperture;
if (current_aperture == 0)
current_aperture = current_aperture_;
@@ -75,7 +77,7 @@ void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
double current_Y = sum / (double)num + .5;
double gain_ratio = reference_gain_ / current_gain;
double shutter_speed_ratio =
- reference_shutter_speed_ / current_shutter_speed;
+ reference_shutter_speed_ / device_status.shutter_speed;
double aperture_ratio = reference_aperture_ / current_aperture;
double Y_ratio = current_Y * (65536 / num_bins) / reference_Y_;
double estimated_lux = shutter_speed_ratio * gain_ratio *
@@ -84,7 +86,7 @@ void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
LuxStatus status;
status.lux = estimated_lux;
status.aperture = current_aperture;
- RPI_LOG(Name() << ": estimated lux " << estimated_lux);
+ LOG(RPiLux, Debug) << ": estimated lux " << estimated_lux;
{
std::unique_lock<std::mutex> lock(mutex_);
status_ = status;
@@ -93,7 +95,7 @@ void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
// algorithms get the latest value.
image_metadata->Set("lux.status", status);
} else
- RPI_WARN(Name() << ": no device metadata");
+ LOG(RPiLux, Warning) << ": no device metadata";
}
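
The lux estimate scales the reference scene's lux by the sensitivity ratios between the reference and current frames. A sketch using the imx290 reference values from the tuning file below, omitting the aperture term and the histogram-bin scaling of Y for brevity (current_Y is taken as already scaled to the 16-bit range of reference_Y):

#include <iostream>

int main()
{
	/* Reference image conditions, from the tuning file: */
	double reference_shutter_us = 6813, reference_gain = 1.0;
	double reference_Y = 12900, reference_lux = 890;

	/* Current frame: */
	double shutter_us = 20000, gain = 2.0, current_Y = 9000;

	double estimated_lux = (reference_shutter_us / shutter_us) *
			       (reference_gain / gain) *
			       (current_Y / reference_Y) * reference_lux;
	std::cout << "estimated lux " << estimated_lux << "\n"; /* ~106 */
}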
// Register algorithm with the system.
diff --git a/src/ipa/raspberrypi/controller/rpi/lux.hpp b/src/ipa/raspberrypi/controller/rpi/lux.hpp
index 7b6c7258..3ebd35d1 100644
--- a/src/ipa/raspberrypi/controller/rpi/lux.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/lux.hpp
@@ -6,9 +6,10 @@
*/
#pragma once
-#include <atomic>
#include <mutex>
+#include <libcamera/base/utils.h>
+
#include "../lux_status.h"
#include "../algorithm.hpp"
@@ -29,12 +30,12 @@ public:
private:
// These values define the conditions of the reference image, against
// which we compare the new image.
- double reference_shutter_speed_; // in micro-seconds
+ libcamera::utils::Duration reference_shutter_speed_;
double reference_gain_;
double reference_aperture_; // units of 1/f
double reference_Y_; // out of 65536
double reference_lux_;
- std::atomic<double> current_aperture_;
+ double current_aperture_;
LuxStatus status_;
std::mutex mutex_;
};
diff --git a/src/ipa/raspberrypi/controller/rpi/noise.cpp b/src/ipa/raspberrypi/controller/rpi/noise.cpp
index 9e9eaf1b..63cad639 100644
--- a/src/ipa/raspberrypi/controller/rpi/noise.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/noise.cpp
@@ -7,13 +7,17 @@
#include <math.h>
+#include <libcamera/base/log.h>
+
#include "../device_status.h"
-#include "../logging.hpp"
#include "../noise_status.h"
#include "noise.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiNoise)
#define NAME "rpi.noise"
@@ -37,7 +41,6 @@ void Noise::SwitchMode(CameraMode const &camera_mode,
void Noise::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
reference_constant_ = params.get<double>("reference_constant");
reference_slope_ = params.get<double>("reference_slope");
}
@@ -58,10 +61,11 @@ void Noise::Prepare(Metadata *image_metadata)
status.noise_constant = reference_constant_ * factor;
status.noise_slope = reference_slope_ * factor;
image_metadata->Set("noise.status", status);
- RPI_LOG(Name() << ": constant " << status.noise_constant
- << " slope " << status.noise_slope);
+ LOG(RPiNoise, Debug)
+ << "constant " << status.noise_constant
+ << " slope " << status.noise_slope;
} else
- RPI_WARN(Name() << " no metadata");
+ LOG(RPiNoise, Warning) << " no metadata";
}
// Register algorithm with the system.
diff --git a/src/ipa/raspberrypi/controller/rpi/noise.hpp b/src/ipa/raspberrypi/controller/rpi/noise.hpp
index 6f6e0be9..1c9de5c8 100644
--- a/src/ipa/raspberrypi/controller/rpi/noise.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/noise.hpp
@@ -26,7 +26,7 @@ private:
// the noise profile for analogue gain of 1.0
double reference_constant_;
double reference_slope_;
- std::atomic<double> mode_factor_;
+ double mode_factor_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/sdn.cpp b/src/ipa/raspberrypi/controller/rpi/sdn.cpp
index aa82830b..93845509 100644
--- a/src/ipa/raspberrypi/controller/rpi/sdn.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/sdn.cpp
@@ -1,16 +1,21 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
*
* sdn.cpp - SDN (spatial denoise) control algorithm
*/
+#include <libcamera/base/log.h>
+
+#include "../denoise_status.h"
#include "../noise_status.h"
-#include "../sdn_status.h"
#include "sdn.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiSdn)
// Calculate settings for the spatial denoise block using the noise profile in
// the image metadata.
@@ -18,7 +23,7 @@ using namespace RPiController;
#define NAME "rpi.sdn"
Sdn::Sdn(Controller *controller)
- : Algorithm(controller)
+ : DenoiseAlgorithm(controller), mode_(DenoiseMode::ColourOff)
{
}
@@ -40,19 +45,26 @@ void Sdn::Prepare(Metadata *image_metadata)
struct NoiseStatus noise_status = {};
noise_status.noise_slope = 3.0; // in case no metadata
if (image_metadata->Get("noise.status", noise_status) != 0)
- RPI_WARN("Sdn: no noise profile found");
- RPI_LOG("Noise profile: constant " << noise_status.noise_constant
- << " slope "
- << noise_status.noise_slope);
- struct SdnStatus status;
+ LOG(RPiSdn, Warning) << "no noise profile found";
+ LOG(RPiSdn, Debug)
+ << "Noise profile: constant " << noise_status.noise_constant
+ << " slope " << noise_status.noise_slope;
+ struct DenoiseStatus status;
status.noise_constant = noise_status.noise_constant * deviation_;
status.noise_slope = noise_status.noise_slope * deviation_;
status.strength = strength_;
- image_metadata->Set("sdn.status", status);
- RPI_LOG("Sdn: programmed constant " << status.noise_constant
- << " slope " << status.noise_slope
- << " strength "
- << status.strength);
+ status.mode = static_cast<std::underlying_type_t<DenoiseMode>>(mode_);
+ image_metadata->Set("denoise.status", status);
+ LOG(RPiSdn, Debug)
+ << "programmed constant " << status.noise_constant
+ << " slope " << status.noise_slope
+ << " strength " << status.strength;
+}
+
+void Sdn::SetMode(DenoiseMode mode)
+{
+ // We only distinguish between off and all other modes.
+ mode_ = mode;
}
// Register algorithm with the system.
diff --git a/src/ipa/raspberrypi/controller/rpi/sdn.hpp b/src/ipa/raspberrypi/controller/rpi/sdn.hpp
index 486c000d..2371ce04 100644
--- a/src/ipa/raspberrypi/controller/rpi/sdn.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/sdn.hpp
@@ -7,12 +7,13 @@
#pragma once
#include "../algorithm.hpp"
+#include "../denoise_algorithm.hpp"
namespace RPiController {
// Algorithm to calculate correct spatial denoise (SDN) settings.
-class Sdn : public Algorithm
+class Sdn : public DenoiseAlgorithm
{
public:
Sdn(Controller *controller = NULL);
@@ -20,10 +21,12 @@ public:
void Read(boost::property_tree::ptree const &params) override;
void Initialise() override;
void Prepare(Metadata *image_metadata) override;
+ void SetMode(DenoiseMode mode) override;
private:
double deviation_;
double strength_;
+ DenoiseMode mode_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/sharpen.cpp b/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
index c953a7d9..b0c2e00a 100644
--- a/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
@@ -7,12 +7,16 @@
#include <math.h>
-#include "../logging.hpp"
+#include <libcamera/base/log.h>
+
#include "../sharpen_status.h"
#include "sharpen.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiSharpen)
#define NAME "rpi.sharpen"
@@ -35,10 +39,13 @@ void Sharpen::SwitchMode(CameraMode const &camera_mode,
void Sharpen::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
threshold_ = params.get<double>("threshold", 1.0);
strength_ = params.get<double>("strength", 1.0);
limit_ = params.get<double>("limit", 1.0);
+ LOG(RPiSharpen, Debug)
+ << "Read threshold " << threshold_
+ << " strength " << strength_
+ << " limit " << limit_;
}
void Sharpen::SetStrength(double strength)
diff --git a/src/ipa/raspberrypi/controller/sdn_status.h b/src/ipa/raspberrypi/controller/sdn_status.h
deleted file mode 100644
index 871e0b62..00000000
--- a/src/ipa/raspberrypi/controller/sdn_status.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
- *
- * sdn_status.h - SDN (spatial denoise) control algorithm status
- */
-#pragma once
-
-// This stores the parameters required for Spatial Denoise (SDN).
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-struct SdnStatus {
- double noise_constant;
- double noise_slope;
- double strength;
-};
-
-#ifdef __cplusplus
-}
-#endif
diff --git a/src/ipa/raspberrypi/data/imx219.json b/src/ipa/raspberrypi/data/imx219.json
index ce7ff36f..1ec338be 100644
--- a/src/ipa/raspberrypi/data/imx219.json
+++ b/src/ipa/raspberrypi/data/imx219.json
@@ -133,18 +133,29 @@
{
"shutter":
[
- 100, 10000, 30000, 60000, 120000
+ 100, 10000, 30000, 60000, 66666
],
"gain":
[
1.0, 2.0, 4.0, 6.0, 6.0
]
},
- "sport":
+ "short":
{
"shutter":
[
- 100, 5000, 10000, 20000, 120000
+ 100, 5000, 10000, 20000, 33333
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ },
+ "long":
+ {
+ "shutter":
+ [
+ 100, 10000, 30000, 60000, 120000
],
"gain":
[
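
The exposure mode entries changed above are parallel ramps: element i of "shutter" (in microseconds) pairs with element i of "gain", and AGC prefers raising shutter time before gain as it walks the stages. The retune caps "normal" at 66666 us and replaces "sport" with explicit "short" and "long" modes. A rough sketch of consuming such a ramp, under my simplifying assumption (for illustration only; the real logic lives in controller/rpi/agc.cpp) that the request is a total exposure equal to shutter times gain:

#include <cstddef>
#include <vector>

struct Exposure {
	double shutterUs;
	double gain;
};

/*
 * Pick the first ramp stage whose shutter x gain ceiling covers the
 * requested total exposure, then trim the shutter to hit it exactly.
 */
Exposure divideExposure(double totalUs,
			const std::vector<double> &shutters,
			const std::vector<double> &gains)
{
	double shutter = shutters.back(), gain = gains.back();
	for (size_t i = 0; i < shutters.size(); i++) {
		if (shutters[i] * gains[i] >= totalUs) {
			shutter = shutters[i];
			gain = gains[i];
			break;
		}
	}
	if (shutter * gain > totalUs)
		shutter = totalUs / gain;
	return { shutter, gain };
}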
diff --git a/src/ipa/raspberrypi/data/imx290.json b/src/ipa/raspberrypi/data/imx290.json
new file mode 100644
index 00000000..6fb92cc4
--- /dev/null
+++ b/src/ipa/raspberrypi/data/imx290.json
@@ -0,0 +1,165 @@
+{
+ "rpi.black_level":
+ {
+ "black_level": 3840
+ },
+ "rpi.dpc":
+ {
+ },
+ "rpi.lux":
+ {
+ "reference_shutter_speed": 6813,
+ "reference_gain": 1.0,
+ "reference_aperture": 1.0,
+ "reference_lux": 890,
+ "reference_Y": 12900
+ },
+ "rpi.noise":
+ {
+ "reference_constant": 0,
+ "reference_slope": 2.67
+ },
+ "rpi.geq":
+ {
+ "offset": 187,
+ "slope": 0.00842
+ },
+ "rpi.sdn":
+ {
+ },
+ "rpi.awb":
+ {
+ "bayes": 0
+ },
+ "rpi.agc":
+ {
+ "speed": 0.2,
+ "metering_modes":
+ {
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ },
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter":
+ [
+ 10, 30000, 60000
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 8.0
+ ]
+ },
+ "sport":
+ {
+ "shutter":
+ [
+ 10, 5000, 10000, 20000, 120000
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal":
+ [
+ ],
+ "highlight":
+ [
+ {
+ "bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target":
+ [
+ 0, 0.5, 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER", "q_lo": 0.98, "q_hi": 1.0, "y_target":
+ [
+ 0, 0.8, 1000, 0.8
+ ]
+ }
+ ]
+ },
+ "y_target":
+ [
+ 0, 0.16, 1000, 0.16, 10000, 0.16
+ ]
+ },
+ "rpi.alsc":
+ {
+ "omega": 1.3,
+ "n_iter": 100,
+ "luminance_strength": 0.7,
+ "luminance_lut":
+ [
+ 2.844, 2.349, 2.018, 1.775, 1.599, 1.466, 1.371, 1.321, 1.306, 1.316, 1.357, 1.439, 1.552, 1.705, 1.915, 2.221,
+ 2.576, 2.151, 1.851, 1.639, 1.478, 1.358, 1.272, 1.231, 1.218, 1.226, 1.262, 1.335, 1.438, 1.571, 1.766, 2.067,
+ 2.381, 2.005, 1.739, 1.545, 1.389, 1.278, 1.204, 1.166, 1.153, 1.161, 1.194, 1.263, 1.356, 1.489, 1.671, 1.943,
+ 2.242, 1.899, 1.658, 1.481, 1.329, 1.225, 1.156, 1.113, 1.096, 1.107, 1.143, 1.201, 1.289, 1.423, 1.607, 1.861,
+ 2.152, 1.831, 1.602, 1.436, 1.291, 1.193, 1.121, 1.069, 1.047, 1.062, 1.107, 1.166, 1.249, 1.384, 1.562, 1.801,
+ 2.104, 1.795, 1.572, 1.407, 1.269, 1.174, 1.099, 1.041, 1.008, 1.029, 1.083, 1.146, 1.232, 1.364, 1.547, 1.766,
+ 2.104, 1.796, 1.572, 1.403, 1.264, 1.171, 1.097, 1.036, 1.001, 1.025, 1.077, 1.142, 1.231, 1.363, 1.549, 1.766,
+ 2.148, 1.827, 1.594, 1.413, 1.276, 1.184, 1.114, 1.062, 1.033, 1.049, 1.092, 1.153, 1.242, 1.383, 1.577, 1.795,
+ 2.211, 1.881, 1.636, 1.455, 1.309, 1.214, 1.149, 1.104, 1.081, 1.089, 1.125, 1.184, 1.273, 1.423, 1.622, 1.846,
+ 2.319, 1.958, 1.698, 1.516, 1.362, 1.262, 1.203, 1.156, 1.137, 1.142, 1.171, 1.229, 1.331, 1.484, 1.682, 1.933,
+ 2.459, 2.072, 1.789, 1.594, 1.441, 1.331, 1.261, 1.219, 1.199, 1.205, 1.232, 1.301, 1.414, 1.571, 1.773, 2.052,
+ 2.645, 2.206, 1.928, 1.728, 1.559, 1.451, 1.352, 1.301, 1.282, 1.289, 1.319, 1.395, 1.519, 1.685, 1.904, 2.227
+ ],
+ "sigma": 0.005,
+ "sigma_Cb": 0.005
+ },
+ "rpi.contrast":
+ {
+ "ce_enable": 1,
+ "gamma_curve":
+ [
+ 0, 0, 1024, 5040, 2048, 9338, 3072, 12356, 4096, 15312, 5120, 18051, 6144, 20790, 7168, 23193,
+ 8192, 25744, 9216, 27942, 10240, 30035, 11264, 32005, 12288, 33975, 13312, 35815, 14336, 37600, 15360, 39168,
+ 16384, 40642, 18432, 43379, 20480, 45749, 22528, 47753, 24576, 49621, 26624, 51253, 28672, 52698, 30720, 53796,
+ 32768, 54876, 36864, 57012, 40960, 58656, 45056, 59954, 49152, 61183, 53248, 62355, 57344, 63419, 61440, 64476,
+ 65535, 65535
+ ]
+ },
+ "rpi.sharpen":
+ {
+ },
+ "rpi.ccm":
+ {
+ "ccms":
+ [
+ {
+ "ct": 3900, "ccm":
+ [
+ 1.54659, -0.17707, -0.36953, -0.51471, 1.72733, -0.21262, 0.06667, -0.92279, 1.85612
+ ]
+ }
+ ]
+ },
+ "rpi.focus":
+ {
+ }
+}
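
Several entries in this new tuning, notably "gamma_curve" and "y_target", are flattened piecewise-linear functions: the array alternates x, y control points (here over 16-bit input and output ranges). A minimal evaluator sketch assuming only that pairing convention; the tuning loader builds the equivalent Pwl object in controller/pwl.cpp:

#include <cstddef>
#include <vector>

/* Evaluate a flattened piecewise-linear curve { x0, y0, x1, y1, ... }. */
double evalPwl(const std::vector<double> &points, double x)
{
	if (x <= points[0])
		return points[1];
	for (size_t i = 2; i < points.size(); i += 2) {
		if (x <= points[i]) {
			double x0 = points[i - 2], y0 = points[i - 1];
			double t = (x - x0) / (points[i] - x0);
			return y0 + t * (points[i + 1] - y0);
		}
	}
	return points.back(); /* clamp beyond the last point */
}

For example, evalPwl on the gamma curve above maps input 512 to 2520, halfway along the first (0, 0) to (1024, 5040) segment.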
diff --git a/src/ipa/raspberrypi/data/imx477.json b/src/ipa/raspberrypi/data/imx477.json
index 73ad1ae9..9bee3f16 100644
--- a/src/ipa/raspberrypi/data/imx477.json
+++ b/src/ipa/raspberrypi/data/imx477.json
@@ -133,18 +133,29 @@
{
"shutter":
[
- 100, 10000, 30000, 60000, 120000
+ 100, 10000, 30000, 60000, 66666
],
"gain":
[
1.0, 2.0, 4.0, 6.0, 6.0
]
},
- "sport":
+ "short":
{
"shutter":
[
- 100, 5000, 10000, 20000, 120000
+ 100, 5000, 10000, 20000, 33333
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ },
+ "long":
+ {
+ "shutter":
+ [
+ 100, 10000, 30000, 60000, 120000
],
"gain":
[
diff --git a/src/ipa/raspberrypi/data/meson.build b/src/ipa/raspberrypi/data/meson.build
index 5236bf1e..abb1f928 100644
--- a/src/ipa/raspberrypi/data/meson.build
+++ b/src/ipa/raspberrypi/data/meson.build
@@ -2,10 +2,13 @@
conf_files = files([
'imx219.json',
+ 'imx290.json',
'imx477.json',
'ov5647.json',
+ 'ov9281.json',
+ 'se327m12.json',
'uncalibrated.json',
])
install_data(conf_files,
- install_dir : join_paths(ipa_data_dir, 'raspberrypi'))
+ install_dir : ipa_data_dir / 'raspberrypi')
diff --git a/src/ipa/raspberrypi/data/ov5647.json b/src/ipa/raspberrypi/data/ov5647.json
index a2469059..1a354f7c 100644
--- a/src/ipa/raspberrypi/data/ov5647.json
+++ b/src/ipa/raspberrypi/data/ov5647.json
@@ -133,18 +133,29 @@
{
"shutter":
[
- 100, 10000, 30000, 30000, 30000
+ 100, 10000, 30000, 60000, 66666
],
"gain":
[
1.0, 2.0, 4.0, 6.0, 6.0
]
},
- "sport":
+ "short":
{
"shutter":
[
- 100, 5000, 10000, 20000, 30000
+ 100, 5000, 10000, 20000, 33333
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ },
+ "long":
+ {
+ "shutter":
+ [
+ 100, 10000, 30000, 60000, 120000
],
"gain":
[
diff --git a/src/ipa/raspberrypi/data/ov9281.json b/src/ipa/raspberrypi/data/ov9281.json
new file mode 100644
index 00000000..ecd262be
--- /dev/null
+++ b/src/ipa/raspberrypi/data/ov9281.json
@@ -0,0 +1,92 @@
+{
+ "rpi.black_level":
+ {
+ "black_level": 4096
+ },
+ "rpi.lux":
+ {
+ "reference_shutter_speed": 2000,
+ "reference_gain": 1.0,
+ "reference_aperture": 1.0,
+ "reference_lux": 800,
+ "reference_Y": 20000
+ },
+ "rpi.noise":
+ {
+ "reference_constant": 0,
+ "reference_slope": 2.5
+ },
+ "rpi.sdn":
+ {
+ },
+ "rpi.agc":
+ {
+ "metering_modes":
+ {
+ "centre-weighted": {
+ "weights": [4, 4, 4, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter": [ 100, 15000, 30000, 60000, 120000 ],
+ "gain": [ 1.0, 2.0, 3.0, 4.0, 6.0 ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal":
+ [
+ { "bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [ 0, 0.4, 1000, 0.4 ] }
+ ]
+ },
+ "y_target": [ 0, 0.16, 1000, 0.165, 10000, 0.17 ]
+ },
+ "rpi.alsc":
+ {
+ "n_iter": 0,
+ "luminance_strength": 1.0,
+ "corner_strength": 1.5
+ },
+ "rpi.contrast":
+ {
+ "ce_enable": 0,
+ "gamma_curve": [
+ 0, 0,
+ 1024, 5040,
+ 2048, 9338,
+ 3072, 12356,
+ 4096, 15312,
+ 5120, 18051,
+ 6144, 20790,
+ 7168, 23193,
+ 8192, 25744,
+ 9216, 27942,
+ 10240, 30035,
+ 11264, 32005,
+ 12288, 33975,
+ 13312, 35815,
+ 14336, 37600,
+ 15360, 39168,
+ 16384, 40642,
+ 18432, 43379,
+ 20480, 45749,
+ 22528, 47753,
+ 24576, 49621,
+ 26624, 51253,
+ 28672, 52698,
+ 30720, 53796,
+ 32768, 54876,
+ 36864, 57012,
+ 40960, 58656,
+ 45056, 59954,
+ 49152, 61183,
+ 53248, 62355,
+ 57344, 63419,
+ 61440, 64476,
+ 65535, 65535
+ ]
+ }
+}
diff --git a/src/ipa/raspberrypi/data/se327m12.json b/src/ipa/raspberrypi/data/se327m12.json
new file mode 100644
index 00000000..3245ed98
--- /dev/null
+++ b/src/ipa/raspberrypi/data/se327m12.json
@@ -0,0 +1,341 @@
+{
+ "rpi.black_level":
+ {
+ "black_level": 3840
+ },
+ "rpi.dpc":
+ {
+ },
+ "rpi.lux":
+ {
+ "reference_shutter_speed": 6873,
+ "reference_gain": 1.0,
+ "reference_aperture": 1.0,
+ "reference_lux": 800,
+ "reference_Y": 12293
+ },
+ "rpi.noise":
+ {
+ "reference_constant": 0,
+ "reference_slope": 1.986
+ },
+ "rpi.geq":
+ {
+ "offset": 207,
+ "slope": 0.00539
+ },
+ "rpi.sdn":
+ {
+ },
+ "rpi.awb":
+ {
+ "priors":
+ [
+ {
+ "lux": 0, "prior":
+ [
+ 2000, 1.0, 3000, 0.0, 13000, 0.0
+ ]
+ },
+ {
+ "lux": 800, "prior":
+ [
+ 2000, 0.0, 6000, 2.0, 13000, 2.0
+ ]
+ },
+ {
+ "lux": 1500, "prior":
+ [
+ 2000, 0.0, 4000, 1.0, 6000, 6.0, 6500, 7.0, 7000, 1.0, 13000, 1.0
+ ]
+ }
+ ],
+ "modes":
+ {
+ "auto":
+ {
+ "lo": 2500,
+ "hi": 8000
+ },
+ "incandescent":
+ {
+ "lo": 2500,
+ "hi": 3000
+ },
+ "tungsten":
+ {
+ "lo": 3000,
+ "hi": 3500
+ },
+ "fluorescent":
+ {
+ "lo": 4000,
+ "hi": 4700
+ },
+ "indoor":
+ {
+ "lo": 3000,
+ "hi": 5000
+ },
+ "daylight":
+ {
+ "lo": 5500,
+ "hi": 6500
+ },
+ "cloudy":
+ {
+ "lo": 7000,
+ "hi": 8600
+ }
+ },
+ "bayes": 1,
+ "ct_curve":
+ [
+ 2900.0, 0.9217, 0.3657, 3600.0, 0.7876, 0.4651, 4600.0, 0.6807, 0.5684, 5800.0, 0.5937, 0.6724, 8100.0, 0.5447, 0.7403
+ ],
+ "sensitivity_r": 1.0,
+ "sensitivity_b": 1.0,
+ "transverse_pos": 0.0162,
+ "transverse_neg": 0.0204
+ },
+ "rpi.agc":
+ {
+ "metering_modes":
+ {
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ },
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter":
+ [
+ 100, 10000, 30000, 60000, 120000
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ },
+ "short":
+ {
+ "shutter":
+ [
+ 100, 5000, 10000, 20000, 120000
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal":
+ [
+ {
+ "bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target":
+ [
+ 0, 0.5, 1000, 0.5
+ ]
+ }
+ ],
+ "highlight":
+ [
+ {
+ "bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target":
+ [
+ 0, 0.5, 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER", "q_lo": 0.98, "q_hi": 1.0, "y_target":
+ [
+ 0, 0.8, 1000, 0.8
+ ]
+ }
+ ]
+ },
+ "y_target":
+ [
+ 0, 0.16, 1000, 0.165, 10000, 0.17
+ ]
+ },
+ "rpi.alsc":
+ {
+ "omega": 1.3,
+ "n_iter": 100,
+ "luminance_strength": 0.5,
+ "calibrations_Cr":
+ [
+ {
+ "ct": 4000, "table":
+ [
+ 1.481, 1.471, 1.449, 1.429, 1.416, 1.404, 1.394, 1.389, 1.389, 1.389, 1.392, 1.397, 1.404, 1.416, 1.429, 1.437,
+ 1.472, 1.456, 1.436, 1.418, 1.405, 1.394, 1.389, 1.384, 1.382, 1.382, 1.386, 1.388, 1.398, 1.407, 1.422, 1.429,
+ 1.465, 1.443, 1.426, 1.411, 1.397, 1.389, 1.383, 1.377, 1.377, 1.377, 1.379, 1.384, 1.388, 1.398, 1.411, 1.422,
+ 1.462, 1.441, 1.423, 1.409, 1.395, 1.385, 1.379, 1.376, 1.374, 1.374, 1.375, 1.379, 1.384, 1.394, 1.407, 1.418,
+ 1.461, 1.439, 1.421, 1.407, 1.394, 1.385, 1.381, 1.376, 1.373, 1.373, 1.373, 1.376, 1.381, 1.389, 1.403, 1.415,
+ 1.461, 1.439, 1.419, 1.404, 1.392, 1.384, 1.379, 1.376, 1.373, 1.372, 1.374, 1.375, 1.379, 1.389, 1.401, 1.413,
+ 1.461, 1.438, 1.419, 1.402, 1.389, 1.383, 1.377, 1.375, 1.373, 1.372, 1.372, 1.375, 1.381, 1.388, 1.401, 1.414,
+ 1.462, 1.438, 1.419, 1.403, 1.391, 1.381, 1.377, 1.374, 1.373, 1.373, 1.374, 1.376, 1.381, 1.389, 1.401, 1.414,
+ 1.462, 1.441, 1.423, 1.405, 1.392, 1.383, 1.377, 1.374, 1.373, 1.372, 1.373, 1.376, 1.382, 1.391, 1.402, 1.414,
+ 1.465, 1.444, 1.424, 1.407, 1.393, 1.382, 1.378, 1.373, 1.369, 1.369, 1.372, 1.375, 1.381, 1.389, 1.402, 1.417,
+ 1.469, 1.449, 1.427, 1.413, 1.396, 1.384, 1.381, 1.375, 1.371, 1.371, 1.373, 1.377, 1.385, 1.393, 1.407, 1.422,
+ 1.474, 1.456, 1.436, 1.419, 1.407, 1.391, 1.383, 1.379, 1.377, 1.377, 1.378, 1.381, 1.391, 1.404, 1.422, 1.426
+ ]
+ },
+ {
+ "ct": 5000, "table":
+ [
+ 1.742, 1.721, 1.689, 1.661, 1.639, 1.623, 1.613, 1.609, 1.607, 1.606, 1.609, 1.617, 1.626, 1.641, 1.665, 1.681,
+ 1.728, 1.703, 1.672, 1.645, 1.631, 1.614, 1.602, 1.599, 1.596, 1.597, 1.601, 1.608, 1.618, 1.631, 1.653, 1.671,
+ 1.713, 1.691, 1.658, 1.635, 1.618, 1.606, 1.595, 1.591, 1.588, 1.588, 1.591, 1.601, 1.608, 1.624, 1.641, 1.658,
+ 1.707, 1.681, 1.651, 1.627, 1.613, 1.599, 1.591, 1.585, 1.583, 1.584, 1.587, 1.591, 1.601, 1.615, 1.633, 1.655,
+ 1.699, 1.672, 1.644, 1.622, 1.606, 1.593, 1.586, 1.581, 1.579, 1.581, 1.583, 1.587, 1.597, 1.611, 1.631, 1.652,
+ 1.697, 1.665, 1.637, 1.617, 1.601, 1.589, 1.584, 1.579, 1.577, 1.578, 1.581, 1.585, 1.597, 1.607, 1.627, 1.652,
+ 1.697, 1.662, 1.634, 1.613, 1.599, 1.591, 1.583, 1.578, 1.576, 1.576, 1.579, 1.586, 1.597, 1.607, 1.628, 1.653,
+ 1.697, 1.662, 1.633, 1.613, 1.598, 1.589, 1.582, 1.578, 1.576, 1.577, 1.582, 1.589, 1.598, 1.611, 1.635, 1.655,
+ 1.701, 1.666, 1.636, 1.616, 1.602, 1.589, 1.583, 1.578, 1.577, 1.581, 1.583, 1.591, 1.601, 1.617, 1.639, 1.659,
+ 1.708, 1.671, 1.641, 1.618, 1.603, 1.591, 1.584, 1.581, 1.578, 1.581, 1.585, 1.594, 1.604, 1.622, 1.646, 1.666,
+ 1.714, 1.681, 1.648, 1.622, 1.608, 1.599, 1.591, 1.584, 1.583, 1.584, 1.589, 1.599, 1.614, 1.629, 1.653, 1.673,
+ 1.719, 1.691, 1.659, 1.631, 1.618, 1.606, 1.596, 1.591, 1.591, 1.593, 1.599, 1.608, 1.623, 1.642, 1.665, 1.681
+ ]
+ }
+ ],
+ "calibrations_Cb":
+ [
+ {
+ "ct": 4000, "table":
+ [
+ 2.253, 2.267, 2.289, 2.317, 2.342, 2.359, 2.373, 2.381, 2.381, 2.378, 2.368, 2.361, 2.344, 2.337, 2.314, 2.301,
+ 2.262, 2.284, 2.314, 2.335, 2.352, 2.371, 2.383, 2.391, 2.393, 2.391, 2.381, 2.368, 2.361, 2.342, 2.322, 2.308,
+ 2.277, 2.303, 2.321, 2.346, 2.364, 2.381, 2.391, 2.395, 2.397, 2.397, 2.395, 2.381, 2.367, 2.354, 2.332, 2.321,
+ 2.277, 2.304, 2.327, 2.349, 2.369, 2.388, 2.393, 2.396, 2.396, 2.398, 2.396, 2.391, 2.376, 2.359, 2.339, 2.328,
+ 2.279, 2.311, 2.327, 2.354, 2.377, 2.389, 2.393, 2.397, 2.397, 2.398, 2.395, 2.393, 2.382, 2.363, 2.344, 2.332,
+ 2.282, 2.311, 2.329, 2.354, 2.377, 2.386, 2.396, 2.396, 2.395, 2.396, 2.397, 2.394, 2.383, 2.367, 2.346, 2.333,
+ 2.283, 2.314, 2.333, 2.353, 2.375, 2.389, 2.394, 2.395, 2.395, 2.395, 2.396, 2.394, 2.386, 2.368, 2.354, 2.336,
+ 2.287, 2.309, 2.331, 2.352, 2.373, 2.386, 2.394, 2.395, 2.395, 2.396, 2.396, 2.394, 2.384, 2.371, 2.354, 2.339,
+ 2.289, 2.307, 2.326, 2.347, 2.369, 2.385, 2.392, 2.397, 2.398, 2.398, 2.397, 2.392, 2.383, 2.367, 2.352, 2.337,
+ 2.286, 2.303, 2.322, 2.342, 2.361, 2.379, 2.389, 2.394, 2.397, 2.398, 2.396, 2.389, 2.381, 2.366, 2.346, 2.332,
+ 2.284, 2.291, 2.312, 2.329, 2.351, 2.372, 2.381, 2.389, 2.393, 2.394, 2.389, 2.385, 2.374, 2.362, 2.338, 2.325,
+ 2.283, 2.288, 2.305, 2.319, 2.339, 2.365, 2.374, 2.381, 2.384, 2.386, 2.385, 2.379, 2.368, 2.342, 2.325, 2.318
+ ]
+ },
+ {
+ "ct": 5000, "table":
+ [
+ 1.897, 1.919, 1.941, 1.969, 1.989, 2.003, 2.014, 2.019, 2.019, 2.017, 2.014, 2.008, 1.999, 1.988, 1.968, 1.944,
+ 1.914, 1.932, 1.957, 1.982, 1.998, 2.014, 2.023, 2.029, 2.031, 2.029, 2.022, 2.014, 2.006, 1.995, 1.976, 1.955,
+ 1.925, 1.951, 1.974, 1.996, 2.013, 2.027, 2.035, 2.039, 2.039, 2.038, 2.035, 2.026, 2.015, 2.002, 1.984, 1.963,
+ 1.932, 1.958, 1.986, 2.007, 2.024, 2.034, 2.041, 2.041, 2.045, 2.045, 2.042, 2.033, 2.023, 2.009, 1.995, 1.971,
+ 1.942, 1.964, 1.994, 2.012, 2.029, 2.038, 2.043, 2.046, 2.047, 2.046, 2.045, 2.039, 2.029, 2.014, 1.997, 1.977,
+ 1.946, 1.974, 1.999, 2.015, 2.031, 2.041, 2.046, 2.047, 2.048, 2.047, 2.044, 2.041, 2.031, 2.019, 1.999, 1.978,
+ 1.948, 1.975, 2.002, 2.018, 2.031, 2.041, 2.046, 2.047, 2.048, 2.048, 2.045, 2.041, 2.029, 2.019, 1.998, 1.978,
+ 1.948, 1.973, 2.002, 2.018, 2.029, 2.042, 2.045, 2.048, 2.048, 2.048, 2.044, 2.037, 2.027, 2.014, 1.993, 1.978,
+ 1.945, 1.969, 1.998, 2.015, 2.028, 2.037, 2.045, 2.046, 2.047, 2.044, 2.039, 2.033, 2.022, 2.008, 1.989, 1.971,
+ 1.939, 1.964, 1.991, 2.011, 2.024, 2.032, 2.036, 2.042, 2.042, 2.039, 2.035, 2.024, 2.012, 1.998, 1.977, 1.964,
+ 1.932, 1.953, 1.981, 2.006, 2.016, 2.024, 2.028, 2.031, 2.034, 2.031, 2.024, 2.015, 2.005, 1.989, 1.966, 1.955,
+ 1.928, 1.944, 1.973, 1.999, 2.007, 2.016, 2.019, 2.025, 2.026, 2.025, 2.017, 2.008, 1.997, 1.975, 1.958, 1.947
+ ]
+ }
+ ],
+ "luminance_lut":
+ [
+ 1.877, 1.597, 1.397, 1.269, 1.191, 1.131, 1.093, 1.078, 1.071, 1.069, 1.086, 1.135, 1.221, 1.331, 1.474, 1.704,
+ 1.749, 1.506, 1.334, 1.229, 1.149, 1.088, 1.058, 1.053, 1.051, 1.046, 1.053, 1.091, 1.163, 1.259, 1.387, 1.587,
+ 1.661, 1.451, 1.295, 1.195, 1.113, 1.061, 1.049, 1.048, 1.047, 1.049, 1.049, 1.066, 1.124, 1.211, 1.333, 1.511,
+ 1.615, 1.411, 1.267, 1.165, 1.086, 1.052, 1.047, 1.047, 1.047, 1.049, 1.052, 1.056, 1.099, 1.181, 1.303, 1.471,
+ 1.576, 1.385, 1.252, 1.144, 1.068, 1.049, 1.044, 1.044, 1.045, 1.049, 1.053, 1.054, 1.083, 1.163, 1.283, 1.447,
+ 1.561, 1.373, 1.245, 1.135, 1.064, 1.049, 1.044, 1.044, 1.044, 1.046, 1.048, 1.054, 1.073, 1.153, 1.271, 1.432,
+ 1.571, 1.377, 1.242, 1.137, 1.066, 1.055, 1.052, 1.051, 1.051, 1.049, 1.047, 1.048, 1.068, 1.148, 1.271, 1.427,
+ 1.582, 1.396, 1.259, 1.156, 1.085, 1.068, 1.059, 1.054, 1.049, 1.045, 1.041, 1.043, 1.074, 1.157, 1.284, 1.444,
+ 1.623, 1.428, 1.283, 1.178, 1.105, 1.074, 1.069, 1.063, 1.056, 1.048, 1.046, 1.051, 1.094, 1.182, 1.311, 1.473,
+ 1.691, 1.471, 1.321, 1.213, 1.135, 1.088, 1.073, 1.069, 1.063, 1.059, 1.053, 1.071, 1.129, 1.222, 1.351, 1.521,
+ 1.808, 1.543, 1.371, 1.253, 1.174, 1.118, 1.085, 1.072, 1.067, 1.064, 1.071, 1.106, 1.176, 1.274, 1.398, 1.582,
+ 1.969, 1.666, 1.447, 1.316, 1.223, 1.166, 1.123, 1.094, 1.089, 1.097, 1.118, 1.163, 1.239, 1.336, 1.471, 1.681
+ ],
+ "sigma": 0.00218,
+ "sigma_Cb": 0.00194
+ },
+ "rpi.contrast":
+ {
+ "ce_enable": 1,
+ "gamma_curve":
+ [
+ 0, 0, 1024, 5040, 2048, 9338, 3072, 12356, 4096, 15312, 5120, 18051, 6144, 20790, 7168, 23193,
+ 8192, 25744, 9216, 27942, 10240, 30035, 11264, 32005, 12288, 33975, 13312, 35815, 14336, 37600, 15360, 39168,
+ 16384, 40642, 18432, 43379, 20480, 45749, 22528, 47753, 24576, 49621, 26624, 51253, 28672, 52698, 30720, 53796,
+ 32768, 54876, 36864, 57012, 40960, 58656, 45056, 59954, 49152, 61183, 53248, 62355, 57344, 63419, 61440, 64476,
+ 65535, 65535
+ ]
+ },
+ "rpi.ccm":
+ {
+ "ccms":
+ [
+ {
+ "ct": 2900, "ccm":
+ [
+ 1.44924, -0.12935, -0.31989, -0.65839, 1.95441, -0.29602, 0.18344, -1.22282, 2.03938
+ ]
+ },
+ {
+ "ct": 3000, "ccm":
+ [
+ 1.38736, 0.07714, -0.46451, -0.59691, 1.84335, -0.24644, 0.10092, -1.30441, 2.20349
+ ]
+ },
+ {
+ "ct": 3600, "ccm":
+ [
+ 1.51261, -0.27921, -0.23339, -0.55129, 1.83241, -0.28111, 0.11649, -0.93195, 1.81546
+ ]
+ },
+ {
+ "ct": 4600, "ccm":
+ [
+ 1.47082, -0.18523, -0.28559, -0.48923, 1.95126, -0.46203, 0.07951, -0.83987, 1.76036
+ ]
+ },
+ {
+ "ct": 5800, "ccm":
+ [
+ 1.57294, -0.36229, -0.21065, -0.42272, 1.80305, -0.38032, 0.03671, -0.66862, 1.63191
+ ]
+ },
+ {
+ "ct": 8100, "ccm":
+ [
+ 1.58803, -0.09912, -0.48891, -0.42594, 2.22303, -0.79709, -0.00621, -0.90516, 1.91137
+ ]
+ }
+ ]
+ },
+ "rpi.sharpen":
+ {
+ "threshold": 2.0,
+ "strength": 0.5,
+ "limit": 0.5
+ }
+}
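
The "ccms" list in this file keys 3x3 colour correction matrices by colour temperature; at runtime the CCM algorithm blends between the entries bracketing the AWB's temperature estimate. A hedged sketch of that interpolation, assuming a non-empty table (the actual selection logic lives in controller/rpi/ccm.cpp):

#include <array>
#include <cstddef>
#include <iterator>
#include <map>

using Matrix = std::array<double, 9>;

/* Linearly blend the two CCMs bracketing colour temperature ct. */
Matrix interpolateCcm(const std::map<double, Matrix> &ccms, double ct)
{
	auto hi = ccms.lower_bound(ct);
	if (hi == ccms.end())
		return std::prev(hi)->second; /* above the table: clamp */
	if (hi == ccms.begin())
		return hi->second; /* below the table: clamp */

	auto lo = std::prev(hi);
	double t = (ct - lo->first) / (hi->first - lo->first);
	Matrix out;
	for (size_t i = 0; i < 9; i++)
		out[i] = (1.0 - t) * lo->second[i] + t * hi->second[i];
	return out;
}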
diff --git a/src/ipa/raspberrypi/md_parser.cpp b/src/ipa/raspberrypi/md_parser.cpp
deleted file mode 100644
index d82c102c..00000000
--- a/src/ipa/raspberrypi/md_parser.cpp
+++ /dev/null
@@ -1,101 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
- *
- * md_parser.cpp - image sensor metadata parsers
- */
-
-#include <assert.h>
-#include <map>
-#include <string.h>
-
-#include "md_parser.hpp"
-
-using namespace RPiController;
-
-// This function goes through the embedded data to find the offsets (not
-// values!), in the data block, where the values of the given registers can
-// subsequently be found.
-
-// Embedded data tag bytes, from Sony IMX219 datasheet but general to all SMIA
-// sensors, I think.
-
-#define LINE_START 0x0a
-#define LINE_END_TAG 0x07
-#define REG_HI_BITS 0xaa
-#define REG_LOW_BITS 0xa5
-#define REG_VALUE 0x5a
-#define REG_SKIP 0x55
-
-MdParserSmia::ParseStatus MdParserSmia::findRegs(unsigned char *data,
- uint32_t regs[], int offsets[],
- unsigned int num_regs)
-{
- assert(num_regs > 0);
- if (data[0] != LINE_START)
- return NO_LINE_START;
-
- unsigned int current_offset = 1; // after the LINE_START
- unsigned int current_line_start = 0, current_line = 0;
- unsigned int reg_num = 0, first_reg = 0;
- ParseStatus retcode = PARSE_OK;
- while (1) {
- int tag = data[current_offset++];
- if ((bits_per_pixel_ == 10 &&
- (current_offset + 1 - current_line_start) % 5 == 0) ||
- (bits_per_pixel_ == 12 &&
- (current_offset + 1 - current_line_start) % 3 == 0)) {
- if (data[current_offset++] != REG_SKIP)
- return BAD_DUMMY;
- }
- int data_byte = data[current_offset++];
- //printf("Offset %u, tag 0x%02x data_byte 0x%02x\n", current_offset-1, tag, data_byte);
- if (tag == LINE_END_TAG) {
- if (data_byte != LINE_END_TAG)
- return BAD_LINE_END;
- if (num_lines_ && ++current_line == num_lines_)
- return MISSING_REGS;
- if (line_length_bytes_) {
- current_offset =
- current_line_start + line_length_bytes_;
- // Require whole line to be in the buffer (if buffer size set).
- if (buffer_size_bytes_ &&
- current_offset + line_length_bytes_ >
- buffer_size_bytes_)
- return MISSING_REGS;
- if (data[current_offset] != LINE_START)
- return NO_LINE_START;
- } else {
- // allow a zero line length to mean "hunt for the next line"
- while (data[current_offset] != LINE_START &&
- current_offset < buffer_size_bytes_)
- current_offset++;
- if (current_offset == buffer_size_bytes_)
- return NO_LINE_START;
- }
- // inc current_offset to after LINE_START
- current_line_start =
- current_offset++;
- } else {
- if (tag == REG_HI_BITS)
- reg_num = (reg_num & 0xff) | (data_byte << 8);
- else if (tag == REG_LOW_BITS)
- reg_num = (reg_num & 0xff00) | data_byte;
- else if (tag == REG_SKIP)
- reg_num++;
- else if (tag == REG_VALUE) {
- while (reg_num >=
- // assumes registers are in order...
- regs[first_reg]) {
- if (reg_num == regs[first_reg])
- offsets[first_reg] =
- current_offset - 1;
- if (++first_reg == num_regs)
- return retcode;
- }
- reg_num++;
- } else
- return ILLEGAL_TAG;
- }
- }
-}
diff --git a/src/ipa/raspberrypi/md_parser.hpp b/src/ipa/raspberrypi/md_parser.hpp
index c9db62c0..e3e27385 100644
--- a/src/ipa/raspberrypi/md_parser.hpp
+++ b/src/ipa/raspberrypi/md_parser.hpp
@@ -6,107 +6,137 @@
*/
#pragma once
+#include <initializer_list>
+#include <map>
+#include <optional>
#include <stdint.h>
-/* Camera metadata parser class. Usage as shown below.
+#include <libcamera/base/span.h>
-Setup:
-
-Usually the metadata parser will be made as part of the CamHelper class so
-application code doesn't have to worry which to kind to instantiate. But for
-the sake of example let's suppose we're parsing imx219 metadata.
-
-MdParser *parser = new MdParserImx219(); // for example
-parser->SetBitsPerPixel(bpp);
-parser->SetLineLengthBytes(pitch);
-parser->SetNumLines(2);
-
-Note 1: if you don't know how many lines there are, you can use SetBufferSize
-instead to limit the total buffer size.
-
-Note 2: if you don't know the line length, you can leave the line length unset
-(or set to zero) and the parser will hunt for the line start instead. In this
-case SetBufferSize *must* be used so that the parser won't run off the end of
-the buffer.
-
-Then on every frame:
-
-if (parser->Parse(data) != MdParser::OK)
- much badness;
-unsigned int exposure_lines, gain_code
-if (parser->GetExposureLines(exposure_lines) != MdParser::OK)
- exposure was not found;
-if (parser->GetGainCode(parser, gain_code) != MdParser::OK)
- gain code was not found;
-
-(Note that the CamHelper class converts to/from exposure lines and time,
-and gain_code / actual gain.)
-
-If you suspect your embedded data may have changed its layout, change any line
-lengths, number of lines, bits per pixel etc. that are different, and
-then:
-
-parser->Reset();
-
-before calling Parse again. */
+/*
+ * Camera metadata parser class. Usage as shown below.
+ *
+ * Setup:
+ *
+ * Usually the metadata parser will be made as part of the CamHelper class so
+ * application code doesn't have to worry about which kind to instantiate. But for
+ * the sake of example let's suppose we're parsing imx219 metadata.
+ *
+ * MdParser *parser = new MdParserSmia({ expHiReg, expLoReg, gainReg });
+ * parser->SetBitsPerPixel(bpp);
+ * parser->SetLineLengthBytes(pitch);
+ * parser->SetNumLines(2);
+ *
+ * Note 1: if you don't know how many lines there are, the size of the input
+ * buffer is used as a limit instead.
+ *
+ * Note 2: if you don't know the line length, you can leave the line length unset
+ * (or set to zero) and the parser will hunt for the line start instead.
+ *
+ * Then on every frame:
+ *
+ * RegisterMap registers;
+ * if (parser->Parse(buffer, registers) != MdParser::OK)
+ * much badness;
+ * Metadata metadata;
+ * CamHelper::PopulateMetadata(registers, metadata);
+ *
+ * (Note that the CamHelper class converts to/from exposure lines and time,
+ * and gain_code / actual gain.)
+ *
+ * If you suspect your embedded data may have changed its layout, change any line
+ * lengths, number of lines, bits per pixel etc. that are different, and
+ * then:
+ *
+ * parser->Reset();
+ *
+ * before calling Parse again.
+ */
namespace RPiController {
-// Abstract base class from which other metadata parsers are derived.
+/* Abstract base class from which other metadata parsers are derived. */
class MdParser
{
public:
- // Parser status codes:
- // OK - success
- // NOTFOUND - value such as exposure or gain was not found
- // ERROR - all other errors
+ using RegisterMap = std::map<uint32_t, uint32_t>;
+
+ /*
+ * Parser status codes:
+ * OK - success
+ * NOTFOUND - value such as exposure or gain was not found
+ * ERROR - all other errors
+ */
enum Status {
OK = 0,
NOTFOUND = 1,
ERROR = 2
};
- MdParser() : reset_(true) {}
- virtual ~MdParser() {}
- void Reset() { reset_ = true; }
- void SetBitsPerPixel(int bpp) { bits_per_pixel_ = bpp; }
- void SetNumLines(unsigned int num_lines) { num_lines_ = num_lines; }
- void SetLineLengthBytes(unsigned int num_bytes)
+
+ MdParser()
+ : reset_(true), bits_per_pixel_(0), num_lines_(0), line_length_bytes_(0)
{
- line_length_bytes_ = num_bytes;
}
- void SetBufferSize(unsigned int num_bytes)
+
+ virtual ~MdParser() = default;
+
+ void Reset()
+ {
+ reset_ = true;
+ }
+
+ void SetBitsPerPixel(int bpp)
+ {
+ bits_per_pixel_ = bpp;
+ }
+
+ void SetNumLines(unsigned int num_lines)
{
- buffer_size_bytes_ = num_bytes;
+ num_lines_ = num_lines;
}
- virtual Status Parse(void *data) = 0;
- virtual Status GetExposureLines(unsigned int &lines) = 0;
- virtual Status GetGainCode(unsigned int &gain_code) = 0;
+
+ void SetLineLengthBytes(unsigned int num_bytes)
+ {
+ line_length_bytes_ = num_bytes;
+ }
+
+ virtual Status Parse(libcamera::Span<const uint8_t> buffer,
+ RegisterMap &registers) = 0;
protected:
bool reset_;
int bits_per_pixel_;
unsigned int num_lines_;
unsigned int line_length_bytes_;
- unsigned int buffer_size_bytes_;
};
-// This isn't a full implementation of a metadata parser for SMIA sensors,
-// however, it does provide the findRegs method which will prove useful and make
-// it easier to implement parsers for other SMIA-like sensors (see
-// md_parser_imx219.cpp for an example).
+/*
+ * This isn't a full implementation of a metadata parser for SMIA sensors,
+ * however, it does provide the findRegs method which will prove useful and make
+ * it easier to implement parsers for other SMIA-like sensors (see
+ * md_parser_imx219.cpp for an example).
+ */
-class MdParserSmia : public MdParser
+class MdParserSmia final : public MdParser
{
public:
- MdParserSmia() : MdParser() {}
-
-protected:
- // Note that error codes > 0 are regarded as non-fatal; codes < 0
- // indicate a bad data buffer. Status codes are:
- // PARSE_OK - found all registers, much happiness
- // MISSING_REGS - some registers found; should this be a hard error?
- // The remaining codes are all hard errors.
+ MdParserSmia(std::initializer_list<uint32_t> registerList);
+
+ MdParser::Status Parse(libcamera::Span<const uint8_t> buffer,
+ RegisterMap &registers) override;
+
+private:
+ /* Maps register address to offset in the buffer. */
+ using OffsetMap = std::map<uint32_t, std::optional<uint32_t>>;
+
+ /*
+ * Note that error codes > 0 are regarded as non-fatal; codes < 0
+ * indicate a bad data buffer. Status codes are:
+ * PARSE_OK - found all registers, much happiness
+ * MISSING_REGS - some registers found; should this be a hard error?
+ * The remaining codes are all hard errors.
+ */
enum ParseStatus {
PARSE_OK = 0,
MISSING_REGS = 1,
@@ -116,8 +146,10 @@ protected:
BAD_LINE_END = -4,
BAD_PADDING = -5
};
- ParseStatus findRegs(unsigned char *data, uint32_t regs[],
- int offsets[], unsigned int num_regs);
+
+ ParseStatus findRegs(libcamera::Span<const uint8_t> buffer);
+
+ OffsetMap offsets_;
};
} // namespace RPiController
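
To make the reworked interface concrete, a usage sketch built only from the declarations above; the register addresses are placeholders standing in for whatever the sensor's embedded data actually carries:

#include <stdint.h>

#include <libcamera/base/span.h>

#include "md_parser.hpp"

using namespace RPiController;

/* Placeholder register addresses, for illustration only. */
constexpr uint32_t expHiReg = 0x015a, expLoReg = 0x015b, gainReg = 0x0157;

/* Constructed once as: MdParserSmia parser({ expHiReg, expLoReg, gainReg }); */
bool parseFrame(MdParser &parser, libcamera::Span<const uint8_t> embedded,
		uint32_t &exposureLines, uint32_t &gainCode)
{
	MdParser::RegisterMap registers;
	if (parser.Parse(embedded, registers) != MdParser::OK)
		return false;

	exposureLines = registers[expHiReg] << 8 | registers[expLoReg];
	gainCode = registers[gainReg];
	return true;
}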
diff --git a/src/ipa/raspberrypi/md_parser_rpi.cpp b/src/ipa/raspberrypi/md_parser_rpi.cpp
deleted file mode 100644
index 2b0bcfc5..00000000
--- a/src/ipa/raspberrypi/md_parser_rpi.cpp
+++ /dev/null
@@ -1,37 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2020, Raspberry Pi (Trading) Limited
- *
- * md_parser_rpi.cpp - Metadata parser for generic Raspberry Pi metadata
- */
-
-#include <string.h>
-
-#include "md_parser_rpi.hpp"
-
-using namespace RPiController;
-
-MdParserRPi::MdParserRPi()
-{
-}
-
-MdParser::Status MdParserRPi::Parse(void *data)
-{
- if (buffer_size_bytes_ < sizeof(rpiMetadata))
- return ERROR;
-
- memcpy(&metadata, data, sizeof(rpiMetadata));
- return OK;
-}
-
-MdParser::Status MdParserRPi::GetExposureLines(unsigned int &lines)
-{
- lines = metadata.exposure;
- return OK;
-}
-
-MdParser::Status MdParserRPi::GetGainCode(unsigned int &gain_code)
-{
- gain_code = metadata.gain;
- return OK;
-}
diff --git a/src/ipa/raspberrypi/md_parser_rpi.hpp b/src/ipa/raspberrypi/md_parser_rpi.hpp
deleted file mode 100644
index 52f54f00..00000000
--- a/src/ipa/raspberrypi/md_parser_rpi.hpp
+++ /dev/null
@@ -1,32 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
- *
- * md_parser_rpi.hpp - Raspberry Pi metadata parser interface
- */
-#pragma once
-
-#include "md_parser.hpp"
-
-namespace RPiController {
-
-class MdParserRPi : public MdParser
-{
-public:
- MdParserRPi();
- Status Parse(void *data) override;
- Status GetExposureLines(unsigned int &lines) override;
- Status GetGainCode(unsigned int &gain_code) override;
-
-private:
- // This must be the same struct that is filled into the metadata buffer
- // in the pipeline handler.
- struct rpiMetadata
- {
- uint32_t exposure;
- uint32_t gain;
- };
- rpiMetadata metadata;
-};
-
-}
diff --git a/src/ipa/raspberrypi/md_parser_smia.cpp b/src/ipa/raspberrypi/md_parser_smia.cpp
new file mode 100644
index 00000000..ea5eac41
--- /dev/null
+++ b/src/ipa/raspberrypi/md_parser_smia.cpp
@@ -0,0 +1,149 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
+ *
+ * md_parser_smia.cpp - SMIA specification based embedded data parser
+ */
+
+#include <libcamera/base/log.h>
+#include "md_parser.hpp"
+
+using namespace RPiController;
+using namespace libcamera;
+
+/*
+ * This function goes through the embedded data to find the offsets (not
+ * values!), in the data block, where the values of the given registers can
+ * subsequently be found.
+ *
+ * Embedded data tag bytes, from Sony IMX219 datasheet but general to all SMIA
+ * sensors, I think.
+ */
+
+constexpr unsigned int LINE_START = 0x0a;
+constexpr unsigned int LINE_END_TAG = 0x07;
+constexpr unsigned int REG_HI_BITS = 0xaa;
+constexpr unsigned int REG_LOW_BITS = 0xa5;
+constexpr unsigned int REG_VALUE = 0x5a;
+constexpr unsigned int REG_SKIP = 0x55;
+
+MdParserSmia::MdParserSmia(std::initializer_list<uint32_t> registerList)
+{
+ for (auto r : registerList)
+ offsets_[r] = {};
+}
+
+MdParser::Status MdParserSmia::Parse(libcamera::Span<const uint8_t> buffer,
+ RegisterMap &registers)
+{
+ if (reset_) {
+ /*
+ * Search again through the metadata for all the registers
+ * requested.
+ */
+ ASSERT(bits_per_pixel_);
+
+ for (const auto &kv : offsets_)
+ offsets_[kv.first] = {};
+
+ ParseStatus ret = findRegs(buffer);
+ /*
+ * > 0 means "worked partially but parse again next time",
+ * < 0 means "hard error".
+ *
+ * In either case, we retry parsing on the next frame.
+ */
+ if (ret != PARSE_OK)
+ return ERROR;
+
+ reset_ = false;
+ }
+
+ /* Populate the register values requested. */
+ registers.clear();
+ for (const auto &[reg, offset] : offsets_) {
+ if (!offset) {
+ reset_ = true;
+ return NOTFOUND;
+ }
+ registers[reg] = buffer[offset.value()];
+ }
+
+ return OK;
+}
+
+MdParserSmia::ParseStatus MdParserSmia::findRegs(libcamera::Span<const uint8_t> buffer)
+{
+ ASSERT(offsets_.size());
+
+ if (buffer[0] != LINE_START)
+ return NO_LINE_START;
+
+ unsigned int current_offset = 1; /* after the LINE_START */
+ unsigned int current_line_start = 0, current_line = 0;
+ unsigned int reg_num = 0, regs_done = 0;
+
+ while (1) {
+ int tag = buffer[current_offset++];
+
+ if ((bits_per_pixel_ == 10 &&
+ (current_offset + 1 - current_line_start) % 5 == 0) ||
+ (bits_per_pixel_ == 12 &&
+ (current_offset + 1 - current_line_start) % 3 == 0)) {
+ if (buffer[current_offset++] != REG_SKIP)
+ return BAD_DUMMY;
+ }
+
+ int data_byte = buffer[current_offset++];
+
+ if (tag == LINE_END_TAG) {
+ if (data_byte != LINE_END_TAG)
+ return BAD_LINE_END;
+
+ if (num_lines_ && ++current_line == num_lines_)
+ return MISSING_REGS;
+
+ if (line_length_bytes_) {
+ current_offset = current_line_start + line_length_bytes_;
+
+ /* Require whole line to be in the buffer (if buffer size set). */
+ if (buffer.size() &&
+ current_offset + line_length_bytes_ > buffer.size())
+ return MISSING_REGS;
+
+ if (buffer[current_offset] != LINE_START)
+ return NO_LINE_START;
+ } else {
+ /* allow a zero line length to mean "hunt for the next line" */
+ while (current_offset < buffer.size() &&
+ buffer[current_offset] != LINE_START)
+ current_offset++;
+
+ if (current_offset == buffer.size())
+ return NO_LINE_START;
+ }
+
+ /* inc current_offset to after LINE_START */
+ current_line_start = current_offset++;
+ } else {
+ if (tag == REG_HI_BITS)
+ reg_num = (reg_num & 0xff) | (data_byte << 8);
+ else if (tag == REG_LOW_BITS)
+ reg_num = (reg_num & 0xff00) | data_byte;
+ else if (tag == REG_SKIP)
+ reg_num++;
+ else if (tag == REG_VALUE) {
+ auto reg = offsets_.find(reg_num);
+
+ if (reg != offsets_.end()) {
+ offsets_[reg_num] = current_offset - 1;
+
+ if (++regs_done == offsets_.size())
+ return PARSE_OK;
+ }
+ reg_num++;
+ } else
+ return ILLEGAL_TAG;
+ }
+ }
+}
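
As a worked illustration of the tag stream findRegs() walks: a payload line opens with 0x0a, then alternates tag/data byte pairs; 0xaa and 0xa5 load the high and low halves of the current register address, 0x5a marks a register value (recording its offset and advancing the address), and 0x07 0x07 closes the line. A tiny synthetic buffer, assuming SetBitsPerPixel(8) so neither dummy-byte branch fires:

#include <stdint.h>

/*
 * One embedded-data line carrying register 0x0157 = 0x20:
 *
 *   0x0a          line start
 *   0xaa 0x01     register high bits -> reg_num = 0x0100
 *   0xa5 0x57     register low bits  -> reg_num = 0x0157
 *   0x5a 0x20     value byte at offset 6, which gets recorded
 *   0x07 0x07     line end
 */
static const uint8_t line[] = {
	0x0a, 0xaa, 0x01, 0xa5, 0x57, 0x5a, 0x20, 0x07, 0x07,
};

With MdParserSmia({ 0x0157 }), Parse(line, registers) returns OK and leaves registers[0x0157] == 0x20.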
diff --git a/src/ipa/raspberrypi/meson.build b/src/ipa/raspberrypi/meson.build
index 9445cd09..1af31e4a 100644
--- a/src/ipa/raspberrypi/meson.build
+++ b/src/ipa/raspberrypi/meson.build
@@ -3,7 +3,7 @@
ipa_name = 'ipa_rpi'
rpi_ipa_deps = [
- libcamera_dep,
+ libcamera_private,
dependency('boost'),
libatomic,
]
@@ -16,12 +16,13 @@ rpi_ipa_includes = [
rpi_ipa_sources = files([
'raspberrypi.cpp',
- 'md_parser.cpp',
- 'md_parser_rpi.cpp',
+ 'md_parser_smia.cpp',
'cam_helper.cpp',
'cam_helper_ov5647.cpp',
'cam_helper_imx219.cpp',
+ 'cam_helper_imx290.cpp',
'cam_helper_imx477.cpp',
+ 'cam_helper_ov9281.cpp',
'controller/controller.cpp',
'controller/histogram.cpp',
'controller/algorithm.cpp',
@@ -39,10 +40,11 @@ rpi_ipa_sources = files([
'controller/rpi/contrast.cpp',
'controller/rpi/sdn.cpp',
'controller/pwl.cpp',
+ 'controller/device_status.cpp',
])
mod = shared_module(ipa_name,
- rpi_ipa_sources,
+ [rpi_ipa_sources, libcamera_generated_ipa_headers],
name_prefix : '',
include_directories : rpi_ipa_includes,
dependencies : rpi_ipa_deps,
@@ -54,7 +56,7 @@ if ipa_sign_module
custom_target(ipa_name + '.so.sign',
input : mod,
output : ipa_name + '.so.sign',
- command : [ ipa_sign, ipa_priv_key, '@INPUT@', '@OUTPUT@' ],
+ command : [ipa_sign, ipa_priv_key, '@INPUT@', '@OUTPUT@'],
install : false,
build_by_default : true)
endif
diff --git a/src/ipa/raspberrypi/raspberrypi.cpp b/src/ipa/raspberrypi/raspberrypi.cpp
index b0c7d1c1..5cd33304 100644
--- a/src/ipa/raspberrypi/raspberrypi.cpp
+++ b/src/ipa/raspberrypi/raspberrypi.cpp
@@ -1,34 +1,34 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Ltd.
*
* rpi.cpp - Raspberry Pi Image Processing Algorithms
*/
#include <algorithm>
+#include <array>
#include <fcntl.h>
#include <math.h>
#include <stdint.h>
#include <string.h>
#include <sys/mman.h>
-#include <libcamera/buffer.h>
+#include <linux/bcm2835-isp.h>
+
+#include <libcamera/base/log.h>
+#include <libcamera/base/span.h>
+
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/file_descriptor.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/ipa/ipa_interface.h>
#include <libcamera/ipa/ipa_module_info.h>
#include <libcamera/ipa/raspberrypi.h>
+#include <libcamera/ipa/raspberrypi_ipa_interface.h>
#include <libcamera/request.h>
-#include <libcamera/span.h>
-#include <libipa/ipa_interface_wrapper.h>
-
-#include "libcamera/internal/camera_sensor.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/utils.h"
-
-#include <linux/bcm2835-isp.h>
+#include "libcamera/internal/framebuffer.h"
#include "agc_algorithm.hpp"
#include "agc_status.h"
@@ -42,61 +42,79 @@
#include "contrast_algorithm.hpp"
#include "contrast_status.h"
#include "controller.hpp"
+#include "denoise_algorithm.hpp"
+#include "denoise_status.h"
#include "dpc_status.h"
#include "focus_status.h"
#include "geq_status.h"
#include "lux_status.h"
#include "metadata.hpp"
#include "noise_status.h"
-#include "sdn_status.h"
#include "sharpen_algorithm.hpp"
#include "sharpen_status.h"
namespace libcamera {
+using namespace std::literals::chrono_literals;
+using utils::Duration;
+
/* Configure the sensor with these values initially. */
-constexpr double DefaultAnalogueGain = 1.0;
-constexpr unsigned int DefaultExposureTime = 20000;
+constexpr double defaultAnalogueGain = 1.0;
+constexpr Duration defaultExposureTime = 20.0ms;
+constexpr Duration defaultMinFrameDuration = 1.0s / 30.0;
+constexpr Duration defaultMaxFrameDuration = 250.0s;
+
+/*
+ * Determine the minimum allowable inter-frame duration to run the controller
+ * algorithms. If the pipeline handler provides frames at a rate higher than
+ * this, we rate-limit the controller Prepare() and Process() calls to run at
+ * or below this rate.
+ */
+constexpr Duration controllerMinFrameDuration = 1.0s / 60.0;
LOG_DEFINE_CATEGORY(IPARPI)
-class IPARPi : public IPAInterface
+class IPARPi : public ipa::RPi::IPARPiInterface
{
public:
IPARPi()
- : lastMode_({}), controller_(), controllerInit_(false),
- frameCount_(0), checkCount_(0), mistrustCount_(0),
- lsTable_(nullptr)
+ : controller_(), frameCount_(0), checkCount_(0), mistrustCount_(0),
+ lastRunTimestamp_(0), lsTable_(nullptr), firstStart_(true)
{
}
~IPARPi()
{
if (lsTable_)
- munmap(lsTable_, RPi::MaxLsGridSize);
+ munmap(lsTable_, ipa::RPi::MaxLsGridSize);
}
- int init(const IPASettings &settings) override;
- int start() override { return 0; }
+ int init(const IPASettings &settings, ipa::RPi::SensorConfig *sensorConfig) override;
+ void start(const ControlList &controls, ipa::RPi::StartConfig *startConfig) override;
void stop() override {}
- void configure(const CameraSensorInfo &sensorInfo,
- const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- const IPAOperationData &data,
- IPAOperationData *response) override;
+ int configure(const IPACameraSensorInfo &sensorInfo,
+ const std::map<unsigned int, IPAStream> &streamConfig,
+ const std::map<unsigned int, ControlInfoMap> &entityControls,
+ const ipa::RPi::IPAConfig &data,
+ ControlList *controls) override;
void mapBuffers(const std::vector<IPABuffer> &buffers) override;
void unmapBuffers(const std::vector<unsigned int> &ids) override;
- void processEvent(const IPAOperationData &event) override;
+ void signalStatReady(const uint32_t bufferId) override;
+ void signalQueueRequest(const ControlList &controls) override;
+ void signalIspPrepare(const ipa::RPi::ISPConfig &data) override;
private:
- void setMode(const CameraSensorInfo &sensorInfo);
+ void setMode(const IPACameraSensorInfo &sensorInfo);
+ bool validateSensorControls();
+ bool validateIspControls();
void queueRequest(const ControlList &controls);
void returnEmbeddedBuffer(unsigned int bufferId);
- void prepareISP(unsigned int bufferId);
+ void prepareISP(const ipa::RPi::ISPConfig &data);
void reportMetadata();
- bool parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &deviceStatus);
+ void fillDeviceStatus(const ControlList &sensorControls);
void processStats(unsigned int bufferId);
+ void applyFrameDurations(Duration minFrameDuration, Duration maxFrameDuration);
void applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls);
void applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls);
void applyDG(const struct AgcStatus *dgStatus, ControlList &ctrls);
@@ -104,30 +122,24 @@ private:
void applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, ControlList &ctrls);
void applyGamma(const struct ContrastStatus *contrastStatus, ControlList &ctrls);
void applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls);
- void applyDenoise(const struct SdnStatus *denoiseStatus, ControlList &ctrls);
+ void applyDenoise(const struct DenoiseStatus *denoiseStatus, ControlList &ctrls);
void applySharpen(const struct SharpenStatus *sharpenStatus, ControlList &ctrls);
void applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls);
void applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls);
void resampleTable(uint16_t dest[], double const src[12][16], int destW, int destH);
- std::map<unsigned int, FrameBuffer> buffers_;
- std::map<unsigned int, void *> buffersMemory_;
+ std::map<unsigned int, MappedFrameBuffer> buffers_;
- ControlInfoMap unicamCtrls_;
+ ControlInfoMap sensorCtrls_;
ControlInfoMap ispCtrls_;
ControlList libcameraMetadata_;
- /* IPA configuration. */
- std::string tuningFile_;
-
/* Camera sensor params. */
CameraMode mode_;
- CameraMode lastMode_;
/* Raspberry Pi controller specific defines. */
std::unique_ptr<RPiController::CamHelper> helper_;
RPiController::Controller controller_;
- bool controllerInit_;
RPiController::Metadata rpiMetadata_;
/*
@@ -142,18 +154,135 @@ private:
/* How many frames we should avoid running control algos on. */
unsigned int mistrustCount_;
+ /* Number of frames that need to be dropped on startup. */
+ unsigned int dropFrameCount_;
+
+ /* Frame timestamp for the last run of the controller. */
+ uint64_t lastRunTimestamp_;
+
+ /* Do we run a Controller::process() for this frame? */
+ bool processPending_;
+
/* LS table allocation passed in from the pipeline handler. */
FileDescriptor lsTableHandle_;
void *lsTable_;
+
+ /* Distinguish the first camera start from others. */
+ bool firstStart_;
+
+ /* Frame duration (1/fps) limits. */
+ Duration minFrameDuration_;
+ Duration maxFrameDuration_;
};
-int IPARPi::init(const IPASettings &settings)
+int IPARPi::init(const IPASettings &settings, ipa::RPi::SensorConfig *sensorConfig)
{
- tuningFile_ = settings.configurationFile;
+ /*
+ * Load the "helper" for this sensor. This tells us all the device specific stuff
+ * that the kernel driver doesn't. We only do this the first time; we don't need
+ * to re-parse the metadata after a simple mode-switch for no reason.
+ */
+ helper_ = std::unique_ptr<RPiController::CamHelper>(RPiController::CamHelper::Create(settings.sensorModel));
+ if (!helper_) {
+ LOG(IPARPI, Error) << "Could not create camera helper for "
+ << settings.sensorModel;
+ return -EINVAL;
+ }
+
+ /*
+ * Pass out the sensor config to the pipeline handler in order
+ * to setup the staggered writer class.
+ */
+ int gainDelay, exposureDelay, vblankDelay, sensorMetadata;
+ helper_->GetDelays(exposureDelay, gainDelay, vblankDelay);
+ sensorMetadata = helper_->SensorEmbeddedDataPresent();
+
+ sensorConfig->gainDelay = gainDelay;
+ sensorConfig->exposureDelay = exposureDelay;
+ sensorConfig->vblankDelay = vblankDelay;
+ sensorConfig->sensorMetadata = sensorMetadata;
+
+ /* Load the tuning file for this sensor. */
+ controller_.Read(settings.configurationFile.c_str());
+ controller_.Initialise();
+
return 0;
}
-void IPARPi::setMode(const CameraSensorInfo &sensorInfo)
+void IPARPi::start(const ControlList &controls, ipa::RPi::StartConfig *startConfig)
+{
+ RPiController::Metadata metadata;
+
+ ASSERT(startConfig);
+ if (!controls.empty()) {
+ /* We have been given some controls to action before start. */
+ queueRequest(controls);
+ }
+
+ controller_.SwitchMode(mode_, &metadata);
+
+ /* SwitchMode may supply updated exposure/gain values to use. */
+ AgcStatus agcStatus;
+ agcStatus.shutter_time = 0.0s;
+ agcStatus.analogue_gain = 0.0;
+
+ metadata.Get("agc.status", agcStatus);
+ if (agcStatus.shutter_time && agcStatus.analogue_gain) {
+ ControlList ctrls(sensorCtrls_);
+ applyAGC(&agcStatus, ctrls);
+ startConfig->controls = std::move(ctrls);
+ }
+
+ /*
+ * Initialise frame counts, and decide how many frames must be hidden or
+ * "mistrusted", which depends on whether this is a startup from cold,
+ * or merely a mode switch in a running system.
+ */
+ frameCount_ = 0;
+ checkCount_ = 0;
+ if (firstStart_) {
+ dropFrameCount_ = helper_->HideFramesStartup();
+ mistrustCount_ = helper_->MistrustFramesStartup();
+
+ /*
+ * Query the AGC/AWB for how many frames they may take to
+ * converge sufficiently. Where these numbers are non-zero
+ * we must allow for the frames with bad statistics
+ * (mistrustCount_) that they won't see. But if zero (i.e.
+ * no convergence necessary), no frames need to be dropped.
+ */
+ unsigned int agcConvergenceFrames = 0;
+ RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
+ controller_.GetAlgorithm("agc"));
+ if (agc) {
+ agcConvergenceFrames = agc->GetConvergenceFrames();
+ if (agcConvergenceFrames)
+ agcConvergenceFrames += mistrustCount_;
+ }
+
+ unsigned int awbConvergenceFrames = 0;
+ RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
+ controller_.GetAlgorithm("awb"));
+ if (awb) {
+ awbConvergenceFrames = awb->GetConvergenceFrames();
+ if (awbConvergenceFrames)
+ awbConvergenceFrames += mistrustCount_;
+ }
+
+ dropFrameCount_ = std::max({ dropFrameCount_, agcConvergenceFrames, awbConvergenceFrames });
+ LOG(IPARPI, Debug) << "Drop " << dropFrameCount_ << " frames on startup";
+ } else {
+ dropFrameCount_ = helper_->HideFramesModeSwitch();
+ mistrustCount_ = helper_->MistrustFramesModeSwitch();
+ }
+
+ startConfig->dropFrameCount = dropFrameCount_;
+
+ firstStart_ = false;
+ lastRunTimestamp_ = 0;
+}
+
+void IPARPi::setMode(const IPACameraSensorInfo &sensorInfo)
{
mode_.bitdepth = sensorInfo.bitsPerPixel;
mode_.width = sensorInfo.outputSize.width;
@@ -178,81 +307,70 @@ void IPARPi::setMode(const CameraSensorInfo &sensorInfo)
*
* \todo Get the pipeline handle to provide the full data
*/
- mode_.bin_y = std::min(2, static_cast<int>(mode_.scale_x));
+ mode_.bin_x = std::min(2, static_cast<int>(mode_.scale_x));
mode_.bin_y = std::min(2, static_cast<int>(mode_.scale_y));
/* The noise factor is the square root of the total binning factor. */
mode_.noise_factor = sqrt(mode_.bin_x * mode_.bin_y);
/*
- * Calculate the line length in nanoseconds as the ratio between
- * the line length in pixels and the pixel rate.
+ * Calculate the line length as the ratio between the line length in
+ * pixels and the pixel rate.
+ */
+ mode_.line_length = sensorInfo.lineLength * (1.0s / sensorInfo.pixelRate);
+
+ /*
+ * Set the frame length limits for the mode to ensure exposure and
+ * framerate calculations are clipped appropriately.
*/
- mode_.line_length = 1e9 * sensorInfo.lineLength / sensorInfo.pixelRate;
+ mode_.min_frame_length = sensorInfo.minFrameLength;
+ mode_.max_frame_length = sensorInfo.maxFrameLength;
}
-void IPARPi::configure(const CameraSensorInfo &sensorInfo,
- [[maybe_unused]] const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- const IPAOperationData &ipaConfig,
- IPAOperationData *result)
+int IPARPi::configure(const IPACameraSensorInfo &sensorInfo,
+ [[maybe_unused]] const std::map<unsigned int, IPAStream> &streamConfig,
+ const std::map<unsigned int, ControlInfoMap> &entityControls,
+ const ipa::RPi::IPAConfig &ipaConfig,
+ ControlList *controls)
{
- if (entityControls.empty())
- return;
-
- result->operation = 0;
+ if (entityControls.size() != 2) {
+ LOG(IPARPI, Error) << "No ISP or sensor controls found.";
+ return -1;
+ }
- unicamCtrls_ = entityControls.at(0);
+ sensorCtrls_ = entityControls.at(0);
ispCtrls_ = entityControls.at(1);
- /* Setup a metadata ControlList to output metadata. */
- libcameraMetadata_ = ControlList(controls::controls);
-
- /*
- * Load the "helper" for this sensor. This tells us all the device specific stuff
- * that the kernel driver doesn't. We only do this the first time; we don't need
- * to re-parse the metadata after a simple mode-switch for no reason.
- */
- std::string cameraName(sensorInfo.model);
- if (!helper_) {
- helper_ = std::unique_ptr<RPiController::CamHelper>(RPiController::CamHelper::Create(cameraName));
-
- /*
- * Pass out the sensor config to the pipeline handler in order
- * to setup the staggered writer class.
- */
- int gainDelay, exposureDelay, sensorMetadata;
- helper_->GetDelays(exposureDelay, gainDelay);
- sensorMetadata = helper_->SensorEmbeddedDataPresent();
-
- result->data.push_back(gainDelay);
- result->data.push_back(exposureDelay);
- result->data.push_back(sensorMetadata);
+ if (!validateSensorControls()) {
+ LOG(IPARPI, Error) << "Sensor control validation failed.";
+ return -1;
+ }
- result->operation |= RPi::IPA_CONFIG_STAGGERED_WRITE;
+ if (!validateIspControls()) {
+ LOG(IPARPI, Error) << "ISP control validation failed.";
+ return -1;
}
+ /* Setup a metadata ControlList to output metadata. */
+ libcameraMetadata_ = ControlList(controls::controls);
+
/* Re-assemble camera mode using the sensor info. */
setMode(sensorInfo);
- /*
- * The ipaConfig.data always gives us the user transform first. Note that
- * this will always make the LS table pointer (if present) element 1.
- */
- mode_.transform = static_cast<libcamera::Transform>(ipaConfig.data[0]);
+ mode_.transform = static_cast<libcamera::Transform>(ipaConfig.transform);
/* Store the lens shading table pointer and handle if available. */
- if (ipaConfig.operation & RPi::IPA_CONFIG_LS_TABLE) {
+ if (ipaConfig.lsTableHandle.isValid()) {
/* Remove any previous table, if there was one. */
if (lsTable_) {
- munmap(lsTable_, RPi::MaxLsGridSize);
+ munmap(lsTable_, ipa::RPi::MaxLsGridSize);
lsTable_ = nullptr;
}
- /* Map the LS table buffer into user space (now element 1). */
- lsTableHandle_ = FileDescriptor(ipaConfig.data[1]);
+ /* Map the LS table buffer into user space. */
+ lsTableHandle_ = std::move(ipaConfig.lsTableHandle);
if (lsTableHandle_.isValid()) {
- lsTable_ = mmap(nullptr, RPi::MaxLsGridSize, PROT_READ | PROT_WRITE,
+ lsTable_ = mmap(nullptr, ipa::RPi::MaxLsGridSize, PROT_READ | PROT_WRITE,
MAP_SHARED, lsTableHandle_.fd(), 0);
if (lsTable_ == MAP_FAILED) {
@@ -265,139 +383,72 @@ void IPARPi::configure(const CameraSensorInfo &sensorInfo,
/* Pass the camera mode to the CamHelper to setup algorithms. */
helper_->SetCameraMode(mode_);
- /*
- * Initialise frame counts, and decide how many frames must be hidden or
- *"mistrusted", which depends on whether this is a startup from cold,
- * or merely a mode switch in a running system.
- */
- frameCount_ = 0;
- checkCount_ = 0;
- unsigned int dropFrame = 0;
- if (controllerInit_) {
- dropFrame = helper_->HideFramesModeSwitch();
- mistrustCount_ = helper_->MistrustFramesModeSwitch();
- } else {
- dropFrame = helper_->HideFramesStartup();
- mistrustCount_ = helper_->MistrustFramesStartup();
- }
-
- result->data.push_back(dropFrame);
- result->operation |= RPi::IPA_CONFIG_DROP_FRAMES;
-
- /* These zero values mean not program anything (unless overwritten). */
- struct AgcStatus agcStatus;
- agcStatus.shutter_time = 0.0;
- agcStatus.analogue_gain = 0.0;
-
- if (!controllerInit_) {
- /* Load the tuning file for this sensor. */
- controller_.Read(tuningFile_.c_str());
- controller_.Initialise();
- controllerInit_ = true;
+ if (firstStart_) {
+ /* Supply initial values for frame durations. */
+ applyFrameDurations(defaultMinFrameDuration, defaultMaxFrameDuration);
/* Supply initial values for gain and exposure. */
- agcStatus.shutter_time = DefaultExposureTime;
- agcStatus.analogue_gain = DefaultAnalogueGain;
- }
-
- RPiController::Metadata metadata;
- controller_.SwitchMode(mode_, &metadata);
-
- /* SwitchMode may supply updated exposure/gain values to use. */
- metadata.Get("agc.status", agcStatus);
- if (agcStatus.shutter_time != 0.0 && agcStatus.analogue_gain != 0.0) {
- ControlList ctrls(unicamCtrls_);
+ ControlList ctrls(sensorCtrls_);
+ AgcStatus agcStatus;
+ agcStatus.shutter_time = defaultExposureTime;
+ agcStatus.analogue_gain = defaultAnalogueGain;
applyAGC(&agcStatus, ctrls);
- result->controls.push_back(ctrls);
- result->operation |= RPi::IPA_CONFIG_SENSOR;
+ ASSERT(controls);
+ *controls = std::move(ctrls);
}
- lastMode_ = mode_;
+ return 0;
}
void IPARPi::mapBuffers(const std::vector<IPABuffer> &buffers)
{
for (const IPABuffer &buffer : buffers) {
- auto elem = buffers_.emplace(std::piecewise_construct,
- std::forward_as_tuple(buffer.id),
- std::forward_as_tuple(buffer.planes));
- const FrameBuffer &fb = elem.first->second;
-
- buffersMemory_[buffer.id] = mmap(nullptr, fb.planes()[0].length,
- PROT_READ | PROT_WRITE, MAP_SHARED,
- fb.planes()[0].fd.fd(), 0);
-
- if (buffersMemory_[buffer.id] == MAP_FAILED) {
- int ret = -errno;
- LOG(IPARPI, Fatal) << "Failed to mmap buffer: " << strerror(-ret);
- }
+ const FrameBuffer fb(buffer.planes);
+ buffers_.emplace(buffer.id, MappedFrameBuffer(&fb, PROT_READ | PROT_WRITE));
}
}
void IPARPi::unmapBuffers(const std::vector<unsigned int> &ids)
{
for (unsigned int id : ids) {
- const auto fb = buffers_.find(id);
- if (fb == buffers_.end())
+ auto it = buffers_.find(id);
+ if (it == buffers_.end())
continue;
- munmap(buffersMemory_[id], fb->second.planes()[0].length);
- buffersMemory_.erase(id);
buffers_.erase(id);
}
}
-void IPARPi::processEvent(const IPAOperationData &event)
+void IPARPi::signalStatReady(uint32_t bufferId)
{
- switch (event.operation) {
- case RPi::IPA_EVENT_SIGNAL_STAT_READY: {
- unsigned int bufferId = event.data[0];
-
- if (++checkCount_ != frameCount_) /* assert here? */
- LOG(IPARPI, Error) << "WARNING: Prepare/Process mismatch!!!";
- if (frameCount_ > mistrustCount_)
- processStats(bufferId);
-
- reportMetadata();
-
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_STATS_METADATA_COMPLETE;
- op.data = { bufferId & RPi::BufferMask::ID };
- op.controls = { libcameraMetadata_ };
- queueFrameAction.emit(0, op);
- break;
- }
+ if (++checkCount_ != frameCount_) /* assert here? */
+ LOG(IPARPI, Error) << "WARNING: Prepare/Process mismatch!!!";
+ if (processPending_ && frameCount_ > mistrustCount_)
+ processStats(bufferId);
- case RPi::IPA_EVENT_SIGNAL_ISP_PREPARE: {
- unsigned int embeddedbufferId = event.data[0];
- unsigned int bayerbufferId = event.data[1];
+ reportMetadata();
- /*
- * At start-up, or after a mode-switch, we may want to
- * avoid running the control algos for a few frames in case
- * they are "unreliable".
- */
- prepareISP(embeddedbufferId);
- frameCount_++;
-
- /* Ready to push the input buffer into the ISP. */
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_RUN_ISP;
- op.data = { bayerbufferId & RPi::BufferMask::ID };
- queueFrameAction.emit(0, op);
- break;
- }
+ statsMetadataComplete.emit(bufferId & ipa::RPi::MaskID, libcameraMetadata_);
+}
- case RPi::IPA_EVENT_QUEUE_REQUEST: {
- queueRequest(event.controls[0]);
- break;
- }
+void IPARPi::signalQueueRequest(const ControlList &controls)
+{
+ queueRequest(controls);
+}
- default:
- LOG(IPARPI, Error) << "Unknown event " << event.operation;
- break;
- }
+void IPARPi::signalIspPrepare(const ipa::RPi::ISPConfig &data)
+{
+ /*
+ * At start-up, or after a mode-switch, we may want to
+ * avoid running the control algos for a few frames in case
+ * they are "unreliable".
+ */
+ prepareISP(data);
+ frameCount_++;
+
+ /* Ready to push the input buffer into the ISP. */
+ runIsp.emit(data.bayerBufferId & ipa::RPi::MaskID);
}
void IPARPi::reportMetadata()
@@ -411,13 +462,16 @@ void IPARPi::reportMetadata()
*/
DeviceStatus *deviceStatus = rpiMetadata_.GetLocked<DeviceStatus>("device.status");
if (deviceStatus) {
- libcameraMetadata_.set(controls::ExposureTime, deviceStatus->shutter_speed);
+ libcameraMetadata_.set(controls::ExposureTime,
+ deviceStatus->shutter_speed.get<std::micro>());
libcameraMetadata_.set(controls::AnalogueGain, deviceStatus->analogue_gain);
}
AgcStatus *agcStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
- if (agcStatus)
+ if (agcStatus) {
libcameraMetadata_.set(controls::AeLocked, agcStatus->locked);
+ libcameraMetadata_.set(controls::DigitalGain, agcStatus->digital_gain);
+ }
LuxStatus *luxStatus = rpiMetadata_.GetLocked<LuxStatus>("lux.status");
if (luxStatus)
@@ -458,6 +512,53 @@ void IPARPi::reportMetadata()
}
}
+bool IPARPi::validateSensorControls()
+{
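+ /* Sensor controls needed by fillDeviceStatus() and applyAGC(). */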
+ static const uint32_t ctrls[] = {
+ V4L2_CID_ANALOGUE_GAIN,
+ V4L2_CID_EXPOSURE,
+ V4L2_CID_VBLANK,
+ };
+
+ for (auto c : ctrls) {
+ if (sensorCtrls_.find(c) == sensorCtrls_.end()) {
+ LOG(IPARPI, Error) << "Unable to find sensor control "
+ << utils::hex(c);
+ return false;
+ }
+ }
+
+ return true;
+}
+
+bool IPARPi::validateIspControls()
+{
+ static const uint32_t ctrls[] = {
+ V4L2_CID_RED_BALANCE,
+ V4L2_CID_BLUE_BALANCE,
+ V4L2_CID_DIGITAL_GAIN,
+ V4L2_CID_USER_BCM2835_ISP_CC_MATRIX,
+ V4L2_CID_USER_BCM2835_ISP_GAMMA,
+ V4L2_CID_USER_BCM2835_ISP_BLACK_LEVEL,
+ V4L2_CID_USER_BCM2835_ISP_GEQ,
+ V4L2_CID_USER_BCM2835_ISP_DENOISE,
+ V4L2_CID_USER_BCM2835_ISP_SHARPEN,
+ V4L2_CID_USER_BCM2835_ISP_DPC,
+ V4L2_CID_USER_BCM2835_ISP_LENS_SHADING,
+ V4L2_CID_USER_BCM2835_ISP_CDN,
+ };
+
+ for (auto c : ctrls) {
+ if (ispCtrls_.find(c) == ispCtrls_.end()) {
+ LOG(IPARPI, Error) << "Unable to find ISP control "
+ << utils::hex(c);
+ return false;
+ }
+ }
+
+ return true;
+}
+
/*
* Converting between enums (used in the libcamera API) and the names that
* we use to identify different modes. Unfortunately, the conversion tables
@@ -490,9 +591,18 @@ static const std::map<int32_t, std::string> AwbModeTable = {
{ controls::AwbFluorescent, "fluorescent" },
{ controls::AwbIndoor, "indoor" },
{ controls::AwbDaylight, "daylight" },
+ { controls::AwbCloudy, "cloudy" },
{ controls::AwbCustom, "custom" },
};
+static const std::map<int32_t, RPiController::DenoiseMode> DenoiseModeTable = {
+ { controls::draft::NoiseReductionModeOff, RPiController::DenoiseMode::Off },
+ { controls::draft::NoiseReductionModeFast, RPiController::DenoiseMode::ColourFast },
+ { controls::draft::NoiseReductionModeHighQuality, RPiController::DenoiseMode::ColourHighQuality },
+ { controls::draft::NoiseReductionModeMinimal, RPiController::DenoiseMode::ColourOff },
+ { controls::draft::NoiseReductionModeZSL, RPiController::DenoiseMode::ColourHighQuality },
+};
+
void IPARPi::queueRequest(const ControlList &controls)
{
/* Clear the return metadata buffer. */
@@ -506,7 +616,12 @@ void IPARPi::queueRequest(const ControlList &controls)
switch (ctrl.first) {
case controls::AE_ENABLE: {
RPiController::Algorithm *agc = controller_.GetAlgorithm("agc");
- ASSERT(agc);
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set AE_ENABLE - no AGC algorithm";
+ break;
+ }
+
if (ctrl.second.get<bool>() == false)
agc->Pause();
else
@@ -519,14 +634,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::EXPOSURE_TIME: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
-
- /* This expects units of micro-seconds. */
- agc->SetFixedShutter(ctrl.second.get<int32_t>());
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set EXPOSURE_TIME - no AGC algorithm";
+ break;
+ }
- /* For the manual values to take effect, AGC must be unpaused. */
- if (agc->IsPaused())
- agc->Resume();
+ /* The control provides units of microseconds. */
+ agc->SetFixedShutter(ctrl.second.get<int32_t>() * 1.0us);
libcameraMetadata_.set(controls::ExposureTime, ctrl.second.get<int32_t>());
break;
@@ -535,12 +650,13 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::ANALOGUE_GAIN: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
- agc->SetFixedAnalogueGain(ctrl.second.get<float>());
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set ANALOGUE_GAIN - no AGC algorithm";
+ break;
+ }
- /* For the manual values to take effect, AGC must be unpaused. */
- if (agc->IsPaused())
- agc->Resume();
+ agc->SetFixedAnalogueGain(ctrl.second.get<float>());
libcameraMetadata_.set(controls::AnalogueGain,
ctrl.second.get<float>());
@@ -550,7 +666,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_METERING_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set AE_METERING_MODE - no AGC algorithm";
+ break;
+ }
int32_t idx = ctrl.second.get<int32_t>();
if (MeteringModeTable.count(idx)) {
@@ -566,7 +686,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_CONSTRAINT_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set AE_CONSTRAINT_MODE - no AGC algorithm";
+ break;
+ }
int32_t idx = ctrl.second.get<int32_t>();
if (ConstraintModeTable.count(idx)) {
@@ -582,7 +706,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_EXPOSURE_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set AE_EXPOSURE_MODE - no AGC algorithm";
+ break;
+ }
int32_t idx = ctrl.second.get<int32_t>();
if (ExposureModeTable.count(idx)) {
@@ -598,7 +726,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::EXPOSURE_VALUE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set EXPOSURE_VALUE - no AGC algorithm";
+ break;
+ }
/*
* The SetEv() method takes in a direct exposure multiplier.
@@ -613,7 +745,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AWB_ENABLE: {
RPiController::Algorithm *awb = controller_.GetAlgorithm("awb");
- ASSERT(awb);
+ if (!awb) {
+ LOG(IPARPI, Warning)
+ << "Could not set AWB_ENABLE - no AWB algorithm";
+ break;
+ }
if (ctrl.second.get<bool>() == false)
awb->Pause();
@@ -628,7 +764,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AWB_MODE: {
RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
controller_.GetAlgorithm("awb"));
- ASSERT(awb);
+ if (!awb) {
+ LOG(IPARPI, Warning)
+ << "Could not set AWB_MODE - no AWB algorithm";
+ break;
+ }
int32_t idx = ctrl.second.get<int32_t>();
if (AwbModeTable.count(idx)) {
@@ -645,7 +785,11 @@ void IPARPi::queueRequest(const ControlList &controls)
auto gains = ctrl.second.get<Span<const float>>();
RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
controller_.GetAlgorithm("awb"));
- ASSERT(awb);
+ if (!awb) {
+ LOG(IPARPI, Warning)
+ << "Could not set COLOUR_GAINS - no AWB algorithm";
+ break;
+ }
awb->SetManualGains(gains[0], gains[1]);
if (gains[0] != 0.0f && gains[1] != 0.0f)
@@ -658,7 +802,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::BRIGHTNESS: {
RPiController::ContrastAlgorithm *contrast = dynamic_cast<RPiController::ContrastAlgorithm *>(
controller_.GetAlgorithm("contrast"));
- ASSERT(contrast);
+ if (!contrast) {
+ LOG(IPARPI, Warning)
+ << "Could not set BRIGHTNESS - no contrast algorithm";
+ break;
+ }
contrast->SetBrightness(ctrl.second.get<float>() * 65536);
libcameraMetadata_.set(controls::Brightness,
@@ -669,7 +817,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::CONTRAST: {
RPiController::ContrastAlgorithm *contrast = dynamic_cast<RPiController::ContrastAlgorithm *>(
controller_.GetAlgorithm("contrast"));
- ASSERT(contrast);
+ if (!contrast) {
+ LOG(IPARPI, Warning)
+ << "Could not set CONTRAST - no contrast algorithm";
+ break;
+ }
contrast->SetContrast(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Contrast,
@@ -680,7 +832,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::SATURATION: {
RPiController::CcmAlgorithm *ccm = dynamic_cast<RPiController::CcmAlgorithm *>(
controller_.GetAlgorithm("ccm"));
- ASSERT(ccm);
+ if (!ccm) {
+ LOG(IPARPI, Warning)
+ << "Could not set SATURATION - no ccm algorithm";
+ break;
+ }
ccm->SetSaturation(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Saturation,
@@ -691,7 +847,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::SHARPNESS: {
RPiController::SharpenAlgorithm *sharpen = dynamic_cast<RPiController::SharpenAlgorithm *>(
controller_.GetAlgorithm("sharpen"));
- ASSERT(sharpen);
+ if (!sharpen) {
+ LOG(IPARPI, Warning)
+ << "Could not set SHARPNESS - no sharpen algorithm";
+ break;
+ }
sharpen->SetStrength(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Sharpness,
@@ -699,6 +859,44 @@ void IPARPi::queueRequest(const ControlList &controls)
break;
}
+ case controls::SCALER_CROP: {
+ /* We do nothing with this control, but accept it to avoid the unhandled-control warning below. */
+ break;
+ }
+
+ case controls::FRAME_DURATION_LIMITS: {
+ auto frameDurations = ctrl.second.get<Span<const int64_t>>();
+ applyFrameDurations(frameDurations[0] * 1.0us, frameDurations[1] * 1.0us);
+ break;
+ }
+
+ case controls::NOISE_REDUCTION_MODE: {
+ RPiController::DenoiseAlgorithm *sdn = dynamic_cast<RPiController::DenoiseAlgorithm *>(
+ controller_.GetAlgorithm("SDN"));
+ if (!sdn) {
+ LOG(IPARPI, Warning)
+ << "Could not set NOISE_REDUCTION_MODE - no SDN algorithm";
+ break;
+ }
+
+ int32_t idx = ctrl.second.get<int32_t>();
+ auto mode = DenoiseModeTable.find(idx);
+ if (mode != DenoiseModeTable.end()) {
+ sdn->SetMode(mode->second);
+
+ /*
+ * \todo If the colour denoise is not going to run due to an
+ * analysis image resolution or format mismatch, we should
+ * report the status correctly in the metadata.
+ */
+ libcameraMetadata_.set(controls::draft::NoiseReductionMode, idx);
+ } else {
+ LOG(IPARPI, Error) << "Noise reduction mode " << idx
+ << " not recognised";
+ }
+ break;
+ }
+
default:
LOG(IPARPI, Warning)
<< "Ctrl " << controls::controls.at(ctrl.first)->name()
@@ -710,152 +908,149 @@ void IPARPi::queueRequest(const ControlList &controls)
void IPARPi::returnEmbeddedBuffer(unsigned int bufferId)
{
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_EMBEDDED_COMPLETE;
- op.data = { bufferId & RPi::BufferMask::ID };
- queueFrameAction.emit(0, op);
+ embeddedComplete.emit(bufferId & ipa::RPi::MaskID);
}
-void IPARPi::prepareISP(unsigned int bufferId)
+void IPARPi::prepareISP(const ipa::RPi::ISPConfig &data)
{
- struct DeviceStatus deviceStatus = {};
- bool success = parseEmbeddedData(bufferId, deviceStatus);
+ int64_t frameTimestamp = data.controls.get(controls::SensorTimestamp);
+ RPiController::Metadata lastMetadata;
+ Span<uint8_t> embeddedBuffer;
+
+ lastMetadata = std::move(rpiMetadata_);
+ fillDeviceStatus(data.controls);
+
+ if (data.embeddedBufferPresent) {
+ /*
+ * Pipeline handler has supplied us with an embedded data buffer,
+ * we must pass it to the CamHelper for parsing.
+ */
+ auto it = buffers_.find(data.embeddedBufferId);
+ ASSERT(it != buffers_.end());
+ embeddedBuffer = it->second.maps()[0];
+ }
+
+ /*
+ * This may overwrite the DeviceStatus using values from the sensor
+ * metadata, and may also do additional custom processing.
+ */
+ helper_->Prepare(embeddedBuffer, rpiMetadata_);
/* Done with embedded data now, return to pipeline handler asap. */
- returnEmbeddedBuffer(bufferId);
+ if (data.embeddedBufferPresent)
+ returnEmbeddedBuffer(data.embeddedBufferId);
- if (success) {
- ControlList ctrls(ispCtrls_);
+ /* Allow a 10% margin on the comparison below. */
+ Duration delta = (frameTimestamp - lastRunTimestamp_) * 1.0ns;
+ if (lastRunTimestamp_ && frameCount_ > dropFrameCount_ &&
+ delta < controllerMinFrameDuration * 0.9) {
+ /*
+ * Ensure we merge the previous frame's metadata with the current
+ * frame. This will not overwrite exposure/gain values for the
+ * current frame, or any other bits of metadata that were added
+ * in helper_->Prepare().
+ */
+ rpiMetadata_.Merge(lastMetadata);
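+ /* Clearing processPending_ makes signalStatReady() skip processStats() for this frame. */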
+ processPending_ = false;
+ return;
+ }
- rpiMetadata_.Clear();
- rpiMetadata_.Set("device.status", deviceStatus);
- controller_.Prepare(&rpiMetadata_);
+ lastRunTimestamp_ = frameTimestamp;
+ processPending_ = true;
- /* Lock the metadata buffer to avoid constant locks/unlocks. */
- std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
+ ControlList ctrls(ispCtrls_);
- AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
- if (awbStatus)
- applyAWB(awbStatus, ctrls);
+ controller_.Prepare(&rpiMetadata_);
- CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
- if (ccmStatus)
- applyCCM(ccmStatus, ctrls);
+ /* Lock the metadata buffer to avoid constant locks/unlocks. */
+ std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
- AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
- if (dgStatus)
- applyDG(dgStatus, ctrls);
+ AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
+ if (awbStatus)
+ applyAWB(awbStatus, ctrls);
- AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
- if (lsStatus)
- applyLS(lsStatus, ctrls);
+ CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
+ if (ccmStatus)
+ applyCCM(ccmStatus, ctrls);
- ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
- if (contrastStatus)
- applyGamma(contrastStatus, ctrls);
+ AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
+ if (dgStatus)
+ applyDG(dgStatus, ctrls);
- BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
- if (blackLevelStatus)
- applyBlackLevel(blackLevelStatus, ctrls);
+ AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
+ if (lsStatus)
+ applyLS(lsStatus, ctrls);
- GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
- if (geqStatus)
- applyGEQ(geqStatus, ctrls);
+ ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
+ if (contrastStatus)
+ applyGamma(contrastStatus, ctrls);
- SdnStatus *denoiseStatus = rpiMetadata_.GetLocked<SdnStatus>("sdn.status");
- if (denoiseStatus)
- applyDenoise(denoiseStatus, ctrls);
+ BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
+ if (blackLevelStatus)
+ applyBlackLevel(blackLevelStatus, ctrls);
- SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
- if (sharpenStatus)
- applySharpen(sharpenStatus, ctrls);
+ GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
+ if (geqStatus)
+ applyGEQ(geqStatus, ctrls);
- DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
- if (dpcStatus)
- applyDPC(dpcStatus, ctrls);
+ DenoiseStatus *denoiseStatus = rpiMetadata_.GetLocked<DenoiseStatus>("denoise.status");
+ if (denoiseStatus)
+ applyDenoise(denoiseStatus, ctrls);
- if (!ctrls.empty()) {
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_V4L2_SET_ISP;
- op.controls.push_back(ctrls);
- queueFrameAction.emit(0, op);
- }
- }
+ SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
+ if (sharpenStatus)
+ applySharpen(sharpenStatus, ctrls);
+
+ DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
+ if (dpcStatus)
+ applyDPC(dpcStatus, ctrls);
+
+ if (!ctrls.empty())
+ setIspControls.emit(ctrls);
}
-bool IPARPi::parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &deviceStatus)
+void IPARPi::fillDeviceStatus(const ControlList &sensorControls)
{
- auto it = buffersMemory_.find(bufferId);
- if (it == buffersMemory_.end()) {
- LOG(IPARPI, Error) << "Could not find embedded buffer!";
- return false;
- }
+ DeviceStatus deviceStatus = {};
- int size = buffers_.find(bufferId)->second.planes()[0].length;
- helper_->Parser().SetBufferSize(size);
- RPiController::MdParser::Status status = helper_->Parser().Parse(it->second);
- if (status != RPiController::MdParser::Status::OK) {
- LOG(IPARPI, Error) << "Embedded Buffer parsing failed, error " << status;
- } else {
- uint32_t exposureLines, gainCode;
- if (helper_->Parser().GetExposureLines(exposureLines) != RPiController::MdParser::Status::OK) {
- LOG(IPARPI, Error) << "Exposure time failed";
- return false;
- }
+ int32_t exposureLines = sensorControls.get(V4L2_CID_EXPOSURE).get<int32_t>();
+ int32_t gainCode = sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
+ int32_t vblank = sensorControls.get(V4L2_CID_VBLANK).get<int32_t>();
- deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
- if (helper_->Parser().GetGainCode(gainCode) != RPiController::MdParser::Status::OK) {
- LOG(IPARPI, Error) << "Gain failed";
- return false;
- }
+ deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
+ deviceStatus.analogue_gain = helper_->Gain(gainCode);
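+ /* The frame length, in lines, is the output height plus the vertical blanking. */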
+ deviceStatus.frame_length = mode_.height + vblank;
- deviceStatus.analogue_gain = helper_->Gain(gainCode);
- LOG(IPARPI, Debug) << "Metadata - Exposure : "
- << deviceStatus.shutter_speed << " Gain : "
- << deviceStatus.analogue_gain;
- }
+ LOG(IPARPI, Debug) << "Metadata - " << deviceStatus;
- return true;
+ rpiMetadata_.Set("device.status", deviceStatus);
}
void IPARPi::processStats(unsigned int bufferId)
{
- auto it = buffersMemory_.find(bufferId);
- if (it == buffersMemory_.end()) {
+ auto it = buffers_.find(bufferId);
+ if (it == buffers_.end()) {
LOG(IPARPI, Error) << "Could not find stats buffer!";
return;
}
- bcm2835_isp_stats *stats = static_cast<bcm2835_isp_stats *>(it->second);
+ Span<uint8_t> mem = it->second.maps()[0];
+ bcm2835_isp_stats *stats = reinterpret_cast<bcm2835_isp_stats *>(mem.data());
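+ /* Take a copy of the stats so the mapped buffer can be recycled while the algorithms use them. */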
RPiController::StatisticsPtr statistics = std::make_shared<bcm2835_isp_stats>(*stats);
+ helper_->Process(statistics, rpiMetadata_);
controller_.Process(statistics, &rpiMetadata_);
struct AgcStatus agcStatus;
if (rpiMetadata_.Get("agc.status", agcStatus) == 0) {
- ControlList ctrls(unicamCtrls_);
+ ControlList ctrls(sensorCtrls_);
applyAGC(&agcStatus, ctrls);
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_V4L2_SET_STAGGERED;
- op.controls.push_back(ctrls);
- queueFrameAction.emit(0, op);
+ setDelayedControls.emit(ctrls);
}
}
void IPARPi::applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls)
{
- const auto gainR = ispCtrls_.find(V4L2_CID_RED_BALANCE);
- if (gainR == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find red gain control";
- return;
- }
-
- const auto gainB = ispCtrls_.find(V4L2_CID_BLUE_BALANCE);
- if (gainB == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find blue gain control";
- return;
- }
-
LOG(IPARPI, Debug) << "Applying WB R: " << awbStatus->gain_r << " B: "
<< awbStatus->gain_b;
@@ -865,49 +1060,76 @@ void IPARPi::applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls)
static_cast<int32_t>(awbStatus->gain_b * 1000));
}
+void IPARPi::applyFrameDurations(Duration minFrameDuration, Duration maxFrameDuration)
+{
+ const Duration minSensorFrameDuration = mode_.min_frame_length * mode_.line_length;
+ const Duration maxSensorFrameDuration = mode_.max_frame_length * mode_.line_length;
+
+ /*
+ * These limits only take effect once the AGC next recalculates, and the
+ * values may additionally be clamped to the sensor mode's capabilities.
+ */
+ minFrameDuration_ = minFrameDuration ? minFrameDuration : defaultMinFrameDuration;
+ maxFrameDuration_ = maxFrameDuration ? maxFrameDuration : defaultMaxFrameDuration;
+ minFrameDuration_ = std::clamp(minFrameDuration_,
+ minSensorFrameDuration, maxSensorFrameDuration);
+ maxFrameDuration_ = std::clamp(maxFrameDuration_,
+ minSensorFrameDuration, maxSensorFrameDuration);
+ maxFrameDuration_ = std::max(maxFrameDuration_, minFrameDuration_);
+
+ /* Return the validated limits via metadata. */
+ libcameraMetadata_.set(controls::FrameDurationLimits,
+ { static_cast<int64_t>(minFrameDuration_.get<std::micro>()),
+ static_cast<int64_t>(maxFrameDuration_.get<std::micro>()) });
+
+ /*
+ * Calculate the maximum exposure time possible for the AGC to use.
+ * GetVBlanking() will update maxShutter with the largest exposure
+ * value possible.
+ */
+ Duration maxShutter = Duration::max();
+ helper_->GetVBlanking(maxShutter, minFrameDuration_, maxFrameDuration_);
+
+ RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
+ controller_.GetAlgorithm("agc"));
+ agc->SetMaxShutter(maxShutter);
+}
+
void IPARPi::applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls)
{
int32_t gainCode = helper_->GainCode(agcStatus->analogue_gain);
- int32_t exposureLines = helper_->ExposureLines(agcStatus->shutter_time);
- if (unicamCtrls_.find(V4L2_CID_ANALOGUE_GAIN) == unicamCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find analogue gain control";
- return;
- }
-
- if (unicamCtrls_.find(V4L2_CID_EXPOSURE) == unicamCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find exposure control";
- return;
- }
+ /* GetVBlanking might clip exposure time to the fps limits. */
+ Duration exposure = agcStatus->shutter_time;
+ int32_t vblanking = helper_->GetVBlanking(exposure, minFrameDuration_, maxFrameDuration_);
+ int32_t exposureLines = helper_->ExposureLines(exposure);
- LOG(IPARPI, Debug) << "Applying AGC Exposure: " << agcStatus->shutter_time
- << " (Shutter lines: " << exposureLines << ") Gain: "
+ LOG(IPARPI, Debug) << "Applying AGC Exposure: " << exposure
+ << " (Shutter lines: " << exposureLines << ", AGC requested "
+ << agcStatus->shutter_time << ") Gain: "
<< agcStatus->analogue_gain << " (Gain Code: "
<< gainCode << ")";
- ctrls.set(V4L2_CID_ANALOGUE_GAIN, gainCode);
+ /*
+ * Due to the behavior of V4L2, the current value of VBLANK could clip the
+ * exposure time without us knowing. The next time through this function,
+ * exposure should be clipped correctly.
+ */
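+ /* Set VBLANK first so the sensor's exposure limits are updated before the new exposure is applied. */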
+ ctrls.set(V4L2_CID_VBLANK, vblanking);
ctrls.set(V4L2_CID_EXPOSURE, exposureLines);
+ ctrls.set(V4L2_CID_ANALOGUE_GAIN, gainCode);
}
void IPARPi::applyDG(const struct AgcStatus *dgStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_DIGITAL_GAIN) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find digital gain control";
- return;
- }
-
ctrls.set(V4L2_CID_DIGITAL_GAIN,
static_cast<int32_t>(dgStatus->digital_gain * 1000));
}
void IPARPi::applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_CC_MATRIX) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find CCM control";
- return;
- }
-
bcm2835_isp_custom_ccm ccm;
+
for (int i = 0; i < 9; i++) {
ccm.ccm.ccm[i / 3][i % 3].den = 1000;
ccm.ccm.ccm[i / 3][i % 3].num = 1000 * ccmStatus->matrix[i];
@@ -923,12 +1145,8 @@ void IPARPi::applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls)
void IPARPi::applyGamma(const struct ContrastStatus *contrastStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_GAMMA) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find Gamma control";
- return;
- }
-
struct bcm2835_isp_gamma gamma;
+
gamma.enabled = 1;
for (int i = 0; i < CONTRAST_NUM_POINTS; i++) {
gamma.x[i] = contrastStatus->points[i].x;
@@ -942,12 +1160,8 @@ void IPARPi::applyGamma(const struct ContrastStatus *contrastStatus, ControlList
void IPARPi::applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_BLACK_LEVEL) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find black level control";
- return;
- }
-
bcm2835_isp_black_level blackLevel;
+
blackLevel.enabled = 1;
blackLevel.black_level_r = blackLevelStatus->black_level_r;
blackLevel.black_level_g = blackLevelStatus->black_level_g;
@@ -960,12 +1174,8 @@ void IPARPi::applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, Co
void IPARPi::applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_GEQ) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find geq control";
- return;
- }
-
bcm2835_isp_geq geq;
+
geq.enabled = 1;
geq.offset = geqStatus->offset;
geq.slope.den = 1000;
@@ -976,34 +1186,48 @@ void IPARPi::applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls)
ctrls.set(V4L2_CID_USER_BCM2835_ISP_GEQ, c);
}
-void IPARPi::applyDenoise(const struct SdnStatus *denoiseStatus, ControlList &ctrls)
+void IPARPi::applyDenoise(const struct DenoiseStatus *denoiseStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_DENOISE) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find denoise control";
- return;
- }
+ using RPiController::DenoiseMode;
bcm2835_isp_denoise denoise;
- denoise.enabled = 1;
+ DenoiseMode mode = static_cast<DenoiseMode>(denoiseStatus->mode);
+
+ denoise.enabled = mode != DenoiseMode::Off;
denoise.constant = denoiseStatus->noise_constant;
denoise.slope.num = 1000 * denoiseStatus->noise_slope;
denoise.slope.den = 1000;
denoise.strength.num = 1000 * denoiseStatus->strength;
denoise.strength.den = 1000;
+ /* Set the CDN mode to match the SDN operating mode. */
+ bcm2835_isp_cdn cdn;
+ switch (mode) {
+ case DenoiseMode::ColourFast:
+ cdn.enabled = 1;
+ cdn.mode = CDN_MODE_FAST;
+ break;
+ case DenoiseMode::ColourHighQuality:
+ cdn.enabled = 1;
+ cdn.mode = CDN_MODE_HIGH_QUALITY;
+ break;
+ default:
+ cdn.enabled = 0;
+ }
+
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&denoise),
sizeof(denoise) });
ctrls.set(V4L2_CID_USER_BCM2835_ISP_DENOISE, c);
+
+ c = ControlValue(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&cdn),
+ sizeof(cdn) });
+ ctrls.set(V4L2_CID_USER_BCM2835_ISP_CDN, c);
}
void IPARPi::applySharpen(const struct SharpenStatus *sharpenStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_SHARPEN) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find sharpen control";
- return;
- }
-
bcm2835_isp_sharpen sharpen;
+
sharpen.enabled = 1;
sharpen.threshold.num = 1000 * sharpenStatus->threshold;
sharpen.threshold.den = 1000;
@@ -1019,12 +1243,8 @@ void IPARPi::applySharpen(const struct SharpenStatus *sharpenStatus, ControlList
void IPARPi::applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_DPC) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find DPC control";
- return;
- }
-
bcm2835_isp_dpc dpc;
+
dpc.enabled = 1;
dpc.strength = dpcStatus->strength;
@@ -1035,17 +1255,12 @@ void IPARPi::applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls)
void IPARPi::applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_LENS_SHADING) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find LS control";
- return;
- }
-
/*
* Program lens shading tables into pipeline.
* Choose smallest cell size that won't exceed 63x48 cells.
*/
const int cellSizes[] = { 16, 32, 64, 128, 256 };
- unsigned int numCells = ARRAY_SIZE(cellSizes);
+ unsigned int numCells = std::size(cellSizes);
unsigned int i, w, h, cellSize;
for (i = 0; i < numCells; i++) {
cellSize = cellSizes[i];
@@ -1068,13 +1283,14 @@ void IPARPi::applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls)
.grid_width = w,
.grid_stride = w,
.grid_height = h,
- .dmabuf = lsTableHandle_.fd(),
+ /* .dmabuf will be filled in by pipeline handler. */
+ .dmabuf = 0,
.ref_transform = 0,
.corner_sampled = 1,
.gain_format = GAIN_FORMAT_U4P10
};
- if (!lsTable_ || w * h * 4 * sizeof(uint16_t) > RPi::MaxLsGridSize) {
+ if (!lsTable_ || w * h * 4 * sizeof(uint16_t) > ipa::RPi::MaxLsGridSize) {
LOG(IPARPI, Error) << "Do not have a correctly allocated lens shading table!";
return;
}
@@ -1145,11 +1361,11 @@ const struct IPAModuleInfo ipaModuleInfo = {
"raspberrypi",
};
-struct ipa_context *ipaCreate()
+IPAInterface *ipaCreate()
{
- return new IPAInterfaceWrapper(std::make_unique<IPARPi>());
+ return new IPARPi();
}
-}; /* extern "C" */
+} /* extern "C" */
} /* namespace libcamera */
diff --git a/src/ipa/rkisp1/meson.build b/src/ipa/rkisp1/meson.build
index ed9a6b6b..f76b37f5 100644
--- a/src/ipa/rkisp1/meson.build
+++ b/src/ipa/rkisp1/meson.build
@@ -3,10 +3,10 @@
ipa_name = 'ipa_rkisp1'
mod = shared_module(ipa_name,
- 'rkisp1.cpp',
+ ['rkisp1.cpp', libcamera_generated_ipa_headers],
name_prefix : '',
include_directories : [ipa_includes, libipa_includes],
- dependencies : libcamera_dep,
+ dependencies : libcamera_private,
link_with : libipa,
install : true,
install_dir : ipa_install_dir)
@@ -15,7 +15,7 @@ if ipa_sign_module
custom_target(ipa_name + '.so.sign',
input : mod,
output : ipa_name + '.so.sign',
- command : [ ipa_sign, ipa_priv_key, '@INPUT@', '@OUTPUT@' ],
+ command : [ipa_sign, ipa_priv_key, '@INPUT@', '@OUTPUT@'],
install : false,
build_by_default : true)
endif
diff --git a/src/ipa/rkisp1/rkisp1.cpp b/src/ipa/rkisp1/rkisp1.cpp
index 07d7f1b2..b881d42e 100644
--- a/src/ipa/rkisp1/rkisp1.cpp
+++ b/src/ipa/rkisp1/rkisp1.cpp
@@ -15,39 +15,34 @@
#include <linux/rkisp1-config.h>
#include <linux/v4l2-controls.h>
-#include <libcamera/buffer.h>
+#include <libcamera/base/log.h>
+
#include <libcamera/control_ids.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/ipa/ipa_interface.h>
#include <libcamera/ipa/ipa_module_info.h>
-#include <libcamera/ipa/rkisp1.h>
+#include <libcamera/ipa/rkisp1_ipa_interface.h>
#include <libcamera/request.h>
-#include <libipa/ipa_interface_wrapper.h>
-
-#include "libcamera/internal/log.h"
-
namespace libcamera {
LOG_DEFINE_CATEGORY(IPARkISP1)
-class IPARkISP1 : public IPAInterface
+namespace ipa::rkisp1 {
+
+class IPARkISP1 : public IPARkISP1Interface
{
public:
- int init([[maybe_unused]] const IPASettings &settings) override
- {
- return 0;
- }
- int start() override { return 0; }
+ int init(unsigned int hwRevision) override;
+ int start() override;
void stop() override {}
- void configure(const CameraSensorInfo &info,
- const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- const IPAOperationData &ipaConfig,
- IPAOperationData *response) override;
+ int configure(const IPACameraSensorInfo &info,
+ const std::map<uint32_t, IPAStream> &streamConfig,
+ const std::map<uint32_t, ControlInfoMap> &entityControls) override;
void mapBuffers(const std::vector<IPABuffer> &buffers) override;
void unmapBuffers(const std::vector<unsigned int> &ids) override;
- void processEvent(const IPAOperationData &event) override;
+ void processEvent(const RkISP1Event &event) override;
private:
void queueRequest(unsigned int frame, rkisp1_params_cfg *params,
@@ -73,33 +68,52 @@ private:
uint32_t maxGain_;
};
+int IPARkISP1::init(unsigned int hwRevision)
+{
+ /* \todo Add support for other revisions */
+ if (hwRevision != RKISP1_V10) {
+ LOG(IPARkISP1, Error)
+ << "Hardware revision " << hwRevision
+ << " is currently not supported";
+ return -ENODEV;
+ }
+
+ LOG(IPARkISP1, Debug) << "Hardware revision is " << hwRevision;
+ return 0;
+}
+
+int IPARkISP1::start()
+{
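+ /* Push the initial exposure and gain to the sensor before streaming starts. */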
+ setControls(0);
+
+ return 0;
+}
+
/**
- * \todo The RkISP1 pipeline currently provides an empty CameraSensorInfo
+ * \todo The RkISP1 pipeline currently provides an empty IPACameraSensorInfo
* if the connected sensor does not provide enough information to properly
* assemble one. Make sure the reported sensor information are relevant
* before accessing them.
*/
-void IPARkISP1::configure([[maybe_unused]] const CameraSensorInfo &info,
- [[maybe_unused]] const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- [[maybe_unused]] const IPAOperationData &ipaConfig,
- [[maybe_unused]] IPAOperationData *result)
+int IPARkISP1::configure([[maybe_unused]] const IPACameraSensorInfo &info,
+ [[maybe_unused]] const std::map<uint32_t, IPAStream> &streamConfig,
+ const std::map<uint32_t, ControlInfoMap> &entityControls)
{
if (entityControls.empty())
- return;
+ return -EINVAL;
ctrls_ = entityControls.at(0);
const auto itExp = ctrls_.find(V4L2_CID_EXPOSURE);
if (itExp == ctrls_.end()) {
LOG(IPARkISP1, Error) << "Can't find exposure control";
- return;
+ return -EINVAL;
}
const auto itGain = ctrls_.find(V4L2_CID_ANALOGUE_GAIN);
if (itGain == ctrls_.end()) {
LOG(IPARkISP1, Error) << "Can't find gain control";
- return;
+ return -EINVAL;
}
autoExposure_ = true;
@@ -116,7 +130,7 @@ void IPARkISP1::configure([[maybe_unused]] const CameraSensorInfo &info,
<< "Exposure: " << minExposure_ << "-" << maxExposure_
<< " Gain: " << minGain_ << "-" << maxGain_;
- setControls(0);
+ return 0;
}
void IPARkISP1::mapBuffers(const std::vector<IPABuffer> &buffers)
@@ -159,12 +173,12 @@ void IPARkISP1::unmapBuffers(const std::vector<unsigned int> &ids)
}
}
-void IPARkISP1::processEvent(const IPAOperationData &event)
+void IPARkISP1::processEvent(const RkISP1Event &event)
{
- switch (event.operation) {
- case RKISP1_IPA_EVENT_SIGNAL_STAT_BUFFER: {
- unsigned int frame = event.data[0];
- unsigned int bufferId = event.data[1];
+ switch (event.op) {
+ case EventSignalStatBuffer: {
+ unsigned int frame = event.frame;
+ unsigned int bufferId = event.bufferId;
const rkisp1_stat_buffer *stats =
static_cast<rkisp1_stat_buffer *>(buffersMemory_[bufferId]);
@@ -172,18 +186,18 @@ void IPARkISP1::processEvent(const IPAOperationData &event)
updateStatistics(frame, stats);
break;
}
- case RKISP1_IPA_EVENT_QUEUE_REQUEST: {
- unsigned int frame = event.data[0];
- unsigned int bufferId = event.data[1];
+ case EventQueueRequest: {
+ unsigned int frame = event.frame;
+ unsigned int bufferId = event.bufferId;
rkisp1_params_cfg *params =
static_cast<rkisp1_params_cfg *>(buffersMemory_[bufferId]);
- queueRequest(frame, params, event.controls[0]);
+ queueRequest(frame, params, event.controls);
break;
}
default:
- LOG(IPARkISP1, Error) << "Unknown event " << event.operation;
+ LOG(IPARkISP1, Error) << "Unknown event " << event.op;
break;
}
}
@@ -203,8 +217,8 @@ void IPARkISP1::queueRequest(unsigned int frame, rkisp1_params_cfg *params,
params->module_en_update = RKISP1_CIF_ISP_MODULE_AEC;
}
- IPAOperationData op;
- op.operation = RKISP1_IPA_ACTION_PARAM_FILLED;
+ RkISP1Action op;
+ op.op = ActionParamFilled;
queueFrameAction.emit(frame, op);
}
@@ -222,7 +236,7 @@ void IPARkISP1::updateStatistics(unsigned int frame,
unsigned int value = 0;
unsigned int num = 0;
- for (int i = 0; i < RKISP1_CIF_ISP_AE_MEAN_MAX; i++) {
+ for (int i = 0; i < RKISP1_CIF_ISP_AE_MEAN_MAX_V10; i++) {
if (ae->exp_mean[i] <= 15)
continue;
@@ -256,13 +270,13 @@ void IPARkISP1::updateStatistics(unsigned int frame,
void IPARkISP1::setControls(unsigned int frame)
{
- IPAOperationData op;
- op.operation = RKISP1_IPA_ACTION_V4L2_SET;
+ RkISP1Action op;
+ op.op = ActionV4L2Set;
ControlList ctrls(ctrls_);
ctrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_));
ctrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_));
- op.controls.push_back(ctrls);
+ op.controls = ctrls;
queueFrameAction.emit(frame, op);
}
@@ -274,13 +288,15 @@ void IPARkISP1::metadataReady(unsigned int frame, unsigned int aeState)
if (aeState)
ctrls.set(controls::AeLocked, aeState == 2);
- IPAOperationData op;
- op.operation = RKISP1_IPA_ACTION_METADATA;
- op.controls.push_back(ctrls);
+ RkISP1Action op;
+ op.op = ActionMetadata;
+ op.controls = ctrls;
queueFrameAction.emit(frame, op);
}
+} /* namespace ipa::rkisp1 */
+
/*
* External IPA module interface
*/
@@ -293,9 +309,9 @@ const struct IPAModuleInfo ipaModuleInfo = {
"rkisp1",
};
-struct ipa_context *ipaCreate()
+IPAInterface *ipaCreate()
{
- return new IPAInterfaceWrapper(std::make_unique<IPARkISP1>());
+ return new ipa::rkisp1::IPARkISP1();
}
}
diff --git a/src/ipa/vimc/data/meson.build b/src/ipa/vimc/data/meson.build
index 6532662c..42ec651c 100644
--- a/src/ipa/vimc/data/meson.build
+++ b/src/ipa/vimc/data/meson.build
@@ -5,4 +5,4 @@ conf_files = files([
])
install_data(conf_files,
- install_dir : join_paths(ipa_data_dir, 'vimc'))
+ install_dir : ipa_data_dir / 'vimc')
diff --git a/src/ipa/vimc/meson.build b/src/ipa/vimc/meson.build
index 8c9df854..ecbeee13 100644
--- a/src/ipa/vimc/meson.build
+++ b/src/ipa/vimc/meson.build
@@ -3,10 +3,10 @@
ipa_name = 'ipa_vimc'
mod = shared_module(ipa_name,
- 'vimc.cpp',
+ ['vimc.cpp', libcamera_generated_ipa_headers],
name_prefix : '',
include_directories : [ipa_includes, libipa_includes],
- dependencies : libcamera_dep,
+ dependencies : libcamera_private,
link_with : libipa,
install : true,
install_dir : ipa_install_dir)
@@ -15,7 +15,7 @@ if ipa_sign_module
custom_target(ipa_name + '.so.sign',
input : mod,
output : ipa_name + '.so.sign',
- command : [ ipa_sign, ipa_priv_key, '@INPUT@', '@OUTPUT@' ],
+ command : [ipa_sign, ipa_priv_key, '@INPUT@', '@OUTPUT@'],
install : false,
build_by_default : true)
endif
diff --git a/src/ipa/vimc/vimc.cpp b/src/ipa/vimc/vimc.cpp
index ef257762..2f575853 100644
--- a/src/ipa/vimc/vimc.cpp
+++ b/src/ipa/vimc/vimc.cpp
@@ -4,8 +4,7 @@
*
* ipa_vimc.cpp - Vimc Image Processing Algorithm module
*/
-
-#include <libcamera/ipa/ipa_vimc.h>
+#include <libcamera/ipa/vimc_ipa_interface.h>
#include <fcntl.h>
#include <string.h>
@@ -14,19 +13,17 @@
#include <iostream>
+#include <libcamera/base/file.h>
+#include <libcamera/base/log.h>
+
#include <libcamera/ipa/ipa_interface.h>
#include <libcamera/ipa/ipa_module_info.h>
-#include <libipa/ipa_interface_wrapper.h>
-
-#include "libcamera/internal/file.h"
-#include "libcamera/internal/log.h"
-
namespace libcamera {
LOG_DEFINE_CATEGORY(IPAVimc)
-class IPAVimc : public IPAInterface
+class IPAVimc : public ipa::vimc::IPAVimcInterface
{
public:
IPAVimc();
@@ -37,18 +34,9 @@ public:
int start() override;
void stop() override;
- void configure([[maybe_unused]] const CameraSensorInfo &sensorInfo,
- [[maybe_unused]] const std::map<unsigned int, IPAStream> &streamConfig,
- [[maybe_unused]] const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- [[maybe_unused]] const IPAOperationData &ipaConfig,
- [[maybe_unused]] IPAOperationData *result) override {}
- void mapBuffers([[maybe_unused]] const std::vector<IPABuffer> &buffers) override {}
- void unmapBuffers([[maybe_unused]] const std::vector<unsigned int> &ids) override {}
- void processEvent([[maybe_unused]] const IPAOperationData &event) override {}
-
private:
void initTrace();
- void trace(enum IPAOperationCode operation);
+ void trace(enum ipa::vimc::IPAOperationCode operation);
int fd_;
};
@@ -61,13 +49,13 @@ IPAVimc::IPAVimc()
IPAVimc::~IPAVimc()
{
- if (fd_)
+ if (fd_ != -1)
::close(fd_);
}
int IPAVimc::init(const IPASettings &settings)
{
- trace(IPAOperationInit);
+ trace(ipa::vimc::IPAOperationInit);
LOG(IPAVimc, Debug)
<< "initializing vimc IPA with configuration file "
@@ -84,7 +72,7 @@ int IPAVimc::init(const IPASettings &settings)
int IPAVimc::start()
{
- trace(IPAOperationStart);
+ trace(ipa::vimc::IPAOperationStart);
LOG(IPAVimc, Debug) << "start vimc IPA!";
@@ -93,7 +81,7 @@ int IPAVimc::start()
void IPAVimc::stop()
{
- trace(IPAOperationStop);
+ trace(ipa::vimc::IPAOperationStop);
LOG(IPAVimc, Debug) << "stop vimc IPA!";
}
@@ -101,11 +89,11 @@ void IPAVimc::stop()
void IPAVimc::initTrace()
{
struct stat fifoStat;
- int ret = stat(VIMC_IPA_FIFO_PATH, &fifoStat);
+ int ret = stat(ipa::vimc::VimcIPAFIFOPath.c_str(), &fifoStat);
if (ret)
return;
- ret = ::open(VIMC_IPA_FIFO_PATH, O_WRONLY);
+ ret = ::open(ipa::vimc::VimcIPAFIFOPath.c_str(), O_WRONLY);
if (ret < 0) {
ret = errno;
LOG(IPAVimc, Error) << "Failed to open vimc IPA test FIFO: "
@@ -116,7 +104,7 @@ void IPAVimc::initTrace()
fd_ = ret;
}
-void IPAVimc::trace(enum IPAOperationCode operation)
+void IPAVimc::trace(enum ipa::vimc::IPAOperationCode operation)
{
if (fd_ < 0)
return;
@@ -141,9 +129,9 @@ const struct IPAModuleInfo ipaModuleInfo = {
"vimc",
};
-struct ipa_context *ipaCreate()
+IPAInterface *ipaCreate()
{
- return new IPAInterfaceWrapper(std::make_unique<IPAVimc>());
+ return new IPAVimc();
}
}
diff --git a/src/lc-compliance/capture_test.cpp b/src/lc-compliance/capture_test.cpp
new file mode 100644
index 00000000..52578207
--- /dev/null
+++ b/src/lc-compliance/capture_test.cpp
@@ -0,0 +1,128 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ * Copyright (C) 2021, Collabora Ltd.
+ *
+ * capture_test.cpp - Test camera capture
+ */
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+#include "environment.h"
+#include "simple_capture.h"
+
+using namespace libcamera;
+
+const std::vector<int> NUMREQUESTS = { 1, 2, 3, 5, 8, 13, 21, 34, 55, 89 };
+const std::vector<StreamRole> ROLES = { Raw, StillCapture, VideoRecording, Viewfinder };
+
+class SingleStream : public testing::TestWithParam<std::tuple<StreamRole, int>>
+{
+public:
+ static std::string nameParameters(const testing::TestParamInfo<SingleStream::ParamType> &info);
+
+protected:
+ void SetUp() override;
+ void TearDown() override;
+
+ std::shared_ptr<Camera> camera_;
+};
+
+/*
+ * We use gtest's SetUp() and TearDown() instead of the constructor and
+ * destructor so that gtest assertions can be used in them.
+ */
+void SingleStream::SetUp()
+{
+ Environment *env = Environment::get();
+
+ camera_ = env->cm()->get(env->cameraId());
+
+ ASSERT_EQ(camera_->acquire(), 0);
+}
+
+void SingleStream::TearDown()
+{
+ if (!camera_)
+ return;
+
+ camera_->release();
+ camera_.reset();
+}
+
+std::string SingleStream::nameParameters(const testing::TestParamInfo<SingleStream::ParamType> &info)
+{
+ std::map<StreamRole, std::string> rolesMap = { { Raw, "Raw" },
+ { StillCapture, "StillCapture" },
+ { VideoRecording, "VideoRecording" },
+ { Viewfinder, "Viewfinder" } };
+
+ std::string roleName = rolesMap[std::get<0>(info.param)];
+ std::string numRequestsName = std::to_string(std::get<1>(info.param));
+
+ return roleName + "_" + numRequestsName;
+}
+
+/*
+ * Test single capture cycles
+ *
+ * Makes sure the camera completes the exact number of requests queued. An
+ * example failure is a camera that completes fewer requests than were queued.
+ */
+TEST_P(SingleStream, Capture)
+{
+ auto [role, numRequests] = GetParam();
+
+ SimpleCaptureBalanced capture(camera_);
+
+ capture.configure(role);
+
+ capture.capture(numRequests);
+}
+
+/*
+ * Test multiple start/stop cycles
+ *
+ * Makes sure the camera supports multiple start/stop cycles. Example failure is
+ * a camera that does not clean up correctly in its error path but is only
+ * tested by single-capture applications.
+ */
+TEST_P(SingleStream, CaptureStartStop)
+{
+ auto [role, numRequests] = GetParam();
+ unsigned int numRepeats = 3;
+
+ SimpleCaptureBalanced capture(camera_);
+
+ capture.configure(role);
+
+ for (unsigned int starts = 0; starts < numRepeats; starts++)
+ capture.capture(numRequests);
+}
+
+/*
+ * Test unbalanced stop
+ *
+ * Makes sure the camera supports a stop with requests queued. Example failure
+ * is a camera that does not handle cancellation of buffers coming back from the
+ * video device while stopping.
+ */
+TEST_P(SingleStream, UnbalancedStop)
+{
+ auto [role, numRequests] = GetParam();
+
+ SimpleCaptureUnbalanced capture(camera_);
+
+ capture.configure(role);
+
+ capture.capture(numRequests);
+}
+
+INSTANTIATE_TEST_SUITE_P(CaptureTests,
+ SingleStream,
+ testing::Combine(testing::ValuesIn(ROLES),
+ testing::ValuesIn(NUMREQUESTS)),
+ SingleStream::nameParameters);
diff --git a/src/lc-compliance/environment.cpp b/src/lc-compliance/environment.cpp
new file mode 100644
index 00000000..9e24b5e3
--- /dev/null
+++ b/src/lc-compliance/environment.cpp
@@ -0,0 +1,20 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2021, Collabora Ltd.
+ *
+ * environment.cpp - Common environment for tests
+ */
+
+#include "environment.h"
+
+Environment *Environment::get()
+{
+ static Environment instance;
+ return &instance;
+}
+
+void Environment::setup(CameraManager *cm, std::string cameraId)
+{
+ cm_ = cm;
+ cameraId_ = cameraId;
+}
diff --git a/src/lc-compliance/environment.h b/src/lc-compliance/environment.h
new file mode 100644
index 00000000..1c7d9a55
--- /dev/null
+++ b/src/lc-compliance/environment.h
@@ -0,0 +1,31 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2021, Collabora Ltd.
+ *
+ * environment.h - Common environment for tests
+ */
+#ifndef __LC_COMPLIANCE_ENVIRONMENT_H__
+#define __LC_COMPLIANCE_ENVIRONMENT_H__
+
+#include <libcamera/libcamera.h>
+
+using namespace libcamera;
+
+class Environment
+{
+public:
+ static Environment *get();
+
+ void setup(CameraManager *cm, std::string cameraId);
+
+ const std::string &cameraId() const { return cameraId_; }
+ CameraManager *cm() const { return cm_; }
+
+private:
+ Environment() = default;
+
+ std::string cameraId_;
+ CameraManager *cm_;
+};
+
+#endif /* __LC_COMPLIANCE_ENVIRONMENT_H__ */
diff --git a/src/lc-compliance/main.cpp b/src/lc-compliance/main.cpp
new file mode 100644
index 00000000..7eb52ae4
--- /dev/null
+++ b/src/lc-compliance/main.cpp
@@ -0,0 +1,193 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ * Copyright (C) 2021, Collabora Ltd.
+ *
+ * main.cpp - lc-compliance - The libcamera compliance tool
+ */
+
+#include <iomanip>
+#include <iostream>
+#include <string.h>
+
+#include <gtest/gtest.h>
+
+#include <libcamera/libcamera.h>
+
+#include "environment.h"
+#include "../cam/options.h"
+
+using namespace libcamera;
+
+enum {
+ OptCamera = 'c',
+ OptList = 'l',
+ OptFilter = 'f',
+ OptHelp = 'h',
+};
+
+/*
+ * Make asserts act like exceptions, otherwise they only fail (or skip) the
+ * current function. From gtest documentation:
+ * https://google.github.io/googletest/advanced.html#asserting-on-subroutines-with-an-exception
+ */
+class ThrowListener : public testing::EmptyTestEventListener
+{
+ void OnTestPartResult(const testing::TestPartResult &result) override
+ {
+ if (result.type() == testing::TestPartResult::kFatalFailure ||
+ result.type() == testing::TestPartResult::kSkip)
+ throw testing::AssertionException(result);
+ }
+};
+
+static void listCameras(CameraManager *cm)
+{
+ for (const std::shared_ptr<Camera> &cam : cm->cameras())
+ std::cout << "- " << cam->id() << std::endl;
+}
+
+static int initCamera(CameraManager *cm, OptionsParser::Options options)
+{
+ std::shared_ptr<Camera> camera;
+
+ int ret = cm->start();
+ if (ret) {
+ std::cout << "Failed to start camera manager: "
+ << strerror(-ret) << std::endl;
+ return ret;
+ }
+
+ if (!options.isSet(OptCamera)) {
+ std::cout << "No camera specified, available cameras:" << std::endl;
+ listCameras(cm);
+ return -ENODEV;
+ }
+
+ const std::string &cameraId = options[OptCamera];
+ camera = cm->get(cameraId);
+ if (!camera) {
+ std::cout << "Camera " << cameraId << " not found, available cameras:" << std::endl;
+ listCameras(cm);
+ return -ENODEV;
+ }
+
+ Environment::get()->setup(cm, cameraId);
+
+ std::cout << "Using camera " << cameraId << std::endl;
+
+ return 0;
+}
+
+static int initGtestParameters(char *arg0, OptionsParser::Options options)
+{
+ const std::map<std::string, std::string> gtestFlags = { { "list", "--gtest_list_tests" },
+ { "filter", "--gtest_filter" } };
+
+ int argc = 0;
+ std::string filterParam;
+
+ /*
+ * +2 to leave room for both the 0th argument, which is required but
+ * unused, and the terminating null.
+ */
+ char **argv = new char *[(gtestFlags.size() + 2)];
+ if (!argv)
+ return -ENOMEM;
+
+ argv[0] = arg0;
+ argc++;
+
+ if (options.isSet(OptList)) {
+ argv[argc] = const_cast<char *>(gtestFlags.at("list").c_str());
+ argc++;
+ }
+
+ if (options.isSet(OptFilter)) {
+ /*
+ * The filter flag needs to be passed as a single parameter, in
+ * the format --gtest_filter=filterStr
+ */
+ filterParam = gtestFlags.at("filter") + "=" +
+ static_cast<const std::string &>(options[OptFilter]);
+
+ argv[argc] = const_cast<char *>(filterParam.c_str());
+ argc++;
+ }
+
+ argv[argc] = nullptr;
+
+ ::testing::InitGoogleTest(&argc, argv);
+
+ delete[] argv;
+
+ return 0;
+}
+
+static int initGtest(char *arg0, OptionsParser::Options options)
+{
+ int ret = initGtestParameters(arg0, options);
+ if (ret)
+ return ret;
+
+ testing::UnitTest::GetInstance()->listeners().Append(new ThrowListener);
+
+ return 0;
+}
+
+static int parseOptions(int argc, char **argv, OptionsParser::Options *options)
+{
+ OptionsParser parser;
+ parser.addOption(OptCamera, OptionString,
+ "Specify which camera to operate on, by id", "camera",
+ ArgumentRequired, "camera");
+ parser.addOption(OptList, OptionNone, "List all tests and exit", "list");
+ parser.addOption(OptFilter, OptionString,
+ "Specify which tests to run", "filter",
+ ArgumentRequired, "filter");
+ parser.addOption(OptHelp, OptionNone, "Display this help message",
+ "help");
+
+ *options = parser.parse(argc, argv);
+ if (!options->valid())
+ return -EINVAL;
+
+ if (options->isSet(OptHelp)) {
+ parser.usage();
+ std::cerr << "Further options from Googletest can be passed as environment variables"
+ << std::endl;
+ return -EINTR;
+ }
+
+ return 0;
+}
+
+int main(int argc, char **argv)
+{
+ OptionsParser::Options options;
+ int ret = parseOptions(argc, argv, &options);
+ if (ret == -EINTR)
+ return EXIT_SUCCESS;
+ if (ret < 0)
+ return EXIT_FAILURE;
+
+ std::unique_ptr<CameraManager> cm = std::make_unique<CameraManager>();
+
+ /* No need to initialize the camera if we'll just list tests */
+ if (!options.isSet(OptList)) {
+ ret = initCamera(cm.get(), options);
+ if (ret)
+ return ret;
+ }
+
+ ret = initGtest(argv[0], options);
+ if (ret)
+ return ret;
+
+ ret = RUN_ALL_TESTS();
+
+ if (!options.isSet(OptList))
+ cm->stop();
+
+ return ret;
+}
diff --git a/src/lc-compliance/meson.build b/src/lc-compliance/meson.build
new file mode 100644
index 00000000..aa5852f6
--- /dev/null
+++ b/src/lc-compliance/meson.build
@@ -0,0 +1,30 @@
+# SPDX-License-Identifier: CC0-1.0
+
+libevent = dependency('libevent_pthreads', required : get_option('lc-compliance'))
+libgtest = dependency('gtest', required : get_option('lc-compliance'))
+
+if not (libevent.found() and libgtest.found())
+ lc_compliance_enabled = false
+ subdir_done()
+endif
+
+lc_compliance_enabled = true
+
+lc_compliance_sources = files([
+ '../cam/event_loop.cpp',
+ '../cam/options.cpp',
+ 'environment.cpp',
+ 'main.cpp',
+ 'simple_capture.cpp',
+ 'capture_test.cpp',
+])
+
+lc_compliance = executable('lc-compliance', lc_compliance_sources,
+ cpp_args : [ '-fexceptions' ],
+ dependencies : [
+ libatomic,
+ libcamera_public,
+ libevent,
+ libgtest,
+ ],
+ install : true)
diff --git a/src/lc-compliance/simple_capture.cpp b/src/lc-compliance/simple_capture.cpp
new file mode 100644
index 00000000..25097f28
--- /dev/null
+++ b/src/lc-compliance/simple_capture.cpp
@@ -0,0 +1,191 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2020-2021, Google Inc.
+ *
+ * simple_capture.cpp - Simple capture helper
+ */
+
+#include <gtest/gtest.h>
+
+#include "simple_capture.h"
+
+using namespace libcamera;
+
+SimpleCapture::SimpleCapture(std::shared_ptr<Camera> camera)
+ : loop_(nullptr), camera_(camera),
+ allocator_(std::make_unique<FrameBufferAllocator>(camera))
+{
+}
+
+SimpleCapture::~SimpleCapture()
+{
+ stop();
+}
+
+void SimpleCapture::configure(StreamRole role)
+{
+ config_ = camera_->generateConfiguration({ role });
+
+ if (!config_) {
+ std::cout << "Role not supported by camera" << std::endl;
+ GTEST_SKIP();
+ }
+
+ if (config_->validate() != CameraConfiguration::Valid) {
+ config_.reset();
+ FAIL() << "Configuration not valid";
+ }
+
+ if (camera_->configure(config_.get())) {
+ config_.reset();
+ FAIL() << "Failed to configure camera";
+ }
+}
+
+void SimpleCapture::start()
+{
+ Stream *stream = config_->at(0).stream();
+ int count = allocator_->allocate(stream);
+
+ ASSERT_GE(count, 0) << "Failed to allocate buffers";
+ EXPECT_EQ(count, config_->at(0).bufferCount) << "Allocated fewer buffers than expected";
+
+ camera_->requestCompleted.connect(this, &SimpleCapture::requestComplete);
+
+ ASSERT_EQ(camera_->start(), 0) << "Failed to start camera";
+}
+
+void SimpleCapture::stop()
+{
+ if (!config_ || !allocator_->allocated())
+ return;
+
+ camera_->stop();
+
+ camera_->requestCompleted.disconnect(this, &SimpleCapture::requestComplete);
+
+ Stream *stream = config_->at(0).stream();
+ allocator_->free(stream);
+}
+
+/* SimpleCaptureBalanced */
+
+SimpleCaptureBalanced::SimpleCaptureBalanced(std::shared_ptr<Camera> camera)
+ : SimpleCapture(camera)
+{
+}
+
+void SimpleCaptureBalanced::capture(unsigned int numRequests)
+{
+ start();
+
+ Stream *stream = config_->at(0).stream();
+ const std::vector<std::unique_ptr<FrameBuffer>> &buffers = allocator_->buffers(stream);
+
+ /* No point in testing fewer requests than the camera's buffer depth. */
+ if (buffers.size() > numRequests) {
+ std::cout << "Camera needs " + std::to_string(buffers.size())
+ + " requests, can't test only "
+ + std::to_string(numRequests) << std::endl;
+ GTEST_SKIP();
+ }
+
+ queueCount_ = 0;
+ captureCount_ = 0;
+ captureLimit_ = numRequests;
+
+ /* Queue the recommended number of requests. */
+ std::vector<std::unique_ptr<libcamera::Request>> requests;
+ for (const std::unique_ptr<FrameBuffer> &buffer : buffers) {
+ std::unique_ptr<Request> request = camera_->createRequest();
+ ASSERT_TRUE(request) << "Can't create request";
+
+ ASSERT_EQ(request->addBuffer(stream, buffer.get()), 0) << "Can't set buffer for request";
+
+ ASSERT_EQ(queueRequest(request.get()), 0) << "Failed to queue request";
+
+ requests.push_back(std::move(request));
+ }
+
+ /* Run capture session. */
+ loop_ = new EventLoop();
+ loop_->exec();
+ stop();
+ delete loop_;
+
+ ASSERT_EQ(captureCount_, captureLimit_);
+}
+
+int SimpleCaptureBalanced::queueRequest(Request *request)
+{
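+ /*
+ * Once captureLimit_ requests have been queued, report success
+ * without queueing, so that exactly captureLimit_ completions occur.
+ */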
+ queueCount_++;
+ if (queueCount_ > captureLimit_)
+ return 0;
+
+ return camera_->queueRequest(request);
+}
+
+void SimpleCaptureBalanced::requestComplete(Request *request)
+{
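+ /* Stop the event loop at the capture limit; otherwise recycle the request. */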
+ captureCount_++;
+ if (captureCount_ >= captureLimit_) {
+ loop_->exit(0);
+ return;
+ }
+
+ request->reuse(Request::ReuseBuffers);
+ if (queueRequest(request))
+ loop_->exit(-EINVAL);
+}
+
+/* SimpleCaptureUnbalanced */
+
+SimpleCaptureUnbalanced::SimpleCaptureUnbalanced(std::shared_ptr<Camera> camera)
+ : SimpleCapture(camera)
+{
+}
+
+void SimpleCaptureUnbalanced::capture(unsigned int numRequests)
+{
+ start();
+
+ Stream *stream = config_->at(0).stream();
+ const std::vector<std::unique_ptr<FrameBuffer>> &buffers = allocator_->buffers(stream);
+
+ captureCount_ = 0;
+ captureLimit_ = numRequests;
+
+ /* Queue the recommended number of requests. */
+ std::vector<std::unique_ptr<libcamera::Request>> requests;
+ for (const std::unique_ptr<FrameBuffer> &buffer : buffers) {
+ std::unique_ptr<Request> request = camera_->createRequest();
+ ASSERT_TRUE(request) << "Can't create request";
+
+ ASSERT_EQ(request->addBuffer(stream, buffer.get()), 0) << "Can't set buffer for request";
+
+ ASSERT_EQ(camera_->queueRequest(request.get()), 0) << "Failed to queue request";
+
+ requests.push_back(std::move(request));
+ }
+
+ /* Run capture session. */
+ loop_ = new EventLoop();
+ int status = loop_->exec();
+ stop();
+ delete loop_;
+
+ ASSERT_EQ(status, 0);
+}
+
+void SimpleCaptureUnbalanced::requestComplete(Request *request)
+{
+ captureCount_++;
+ if (captureCount_ >= captureLimit_) {
+ loop_->exit(0);
+ return;
+ }
+
+ request->reuse(Request::ReuseBuffers);
+ if (camera_->queueRequest(request))
+ loop_->exit(-EINVAL);
+}
diff --git a/src/lc-compliance/simple_capture.h b/src/lc-compliance/simple_capture.h
new file mode 100644
index 00000000..100ffd66
--- /dev/null
+++ b/src/lc-compliance/simple_capture.h
@@ -0,0 +1,67 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2020-2021, Google Inc.
+ *
+ * simple_capture.h - Simple capture helper
+ */
+#ifndef __LC_COMPLIANCE_SIMPLE_CAPTURE_H__
+#define __LC_COMPLIANCE_SIMPLE_CAPTURE_H__
+
+#include <memory>
+
+#include <libcamera/libcamera.h>
+
+#include "../cam/event_loop.h"
+
+class SimpleCapture
+{
+public:
+ void configure(libcamera::StreamRole role);
+
+protected:
+ SimpleCapture(std::shared_ptr<libcamera::Camera> camera);
+ virtual ~SimpleCapture();
+
+ void start();
+ void stop();
+
+ virtual void requestComplete(libcamera::Request *request) = 0;
+
+ EventLoop *loop_;
+
+ std::shared_ptr<libcamera::Camera> camera_;
+ std::unique_ptr<libcamera::FrameBufferAllocator> allocator_;
+ std::unique_ptr<libcamera::CameraConfiguration> config_;
+};
+
+class SimpleCaptureBalanced : public SimpleCapture
+{
+public:
+ SimpleCaptureBalanced(std::shared_ptr<libcamera::Camera> camera);
+
+ void capture(unsigned int numRequests);
+
+private:
+ int queueRequest(libcamera::Request *request);
+ void requestComplete(libcamera::Request *request) override;
+
+ unsigned int queueCount_;
+ unsigned int captureCount_;
+ unsigned int captureLimit_;
+};
+
+class SimpleCaptureUnbalanced : public SimpleCapture
+{
+public:
+ SimpleCaptureUnbalanced(std::shared_ptr<libcamera::Camera> camera);
+
+ void capture(unsigned int numRequests);
+
+private:
+ void requestComplete(libcamera::Request *request) override;
+
+ unsigned int captureCount_;
+ unsigned int captureLimit_;
+};
+
+#endif /* __LC_COMPLIANCE_SIMPLE_CAPTURE_H__ */
diff --git a/src/libcamera/bound_method.cpp b/src/libcamera/base/bound_method.cpp
index 9993e596..3ecec51c 100644
--- a/src/libcamera/bound_method.cpp
+++ b/src/libcamera/base/bound_method.cpp
@@ -5,14 +5,13 @@
* bound_method.cpp - Method bind and invocation
*/
-#include <libcamera/bound_method.h>
-
-#include "libcamera/internal/message.h"
-#include "libcamera/internal/semaphore.h"
-#include "libcamera/internal/thread.h"
+#include <libcamera/base/bound_method.h>
+#include <libcamera/base/message.h>
+#include <libcamera/base/semaphore.h>
+#include <libcamera/base/thread.h>
/**
- * \file bound_method.h
+ * \file base/bound_method.h
* \brief Method bind and invocation
*/
@@ -26,22 +25,22 @@ namespace libcamera {
* between a sender and a receiver. It applies to Signal::emit() and
* Object::invokeMethod().
*
- * \var ConnectionType::ConnectionTypeAuto
+ * \var ConnectionTypeAuto
* \brief If the sender and the receiver live in the same thread,
* ConnectionTypeDirect is used. Otherwise ConnectionTypeQueued is used.
*
- * \var ConnectionType::ConnectionTypeDirect
+ * \var ConnectionTypeDirect
* \brief The receiver is invoked immediately and synchronously in the sender's
* thread.
*
- * \var ConnectionType::ConnectionTypeQueued
+ * \var ConnectionTypeQueued
* \brief The receiver is invoked asynchronously
*
* Invoke the receiver asynchronously in its thread when control returns to the
* thread's event loop. The sender proceeds without waiting for the invocation
* to complete.
*
- * \var ConnectionType::ConnectionTypeBlocking
+ * \var ConnectionTypeBlocking
* \brief The receiver is invoked synchronously
*
* If the sender and the receiver live in the same thread, this is equivalent to
diff --git a/src/libcamera/base/class.cpp b/src/libcamera/base/class.cpp
new file mode 100644
index 00000000..26b49677
--- /dev/null
+++ b/src/libcamera/base/class.cpp
@@ -0,0 +1,181 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * class.cpp - Utilities and helpers for classes
+ */
+
+#include <libcamera/base/class.h>
+
+/**
+ * \file class.h
+ * \brief Utilities to help constructing class interfaces
+ *
+ * The extensible class can be inherited to create public classes with stable
+ * ABIs.
+ */
+
+namespace libcamera {
+
+/**
+ * \def LIBCAMERA_DISABLE_COPY
+ * \brief Disable copy construction and assignment of the \a klass
+ * \param klass The name of the class
+ *
+ * Example usage:
+ * \code{.cpp}
+ * class NonCopyable
+ * {
+ * public:
+ * NonCopyable();
+ * ...
+ *
+ * private:
+ * LIBCAMERA_DISABLE_COPY(NonCopyable)
+ * };
+ * \endcode
+ */
+
+/**
+ * \def LIBCAMERA_DISABLE_MOVE
+ * \brief Disable move construction and assignment of the \a klass
+ * \param klass The name of the class
+ *
+ * Example usage:
+ * \code{.cpp}
+ * class NonMoveable
+ * {
+ * public:
+ * NonMoveable();
+ * ...
+ *
+ * private:
+ * LIBCAMERA_DISABLE_MOVE(NonMoveable)
+ * };
+ * \endcode
+ */
+
+/**
+ * \def LIBCAMERA_DISABLE_COPY_AND_MOVE
+ * \brief Disable copy and move construction and assignment of the \a klass
+ * \param klass The name of the class
+ *
+ * Example usage:
+ * \code{.cpp}
+ * class NonCopyableNonMoveable
+ * {
+ * public:
+ * NonCopyableNonMoveable();
+ * ...
+ *
+ * private:
+ * LIBCAMERA_DISABLE_COPY_AND_MOVE(NonCopyableNonMoveable)
+ * };
+ * \endcode
+ */
+
+/**
+ * \def LIBCAMERA_DECLARE_PRIVATE
+ * \brief Declare private data for a public class
+ *
+ * The LIBCAMERA_DECLARE_PRIVATE() macro plumbs the infrastructure necessary to
+ * make a class manage its private data through a d-pointer. It shall be used at
+ * the very top of the class definition.
+ */
+
+/**
+ * \def LIBCAMERA_DECLARE_PUBLIC
+ * \brief Declare public data for a private class
+ * \param klass The public class name
+ *
+ * The LIBCAMERA_DECLARE_PUBLIC() macro is the counterpart of
+ * LIBCAMERA_DECLARE_PRIVATE() to be used in the private data class. It shall be
+ * used at the very top of the private class definition, with the public class
+ * name passed as the \a klass parameter.
+ */
+
+/**
+ * \def LIBCAMERA_O_PTR()
+ * \brief Retrieve the public instance corresponding to the private data
+ *
+ * This macro is used in any member function of the private data class to access
+ * the public class instance corresponding to the private data.
+ */
+
+/**
+ * \class Extensible
+ * \brief Base class to manage private data through a d-pointer
+ *
+ * The Extensible class provides a base class to implement the
+ * <a href="https://wiki.qt.io/D-Pointer">d-pointer</a> design pattern (also
+ * known as <a href="https://en.wikipedia.org/wiki/Opaque_pointer">opaque pointer</a>
+ * or <a href="https://en.cppreference.com/w/cpp/language/pimpl">pImpl idiom</a>).
+ * It helps create public classes that can be extended without breaking their
+ * ABI. Such classes store their private data in a separate private data object,
+ * referenced by a pointer in the public class (hence the name d-pointer).
+ *
+ * Classes that follow this design pattern are referred to herein as extensible
+ * classes. To be extensible, a class PublicClass shall:
+ *
+ * - inherit from the Extensible class or from another extensible class
+ * - invoke the LIBCAMERA_DECLARE_PRIVATE() macro at the very top of the class
+ * definition
+ * - define a private data class named PublicClass::Private that inherits from
+ * the Private data class of the base class
+ * - invoke the LIBCAMERA_DECLARE_PUBLIC() macro at the very top of the Private
+ * data class definition
+ * - pass a pointer to a newly allocated Private data object to the constructor
+ * of the base class
+ *
+ * Additionally, if the PublicClass is not final, it shall expose one or more
+ * constructors that take a pointer to a Private data instance, to be used by
+ * derived classes.
+ *
+ * The Private class is fully opaque to users of the libcamera public API.
+ * Internally, it can be kept private to the implementation of PublicClass, or
+ * be exposed to other classes. In the latter case, the members of the Private
+ * class need to be qualified with appropriate access specifiers. The
+ * PublicClass and Private classes always have full access to each other's
+ * protected and private members.
+ *
+ * The PublicClass exposes its Private data pointer through the _d() function.
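+ *
+ * A minimal sketch of a class following these rules is shown below. The
+ * class and member names are hypothetical, chosen for illustration only:
+ *
+ * \code{.cpp}
+ * class MyClass : public Extensible
+ * {
+ * 	LIBCAMERA_DECLARE_PRIVATE()
+ *
+ * public:
+ * 	MyClass();
+ * };
+ *
+ * class MyClass::Private : public Extensible::Private
+ * {
+ * 	LIBCAMERA_DECLARE_PUBLIC(MyClass)
+ *
+ * public:
+ * 	Private(MyClass *o)
+ * 		: Extensible::Private(o)
+ * 	{
+ * 	}
+ * };
+ *
+ * MyClass::MyClass()
+ * 	: Extensible(new Private(this))
+ * {
+ * }
+ * \endcode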
+ */
+
+/**
+ * \brief Construct an instance of an Extensible class
+ * \param[in] d Pointer to the private data instance
+ */
+Extensible::Extensible(Extensible::Private *d)
+ : d_(d)
+{
+}
+
+/**
+ * \var Extensible::d_
+ * \brief Pointer to the private data instance
+ */
+
+/**
+ * \class Extensible::Private
+ * \brief Base class for private data managed through a d-pointer
+ */
+
+/**
+ * \brief Construct an instance of an Extensible class private data
+ * \param[in] o Pointer to the public class object
+ */
+Extensible::Private::Private(Extensible *o)
+ : o_(o)
+{
+}
+
+Extensible::Private::~Private()
+{
+}
+
+/**
+ * \var Extensible::Private::o_
+ * \brief Pointer to the public class object
+ */
+
+} /* namespace libcamera */
diff --git a/src/libcamera/event_dispatcher.cpp b/src/libcamera/base/event_dispatcher.cpp
index 90bd5daf..4be89e81 100644
--- a/src/libcamera/event_dispatcher.cpp
+++ b/src/libcamera/base/event_dispatcher.cpp
@@ -5,12 +5,11 @@
* event_dispatcher.cpp - Event dispatcher
*/
-#include <libcamera/event_dispatcher.h>
-
-#include "libcamera/internal/log.h"
+#include <libcamera/base/event_dispatcher.h>
+#include <libcamera/base/log.h>
/**
- * \file event_dispatcher.h
+ * \file base/event_dispatcher.h
*/
namespace libcamera {
diff --git a/src/libcamera/event_dispatcher_poll.cpp b/src/libcamera/base/event_dispatcher_poll.cpp
index 9ab85da7..5839373a 100644
--- a/src/libcamera/event_dispatcher_poll.cpp
+++ b/src/libcamera/base/event_dispatcher_poll.cpp
@@ -5,7 +5,7 @@
* event_dispatcher_poll.cpp - Poll-based event dispatcher
*/
-#include "libcamera/internal/event_dispatcher_poll.h"
+#include <libcamera/base/event_dispatcher_poll.h>
#include <algorithm>
#include <chrono>
@@ -16,15 +16,14 @@
#include <sys/eventfd.h>
#include <unistd.h>
-#include <libcamera/event_notifier.h>
-#include <libcamera/timer.h>
-
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/thread.h"
-#include "libcamera/internal/utils.h"
+#include <libcamera/base/event_notifier.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/thread.h>
+#include <libcamera/base/timer.h>
+#include <libcamera/base/utils.h>
/**
- * \file event_dispatcher_poll.h
+ * \file base/event_dispatcher_poll.h
*/
namespace libcamera {
diff --git a/src/libcamera/event_notifier.cpp b/src/libcamera/base/event_notifier.cpp
index 21c07faf..fd93c087 100644
--- a/src/libcamera/event_notifier.cpp
+++ b/src/libcamera/base/event_notifier.cpp
@@ -5,13 +5,13 @@
* event_notifier.cpp - File descriptor event notifier
*/
-#include <libcamera/event_notifier.h>
+#include <libcamera/base/event_notifier.h>
-#include <libcamera/camera_manager.h>
-#include <libcamera/event_dispatcher.h>
+#include <libcamera/base/event_dispatcher.h>
+#include <libcamera/base/message.h>
+#include <libcamera/base/thread.h>
-#include "libcamera/internal/message.h"
-#include "libcamera/internal/thread.h"
+#include <libcamera/camera_manager.h>
/**
* \file event_notifier.h
diff --git a/src/libcamera/file.cpp b/src/libcamera/base/file.cpp
index 04b0cb61..073666fa 100644
--- a/src/libcamera/file.cpp
+++ b/src/libcamera/base/file.cpp
@@ -5,7 +5,7 @@
* file.cpp - File I/O operations
*/
-#include "libcamera/internal/file.h"
+#include <libcamera/base/file.h>
#include <errno.h>
#include <fcntl.h>
@@ -14,16 +14,16 @@
#include <sys/types.h>
#include <unistd.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
/**
- * \file file.h
+ * \file base/file.h
* \brief File I/O operations
*/
namespace libcamera {
-LOG_DEFINE_CATEGORY(File);
+LOG_DEFINE_CATEGORY(File)
/**
* \class File
@@ -458,7 +458,8 @@ bool File::exists(const std::string &name)
if (ret < 0)
return false;
- return true;
+ /* Directories cannot be handled here, even if they exist. */
+ return !S_ISDIR(st.st_mode);
}
} /* namespace libcamera */
diff --git a/src/libcamera/log.cpp b/src/libcamera/base/log.cpp
index 180eb97b..1801ae26 100644
--- a/src/libcamera/log.cpp
+++ b/src/libcamera/base/log.cpp
@@ -5,8 +5,9 @@
* log.cpp - Logging infrastructure
*/
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
+#include <array>
#if HAVE_BACKTRACE
#include <execinfo.h>
#endif
@@ -22,11 +23,11 @@
#include <libcamera/logging.h>
-#include "libcamera/internal/thread.h"
-#include "libcamera/internal/utils.h"
+#include <libcamera/base/thread.h>
+#include <libcamera/base/utils.h>
/**
- * \file log.h
+ * \file base/log.h
* \brief Logging infrastructure
*
* libcamera includes a logging infrastructure used through the library that
@@ -91,7 +92,7 @@ static const char *log_severity_name(LogSeverity severity)
"FATAL",
};
- if (static_cast<unsigned int>(severity) < ARRAY_SIZE(names))
+ if (static_cast<unsigned int>(severity) < std::size(names))
return names[severity];
else
return "UNKWN";
@@ -247,6 +248,8 @@ void LogOutput::writeStream(const std::string &str)
class Logger
{
public:
+ ~Logger();
+
static Logger *instance();
void write(const LogMessage &msg);
@@ -266,7 +269,6 @@ private:
friend LogCategory;
void registerCategory(LogCategory *category);
- void unregisterCategory(LogCategory *category);
std::unordered_set<LogCategory *> categories_;
std::list<std::pair<std::string, LogSeverity>> levels_;
@@ -368,6 +370,12 @@ void logSetLevel(const char *category, const char *level)
Logger::instance()->logSetLevel(category, level);
}
+Logger::~Logger()
+{
+ for (LogCategory *category : categories_)
+ delete category;
+}
+
/**
* \brief Retrieve the logger instance
*
@@ -406,7 +414,7 @@ void Logger::backtrace()
return;
void *buffer[32];
- int num_entries = ::backtrace(buffer, ARRAY_SIZE(buffer));
+ int num_entries = ::backtrace(buffer, std::size(buffer));
char **strings = backtrace_symbols(buffer, num_entries);
if (!strings)
return;
@@ -620,7 +628,7 @@ LogSeverity Logger::parseLogLevel(const std::string &level)
severity = LogInvalid;
} else {
severity = LogInvalid;
- for (unsigned int i = 0; i < ARRAY_SIZE(names); ++i) {
+ for (unsigned int i = 0; i < std::size(names); ++i) {
if (names[i] == level) {
severity = i;
break;
@@ -665,18 +673,6 @@ void Logger::registerCategory(LogCategory *category)
}
/**
- * \brief Unregister a log category from the logger
- * \param[in] category The log category
- *
- * If the \a category hasn't been registered with the logger this function
- * performs no operation.
- */
-void Logger::unregisterCategory(LogCategory *category)
-{
- categories_.erase(category);
-}
-
-/**
* \enum LogSeverity
* Log message severity
* \var LogDebug
@@ -710,11 +706,6 @@ LogCategory::LogCategory(const char *name)
Logger::instance()->registerCategory(this);
}
-LogCategory::~LogCategory()
-{
- Logger::instance()->unregisterCategory(this);
-}
-
/**
* \fn LogCategory::name()
* \brief Retrieve the log category name
@@ -745,12 +736,12 @@ void LogCategory::setSeverity(LogSeverity severity)
* The default log category is named "default" and is used by the LOG() macro
* when no log category is specified.
*
- * \return A pointer to the default log category
+ * \return A reference to the default log category
*/
const LogCategory &LogCategory::defaultCategory()
{
- static const LogCategory category("default");
- return category;
+ static const LogCategory *category = new LogCategory("default");
+ return *category;
}
/**
@@ -763,24 +754,6 @@ const LogCategory &LogCategory::defaultCategory()
*/
/**
- * \brief Construct a log message for the default category
- * \param[in] fileName The file name where the message is logged from
- * \param[in] line The line number where the message is logged from
- * \param[in] severity The log message severity, controlling how the message
- * will be displayed
- *
- * Create a log message pertaining to line \a line of file \a fileName. The
- * \a severity argument sets the message severity to control whether it will be
- * output or dropped.
- */
-LogMessage::LogMessage(const char *fileName, unsigned int line,
- LogSeverity severity)
- : category_(LogCategory::defaultCategory()), severity_(severity)
-{
- init(fileName, line);
-}
-
-/**
* \brief Construct a log message for a given category
* \param[in] fileName The file name where the message is logged from
* \param[in] line The line number where the message is logged from
@@ -914,41 +887,22 @@ Loggable::~Loggable()
/**
* \brief Create a temporary LogMessage object to log a message
- * \param[in] fileName The file name where the message is logged from
- * \param[in] line The line number where the message is logged from
+ * \param[in] category The log message category
* \param[in] severity The log message severity
- *
- * This method is used as a backeng by the LOG() macro to create a log message
- * for locations inheriting from the Loggable class.
- *
- * \return A log message
- */
-LogMessage Loggable::_log(const char *fileName, unsigned int line,
- LogSeverity severity) const
-{
- LogMessage msg(fileName, line, severity);
-
- msg.stream() << logPrefix() << ": ";
- return msg;
-}
-
-/**
- * \brief Create a temporary LogMessage object to log a message
* \param[in] fileName The file name where the message is logged from
* \param[in] line The line number where the message is logged from
- * \param[in] category The log message category
- * \param[in] severity The log message severity
*
* This method is used as a backend by the LOG() macro to create a log message
* for locations inheriting from the Loggable class.
*
* \return A log message
*/
-LogMessage Loggable::_log(const char *fileName, unsigned int line,
- const LogCategory &category,
- LogSeverity severity) const
+LogMessage Loggable::_log(const LogCategory *category, LogSeverity severity,
+ const char *fileName, unsigned int line) const
{
- LogMessage msg(fileName, line, category, severity);
+ LogMessage msg(fileName, line,
+ category ? *category : LogCategory::defaultCategory(),
+ severity);
msg.stream() << logPrefix() << ": ";
return msg;
@@ -956,36 +910,22 @@ LogMessage Loggable::_log(const char *fileName, unsigned int line,
/**
* \brief Create a temporary LogMessage object to log a message
- * \param[in] fileName The file name where the message is logged from
- * \param[in] line The line number where the message is logged from
+ * \param[in] category The log message category
* \param[in] severity The log message severity
- *
- * This function is used as a backeng by the LOG() macro to create a log
- * message for locations not inheriting from the Loggable class.
- *
- * \return A log message
- */
-LogMessage _log(const char *fileName, unsigned int line, LogSeverity severity)
-{
- return LogMessage(fileName, line, severity);
-}
-
-/**
- * \brief Create a temporary LogMessage object to log a message
* \param[in] fileName The file name where the message is logged from
* \param[in] line The line number where the message is logged from
- * \param[in] category The log message category
- * \param[in] severity The log message severity
*
* This function is used as a backend by the LOG() macro to create a log
* message for locations not inheriting from the Loggable class.
*
* \return A log message
*/
-LogMessage _log(const char *fileName, unsigned int line,
- const LogCategory &category, LogSeverity severity)
+LogMessage _log(const LogCategory *category, LogSeverity severity,
+ const char *fileName, unsigned int line)
{
- return LogMessage(fileName, line, category, severity);
+ return LogMessage(fileName, line,
+ category ? *category : LogCategory::defaultCategory(),
+ severity);
}
/**
@@ -1030,10 +970,17 @@ LogMessage _log(const char *fileName, unsigned int line,
*
* If the severity is set to Fatal, execution is aborted and the program
* terminates immediately after printing the message.
+ *
+ * \warning Logging from the destructor of a global object, either directly or
+ * indirectly, results in undefined behaviour.
+ *
+ * \todo Allow logging from destructors of global objects to the largest
+ * possible extent
*/
/**
* \def ASSERT(condition)
+ * \hideinitializer
* \brief Abort program execution if assertion fails
*
* If \a condition is false, ASSERT() logs an error message with the Fatal log
diff --git a/src/libcamera/base/meson.build b/src/libcamera/base/meson.build
new file mode 100644
index 00000000..87172157
--- /dev/null
+++ b/src/libcamera/base/meson.build
@@ -0,0 +1,49 @@
+# SPDX-License-Identifier: CC0-1.0
+
+libcamera_base_sources = files([
+ 'class.cpp',
+ 'bound_method.cpp',
+ 'event_dispatcher.cpp',
+ 'event_dispatcher_poll.cpp',
+ 'event_notifier.cpp',
+ 'file.cpp',
+ 'log.cpp',
+ 'message.cpp',
+ 'object.cpp',
+ 'semaphore.cpp',
+ 'signal.cpp',
+ 'thread.cpp',
+ 'timer.cpp',
+ 'utils.cpp',
+])
+
+libcamera_base_deps = [
+ dependency('threads'),
+]
+
+# Internal components must use the libcamera_base_private dependency to enable
+# the use of headers which must not be exposed to the libcamera public API.
+libcamera_base_args = [ '-DLIBCAMERA_BASE_PRIVATE' ]
+
+libcamera_base_lib = shared_library('libcamera-base',
+ [libcamera_base_sources, libcamera_base_headers],
+ name_prefix : '',
+ install : true,
+ cpp_args : libcamera_base_args,
+ include_directories : libcamera_includes,
+ dependencies : libcamera_base_deps)
+
+libcamera_base = declare_dependency(sources : [
+ libcamera_base_headers,
+ ],
+ include_directories : libcamera_includes,
+ link_with : libcamera_base_lib)
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(libcamera_base_lib,
+ version : '1.0',
+ description : 'Camera support base utility library',
+ subdirs : 'libcamera')
+
+libcamera_base_private = declare_dependency(dependencies : libcamera_base,
+ compile_args : libcamera_base_args)
diff --git a/src/libcamera/message.cpp b/src/libcamera/base/message.cpp
index bc985c07..f1d772e4 100644
--- a/src/libcamera/message.cpp
+++ b/src/libcamera/base/message.cpp
@@ -5,14 +5,13 @@
* message.cpp - Message queue support
*/
-#include "libcamera/internal/message.h"
+#include <libcamera/base/message.h>
-#include <libcamera/signal.h>
-
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
+#include <libcamera/base/signal.h>
/**
- * \file message.h
+ * \file base/message.h
* \brief Message queue support
*
* The messaging API enables inter-thread communication through message
diff --git a/src/libcamera/object.cpp b/src/libcamera/base/object.cpp
index cd83c684..25410ecd 100644
--- a/src/libcamera/object.cpp
+++ b/src/libcamera/base/object.cpp
@@ -5,20 +5,19 @@
* object.cpp - Base object
*/
-#include <libcamera/object.h>
+#include <libcamera/base/object.h>
#include <algorithm>
-#include <libcamera/signal.h>
-
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/message.h"
-#include "libcamera/internal/semaphore.h"
-#include "libcamera/internal/thread.h"
-#include "libcamera/internal/utils.h"
+#include <libcamera/base/log.h>
+#include <libcamera/base/message.h>
+#include <libcamera/base/semaphore.h>
+#include <libcamera/base/signal.h>
+#include <libcamera/base/thread.h>
+#include <libcamera/base/utils.h>
/**
- * \file object.h
+ * \file base/object.h
* \brief Base object to support automatic signal disconnection
*/
@@ -155,6 +154,10 @@ void Object::deleteLater()
* running its event loop the message will not be delivered until the event
* loop gets started.
*
+ * Due to their asynchronous nature, threads do not provide any guarantee that
+ * all posted messages are delivered before the thread is stopped. See
+ * \ref thread-stop for additional information.
+ *
* \context This function is \threadsafe.
*/
void Object::postMessage(std::unique_ptr<Message> msg)
@@ -212,6 +215,10 @@ void Object::message(Message *msg)
* are passed untouched. The caller shall ensure that any pointer argument
* remains valid until the method is invoked.
*
+ * Due to the asynchronous nature of threads, functions invoked asynchronously
+ * with the ConnectionTypeQueued type are not guaranteed to be called before
+ * the thread is stopped. See \ref thread-stop for additional information.
+ *
* \context This function is \threadsafe.
*
* \return For connection types ConnectionTypeDirect and
diff --git a/src/libcamera/semaphore.cpp b/src/libcamera/base/semaphore.cpp
index d8988a91..7aedc6a8 100644
--- a/src/libcamera/semaphore.cpp
+++ b/src/libcamera/base/semaphore.cpp
@@ -5,11 +5,11 @@
* semaphore.cpp - General-purpose counting semaphore
*/
-#include "libcamera/internal/semaphore.h"
-#include "libcamera/internal/thread.h"
+#include <libcamera/base/semaphore.h>
+#include <libcamera/base/thread.h>
/**
- * \file semaphore.h
+ * \file base/semaphore.h
* \brief General-purpose counting semaphore
*/
diff --git a/src/libcamera/signal.cpp b/src/libcamera/base/signal.cpp
index 2532df3c..298b2d4b 100644
--- a/src/libcamera/signal.cpp
+++ b/src/libcamera/base/signal.cpp
@@ -5,12 +5,12 @@
* signal.cpp - Signal & slot implementation
*/
-#include <libcamera/signal.h>
+#include <libcamera/base/signal.h>
-#include "libcamera/internal/thread.h"
+#include <libcamera/base/thread.h>
/**
- * \file signal.h
+ * \file base/signal.h
* \brief Signal & slot implementation
*/
diff --git a/src/libcamera/thread.cpp b/src/libcamera/base/thread.cpp
index 87006a9c..1232f895 100644
--- a/src/libcamera/thread.cpp
+++ b/src/libcamera/base/thread.cpp
@@ -5,7 +5,7 @@
* thread.cpp - Thread support
*/
-#include "libcamera/internal/thread.h"
+#include <libcamera/base/thread.h>
#include <atomic>
#include <condition_variable>
@@ -14,11 +14,10 @@
#include <sys/types.h>
#include <unistd.h>
-#include <libcamera/event_dispatcher.h>
-
-#include "libcamera/internal/event_dispatcher_poll.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/message.h"
+#include <libcamera/base/event_dispatcher.h>
+#include <libcamera/base/event_dispatcher_poll.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/message.h>
/**
* \page thread Thread Support
@@ -29,6 +28,13 @@
* interactions with application threads. Careful compliance with the threading
* model will ensure avoidance of race conditions.
*
+ * Every thread created by libcamera is associated with an instance of the
+ * Thread class. Those threads run an internal event loop by default to
+ * dispatch events to objects. Additionally, the main thread of the application
+ * (defined as the thread that calls CameraManager::start()) is also associated
+ * with a Thread instance, but has no event loop accessible to libcamera. Other
+ * application threads are not visible to libcamera.
+ *
* \section thread-objects Threads and Objects
*
* Instances of the Object class and all its derived classes are thread-aware
@@ -40,13 +46,12 @@
* explicitly connected with ConnectionTypeDirect, will also be delivered from
* the object thread's event loop.
*
- * All Object instances created by libcamera are bound to an internal thread,
- * and applications don't need to provide an event loop to support them. Object
- * instances created by applications require an event loop. It is the
- * responsibility of applications to provide that event loop, either explicitly
- * through CameraManager::setEventDispatcher(), or by running the default event
- * loop provided by CameraManager::eventDispatcher() in their main thread. The
- * main thread of an application is the one that calls CameraManager::start().
+ * All Object instances created internally by libcamera are bound to internal
+ * threads. As objects interact with thread event loops for proper operation,
+ * creating an Object instance in a thread that has no internal event loop (such
+ * as the main application thread, or libcamera threads that have a custom main
+ * loop), prevents some features of the Object class from being used. See
+ * Thread::exec() for more details.
*
* \section thread-signals Threads and Signals
*
@@ -59,14 +64,6 @@
* be overridden by selecting a different connection type when calling
* Signal::connect().
*
- * Asynchronous signal delivery is used internally in libcamera, but is also
- * available to applications if desired. To use this feature, applications
- * shall create receiver classes that inherit from the Object class, and
- * provide an event loop to the CameraManager as explained above. Note that
- * Object instances created by the application are limited to living in the
- * application's main thread. Creating Object instances from another thread of
- * an application causes undefined behaviour.
- *
* \section thread-reentrancy Reentrancy and Thread-Safety
*
* Through the documentation, several terms are used to define how classes and
@@ -105,7 +102,7 @@
*/
/**
- * \file thread.h
+ * \file base/thread.h
* \brief Thread support
*/
@@ -129,6 +126,11 @@ public:
* \brief Protects the \ref list_
*/
Mutex mutex_;
+ /**
+ * \brief The recursion level for recursive Thread::dispatchMessages()
+ * calls
+ */
+ unsigned int recursion_ = 0;
};
/**
@@ -221,11 +223,50 @@ ThreadData *ThreadData::current()
* with the Object, Signal and EventDispatcher classes.
*
* Thread instances by default run an event loop until the exit() method is
- * called. A custom event dispatcher may be installed with
- * setEventDispatcher(), otherwise a poll-based event dispatcher is used. This
- * behaviour can be overriden by overloading the run() method.
- *
- * \context This class is \threadsafe.
+ * called. The event loop dispatches events (messages, notifiers and timers)
+ * sent to the objects living in the thread. This behaviour can be modified by
+ * overriding the run() function.
+ *
+ * \section thread-stop Stopping Threads
+ *
+ * Threads can't be forcibly stopped. Instead, a thread user first requests the
+ * thread to exit and then waits for the thread's main function to react to the
+ * request and return, at which point the thread will stop.
+ *
+ * For threads running exec(), the exit() function is used to request the thread
+ * to exit. For threads subclassing the Thread class and implementing a custom
+ * run() function, a subclass-specific mechanism shall be provided. In either
+ * case, the wait() function shall be called to wait for the thread to stop.
+ *
+ * Due to their asynchronous nature, threads are subject to race conditions when
+ * they stop. This is of particular importance for messages posted to the thread
+ * with postMessage() (and the other mechanisms that rely on it, such as
+ * Object::invokeMethod() or asynchronous signal delivery). To understand the
+ * issues, three contexts need to be considered:
+ *
+ * - The worker is the Thread performing work and being instructed to stop.
+ * - The controller is the context which instructs the worker thread to stop.
+ * - The other contexts are any threads other than the worker and controller
+ * that interact with the worker thread.
+ *
+ * Messages posted to the worker thread from the controller context before
+ * calling exit() are queued to the thread's message queue, and the Thread class
+ * offers no guarantee that those messages will be processed before the thread
+ * stops. This allows threads to stop quickly.
+ *
+ * A thread that requires delivery of messages posted from the controller
+ * context before exit() should reimplement the run() function and call
+ * dispatchMessages() after exec().
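+ *
+ * A sketch of such a reimplementation, using a hypothetical Worker subclass
+ * for illustration:
+ *
+ * \code{.cpp}
+ * class Worker : public Thread
+ * {
+ * protected:
+ * 	void run() override
+ * 	{
+ * 		exec();
+ * 		// Drain messages posted before exit() was called.
+ * 		dispatchMessages(Message::Type::None);
+ * 	}
+ * };
+ * \endcode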
+ *
+ * Messages posted to the worker thread from the other contexts are asynchronous
+ * with respect to the exit() call from the controller context. There is no
+ * guarantee as to whether those messages will be processed or not before the
+ * thread stops.
+ *
+ * Messages that are not processed will stay in the queue, in the exact same way
+ * as messages posted after the thread has stopped. They will be processed when
+ * the thread is restarted. If the thread is never restarted, they will be
+ * deleted without being processed when the Thread instance is destroyed.
*/
/**
@@ -291,7 +332,7 @@ void Thread::startThread()
/**
* \brief Enter the event loop
*
- * This method enter an event loop based on the event dispatcher instance for
+ * This method enters an event loop based on the event dispatcher instance for
* the thread, and blocks until the exit() method is called. It is meant to be
* called within the thread from the run() method and shall not be called
* outside of the thread.
@@ -318,9 +359,17 @@ int Thread::exec()
* \brief Main method of the thread
*
* When the thread is started with start(), it calls this method in the context
- * of the new thread. The run() method can be overloaded to perform custom
- * work. When this method returns the thread execution is stopped, and the \ref
- * finished signal is emitted.
+ * of the new thread. The run() method can be overridden to perform custom
+ * work, either custom initialization and cleanup before and after calling the
+ * Thread::exec() function, or a custom thread loop altogether. When this
+ * method returns the thread execution is stopped, and the \ref finished signal
+ * is emitted.
+ *
+ * Note that if this function is overridden and doesn't call Thread::exec(), no
+ * events will be dispatched to the objects living in the thread. These objects
+ * will not be able to use the EventNotifier, Timer or Message facilities. This
+ * includes functions that rely on message dispatching, such as
+ * Object::deleteLater().
*
* The base implementation just calls exec().
*/
@@ -348,6 +397,8 @@ void Thread::finishThread()
*
* Calling exit() on a thread that reimplements the run() method and doesn't
* call exec() will likely have no effect.
+ *
+ * \context This function is \threadsafe.
*/
void Thread::exit(int code)
{
@@ -370,11 +421,13 @@ void Thread::exit(int code)
* utils::duration::max(), the wait never times out. If the thread is not
* running the function returns immediately.
*
+ * \context This function is \threadsafe.
+ *
* \return True if the thread has finished, or false if the wait timed out
*/
bool Thread::wait(utils::duration duration)
{
- bool finished = true;
+ bool hasFinished = true;
{
MutexLocker locker(data_->mutex_);
@@ -382,14 +435,14 @@ bool Thread::wait(utils::duration duration)
if (duration == utils::duration::max())
data_->cv_.wait(locker, [&]() { return !data_->running_; });
else
- finished = data_->cv_.wait_for(locker, duration,
- [&]() { return !data_->running_; });
+ hasFinished = data_->cv_.wait_for(locker, duration,
+ [&]() { return !data_->running_; });
}
if (thread_.joinable())
thread_.join();
- return finished;
+ return hasFinished;
}
/**
@@ -399,6 +452,8 @@ bool Thread::wait(utils::duration duration)
* started. This method guarantees that it returns true after the start()
* method returns, and false after the wait() method returns.
*
+ * \context This function is \threadsafe.
+ *
* \return True if the thread is running, false otherwise
*/
bool Thread::isRunning()
@@ -414,6 +469,7 @@ bool Thread::isRunning()
/**
* \brief Retrieve the Thread instance for the current thread
+ * \context This function is \threadsafe.
* \return The Thread instance for the current thread
*/
Thread *Thread::current()
@@ -428,6 +484,8 @@ Thread *Thread::current()
* The thread ID corresponds to the Linux thread ID (TID) as returned by the
* gettid system call.
*
+ * \context This function is \threadsafe.
+ *
* \return The ID of the current thread
*/
pid_t Thread::currentId()
@@ -437,37 +495,12 @@ pid_t Thread::currentId()
}
/**
- * \brief Set the event dispatcher
- * \param[in] dispatcher Pointer to the event dispatcher
- *
- * Threads that run an event loop require an event dispatcher to integrate
- * event notification and timers with the loop. Users that want to provide
- * their own event dispatcher shall call this method once and only once before
- * the thread is started with start(). If no event dispatcher is provided, a
- * default poll-based implementation will be used.
- *
- * The Thread takes ownership of the event dispatcher and will delete it when
- * the thread is destroyed.
- */
-void Thread::setEventDispatcher(std::unique_ptr<EventDispatcher> dispatcher)
-{
- if (data_->dispatcher_.load(std::memory_order_relaxed)) {
- LOG(Thread, Warning) << "Event dispatcher is already set";
- return;
- }
-
- data_->dispatcher_.store(dispatcher.release(),
- std::memory_order_relaxed);
-}
-
-/**
* \brief Retrieve the event dispatcher
*
- * This method retrieves the event dispatcher set with setEventDispatcher().
- * If no dispatcher has been set, a default poll-based implementation is created
- * and returned, and no custom event dispatcher may be installed anymore.
+ * This function retrieves the internal event dispatcher for the thread. The
+ * returned event dispatcher is valid until the thread is destroyed.
*
- * The returned event dispatcher is valid until the thread is destroyed.
+ * \context This function is \threadsafe.
*
* \return Pointer to the event dispatcher
*/
@@ -493,8 +526,13 @@ EventDispatcher *Thread::eventDispatcher()
* running its event loop the message will not be delivered until the event
* loop gets started.
*
+ * When the thread is stopped, posted messages may not have all been processed.
+ * See \ref thread-stop for additional information.
+ *
* If the \a receiver is not bound to this thread the behaviour is undefined.
*
+ * \context This function is \threadsafe.
+ *
* \sa exec()
*/
void Thread::postMessage(std::unique_ptr<Message> msg, Object *receiver)
@@ -558,28 +596,38 @@ void Thread::removeMessages(Object *receiver)
* This function immediately dispatches all the messages previously posted for
* this thread with postMessage() that match the message \a type. If the \a type
* is Message::Type::None, all messages are dispatched.
+ *
+ * Messages shall only be dispatched from the current thread, typically within
+ * the thread from the run() function. Calling this function outside of the
+ * thread results in undefined behaviour.
+ *
+ * This function is not thread-safe, but it may be called recursively in the
+ * same thread from an object's message handler. It guarantees delivery of
+ * messages in the order they have been posted in all cases.
*/
void Thread::dispatchMessages(Message::Type type)
{
+ ASSERT(data_ == ThreadData::current());
+
+ ++data_->messages_.recursion_;
+
MutexLocker locker(data_->messages_.mutex_);
std::list<std::unique_ptr<Message>> &messages = data_->messages_.list_;
- for (auto iter = messages.begin(); iter != messages.end(); ) {
- std::unique_ptr<Message> &msg = *iter;
-
- if (!msg) {
- iter = data_->messages_.list_.erase(iter);
+ for (std::unique_ptr<Message> &msg : messages) {
+ if (!msg)
continue;
- }
- if (type != Message::Type::None && msg->type() != type) {
- ++iter;
+ if (type != Message::Type::None && msg->type() != type)
continue;
- }
+ /*
+ * Move the message, setting the entry in the list to null. It
+ * will cause recursive calls to ignore the entry, and the erase
+ * loop at the end of the function to delete it from the list.
+ */
std::unique_ptr<Message> message = std::move(msg);
- iter = data_->messages_.list_.erase(iter);
Object *receiver = message->receiver_;
ASSERT(data_ == receiver->thread()->data_);
@@ -590,6 +638,20 @@ void Thread::dispatchMessages(Message::Type type)
message.reset();
locker.lock();
}
+
+ /*
+ * If the recursion level is 0, erase all null messages in the list. We
+ * can't do so during recursion, as it would invalidate the iterator of
+ * the outer calls.
+ */
+ if (!--data_->messages_.recursion_) {
+ for (auto iter = messages.begin(); iter != messages.end(); ) {
+ if (!*iter)
+ iter = messages.erase(iter);
+ else
+ ++iter;
+ }
+ }
}
/**
diff --git a/src/libcamera/timer.cpp b/src/libcamera/base/timer.cpp
index 24e452ed..9c54352d 100644
--- a/src/libcamera/timer.cpp
+++ b/src/libcamera/base/timer.cpp
@@ -5,20 +5,20 @@
* timer.cpp - Generic timer
*/
-#include <libcamera/timer.h>
+#include <libcamera/base/timer.h>
#include <chrono>
-#include <libcamera/camera_manager.h>
-#include <libcamera/event_dispatcher.h>
+#include <libcamera/base/event_dispatcher.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/message.h>
+#include <libcamera/base/thread.h>
+#include <libcamera/base/utils.h>
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/message.h"
-#include "libcamera/internal/thread.h"
-#include "libcamera/internal/utils.h"
+#include <libcamera/camera_manager.h>
/**
- * \file timer.h
+ * \file base/timer.h
* \brief Generic timer
*/
diff --git a/src/libcamera/utils.cpp b/src/libcamera/base/utils.cpp
index a5232902..45b92b67 100644
--- a/src/libcamera/utils.cpp
+++ b/src/libcamera/base/utils.cpp
@@ -5,38 +5,24 @@
* utils.cpp - Miscellaneous utility functions
*/
-#include "libcamera/internal/utils.h"
+#include <libcamera/base/utils.h>
-#include <dlfcn.h>
-#include <elf.h>
#include <iomanip>
-#include <limits.h>
-#include <link.h>
#include <sstream>
#include <stdlib.h>
#include <string.h>
-#include <sys/stat.h>
-#include <sys/types.h>
#include <unistd.h>
/**
- * \file utils.h
+ * \file base/utils.h
* \brief Miscellaneous utility functions
*/
-/* musl doesn't declare _DYNAMIC in link.h, declare it manually. */
-extern ElfW(Dyn) _DYNAMIC[];
-
namespace libcamera {
namespace utils {
/**
- * \def ARRAY_SIZE(array)
- * \brief Determine the number of elements in the static array.
- */
-
-/**
* \brief Strip the directory prefix from the path
* \param[in] path The path to process
*
@@ -49,8 +35,8 @@ namespace utils {
*/
const char *basename(const char *path)
{
- const char *base = strrchr(path, '/');
- return base ? base + 1 : path;
+ const char *base = strrchr(path, '/');
+ return base ? base + 1 : path;
}
/**
@@ -64,6 +50,10 @@ const char *basename(const char *path)
* avoid vulnerabilities that could occur if set-user-ID or set-group-ID
* programs accidentally trust the environment.
*
+ * \note Not all platforms may support the features required to implement the
+ * secure execution check, in which case this function behaves as getenv(). A
+ * notable example of this is Android.
+ *
* \return A pointer to the value in the environment or NULL if the requested
* environment variable doesn't exist or if secure execution is required.
*/
@@ -72,9 +62,10 @@ char *secure_getenv(const char *name)
#if HAVE_SECURE_GETENV
return ::secure_getenv(name);
#else
+#if HAVE_ISSETUGID
if (issetugid())
return NULL;
-
+#endif
return getenv(name);
#endif
}
@@ -335,123 +326,138 @@ details::StringSplitter split(const std::string &str, const std::string &delim)
}
/**
- * \brief Check if libcamera is installed or not
+ * \brief Remove any non-ASCII characters from a string
+ * \param[in] str The string to strip
*
- * Utilise the build_rpath dynamic tag which is stripped out by meson at
- * install time to determine at runtime if the library currently executing
- * has been installed or not.
+ * Remove all non-ASCII characters from a string.
*
- * \return True if libcamera is installed, false otherwise
+ * \return A string equal to \a str with all non-ASCII characters removed
*/
-bool isLibcameraInstalled()
+std::string toAscii(const std::string &str)
{
- /*
- * DT_RUNPATH (DT_RPATH when the linker uses old dtags) is removed on
- * install.
- */
- for (const ElfW(Dyn) *dyn = _DYNAMIC; dyn->d_tag != DT_NULL; ++dyn) {
- if (dyn->d_tag == DT_RUNPATH || dyn->d_tag == DT_RPATH)
- return false;
- }
-
- return true;
+ std::string ret;
+ for (const char &c : str)
+ if (!(c & 0x80))
+ ret += c;
+ return ret;
}
/**
- * \brief Retrieve the path to the build directory
- *
- * During development, it is useful to run libcamera binaries directly from the
- * build directory without installing them. This function helps components that
- * need to locate resources in the build tree, such as IPA modules or IPA proxy
- * workers, by providing them with the path to the root of the build directory.
- * Callers can then use it to complement or override searches in system-wide
- * directories.
- *
- * If libcamera has been installed, the build directory path is not available
- * and this function returns an empty string.
- *
- * \return The path to the build directory if running from a build, or an empty
- * string otherwise
+ * \fn alignDown(unsigned int value, unsigned int alignment)
+ * \brief Align \a value down to \a alignment
+ * \param[in] value The value to align
+ * \param[in] alignment The alignment
+ * \return The value rounded down to the nearest multiple of \a alignment
*/
-std::string libcameraBuildPath()
-{
- if (isLibcameraInstalled())
- return std::string();
-
- Dl_info info;
-
- /* Look up our own symbol. */
- int ret = dladdr(reinterpret_cast<void *>(libcameraBuildPath), &info);
- if (ret == 0)
- return std::string();
-
- std::string path = dirname(info.dli_fname) + "/../../";
-
- char *real = realpath(path.c_str(), nullptr);
- if (!real)
- return std::string();
- path = real;
- free(real);
+/**
+ * \fn alignUp(unsigned int value, unsigned int alignment)
+ * \brief Align \a value up to \a alignment
+ * \param[in] value The value to align
+ * \param[in] alignment The alignment
+ * \return The value rounded up to the nearest multiple of \a alignment
+ */
- return path + "/";
-}
+/**
+ * \fn reverse(T &&iterable)
+ * \brief Wrap an iterable to reverse iteration in a range-based loop
+ * \param[in] iterable The iterable
+ * \return A value of unspecified type that, when used in a range-based for
+ * loop, will cause the loop to iterate over the \a iterable in reverse order
+ */
/**
- * \brief Retrieve the path to the source directory
+ * \fn enumerate(T &iterable)
+ * \brief Wrap an iterable to enumerate index and value in a range-based loop
+ * \param[in] iterable The iterable
+ *
+ * Range-based for loops are handy and widely preferred in C++, but are limited
+ * in their ability to replace for loops that require access to a loop counter.
+ * The enumerate() function solves this problem by wrapping the \a iterable in
+ * an adapter that, when used as a range-expression, will provide iterators
+ * whose value_type is a pair of index and value reference.
*
- * During development, it is useful to run libcamera binaries directly from the
- * build directory without installing them. This function helps components that
- * need to locate resources in the source tree, such as IPA configuration
- * files, by providing them with the path to the root of the source directory.
- * Callers can then use it to complement or override searches in system-wide
- * directories.
+ * The iterable must support std::begin() and std::end(). This includes all
+ * containers provided by the standard C++ library, as well as C-style arrays.
*
- * If libcamera has been installed, the source directory path is not available
- * and this function returns an empty string.
+ * A typical usage pattern would use structured binding to store the index and
+ * value in two separate variables:
+ *
+ * \code{.cpp}
+ * std::vector<int> values = ...;
*
- * \return The path to the source directory if running from a build directory,
- * or an empty string otherwise
+ * for (auto [index, value] : utils::enumerate(values)) {
+ * ...
+ * }
+ * \endcode
+ *
+ * Note that the argument to enumerate() has to be an lvalue, as the lifetime
+ * of any rvalue would not be extended to the whole for loop. The compiler will
+ * complain if an rvalue is passed to the function, in which case it should be
+ * stored in a local variable before the loop.
+ *
+ * \return A value of unspecified type that, when used in a range-based for
+ * loop, iterates over an indexed view of the \a iterable
*/
-std::string libcameraSourcePath()
-{
- std::string path = libcameraBuildPath();
- if (path.empty())
- return std::string();
- path += "source";
-
- char *real = realpath(path.c_str(), nullptr);
- if (!real)
- return std::string();
-
- path = real;
- free(real);
-
- struct stat statbuf;
- int ret = stat(path.c_str(), &statbuf);
- if (ret < 0 || (statbuf.st_mode & S_IFMT) != S_IFDIR)
- return std::string();
+/**
+ * \class Duration
+ * \brief Helper class from std::chrono::duration that represents a time
+ * duration in nanoseconds with double precision
+ */
- return path + "/";
-}
+/**
+ * \fn Duration::Duration(const std::chrono::duration<Rep, Period> &d)
+ * \brief Construct a Duration by converting an arbitrary std::chrono::duration
+ * \param[in] d The std::chrono::duration object to convert from
+ *
+ * The constructed \a Duration object is internally represented in double
+ * precision with nanoseconds ticks.
+ */
/**
- * \fn alignDown(unsigned int value, unsigned int alignment)
- * \brief Align \a value down to \a alignment
- * \param[in] value The value to align
- * \param[in] alignment The alignment
- * \return The value rounded down to the nearest multiple of \a alignment
+ * \fn Duration::get<Period>()
+ * \brief Retrieve the tick count, converted to the timebase provided by the
+ * template argument Period of type \a std::ratio
+ *
+ * A typical usage example is given below:
+ *
+ * \code{.cpp}
+ * utils::Duration d = 5s;
+ * double d_in_ms = d.get<std::milli>();
+ * \endcode
+ *
+ * \return The tick count of the Duration expressed in \a Period
*/
/**
- * \fn alignUp(unsigned int value, unsigned int alignment)
- * \brief Align \a value up to \a alignment
- * \param[in] value The value to align
- * \param[in] alignment The alignment
- * \return The value rounded up to the nearest multiple of \a alignment
+ * \fn Duration::operator bool()
+ * \brief Boolean operator to test if a \a Duration holds a non-zero time value
+ *
+ * \return True if \a Duration is a non-zero time value, false otherwise
*/
} /* namespace utils */
+#ifndef __DOXYGEN__
+template<class CharT, class Traits>
+std::basic_ostream<CharT, Traits> &operator<<(std::basic_ostream<CharT, Traits> &os,
+ const utils::Duration &d)
+{
+ std::basic_ostringstream<CharT, Traits> s;
+
+ s.flags(os.flags());
+ s.imbue(os.getloc());
+ s.setf(std::ios_base::fixed, std::ios_base::floatfield);
+ s.precision(2);
+ s << d.get<std::micro>() << "us";
+ return os << s.str();
+}
+
+template
+std::basic_ostream<char, std::char_traits<char>> &
+operator<< <char, std::char_traits<char>>(std::basic_ostream<char, std::char_traits<char>> &os,
+ const utils::Duration &d);
+#endif
+
} /* namespace libcamera */
diff --git a/src/libcamera/bayer_format.cpp b/src/libcamera/bayer_format.cpp
index c42792ff..11355f14 100644
--- a/src/libcamera/bayer_format.cpp
+++ b/src/libcamera/bayer_format.cpp
@@ -7,7 +7,11 @@
#include "libcamera/internal/bayer_format.h"
+#include <algorithm>
#include <map>
+#include <unordered_map>
+
+#include <linux/media-bus-format.h>
#include <libcamera/transform.h>
@@ -41,6 +45,8 @@ namespace libcamera {
* \brief G then R on the first row, B then G on the second row.
* \var BayerFormat::RGGB
* \brief R then G on the first row, G then B on the second row.
+ * \var BayerFormat::MONO
+ * \brief Monochrome image data, there is no colour filter array.
*/
/**
@@ -57,37 +63,6 @@ namespace libcamera {
namespace {
-const std::map<V4L2PixelFormat, BayerFormat> v4l2ToBayer{
- { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR8), { BayerFormat::BGGR, 8, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG8), { BayerFormat::GBRG, 8, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG8), { BayerFormat::GRBG, 8, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB8), { BayerFormat::RGGB, 8, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR10), { BayerFormat::BGGR, 10, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG10), { BayerFormat::GBRG, 10, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG10), { BayerFormat::GRBG, 10, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB10), { BayerFormat::RGGB, 10, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR10P), { BayerFormat::BGGR, 10, BayerFormat::CSI2Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG10P), { BayerFormat::GBRG, 10, BayerFormat::CSI2Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG10P), { BayerFormat::GRBG, 10, BayerFormat::CSI2Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB10P), { BayerFormat::RGGB, 10, BayerFormat::CSI2Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SBGGR10), { BayerFormat::BGGR, 10, BayerFormat::IPU3Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SGBRG10), { BayerFormat::GBRG, 10, BayerFormat::IPU3Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SGRBG10), { BayerFormat::GRBG, 10, BayerFormat::IPU3Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_IPU3_SRGGB10), { BayerFormat::RGGB, 10, BayerFormat::IPU3Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR12), { BayerFormat::BGGR, 12, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG12), { BayerFormat::GBRG, 12, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG12), { BayerFormat::GRBG, 12, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB12), { BayerFormat::RGGB, 12, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR12P), { BayerFormat::BGGR, 12, BayerFormat::CSI2Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG12P), { BayerFormat::GBRG, 12, BayerFormat::CSI2Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG12P), { BayerFormat::GRBG, 12, BayerFormat::CSI2Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB12P), { BayerFormat::RGGB, 12, BayerFormat::CSI2Packed } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR16), { BayerFormat::BGGR, 16, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG16), { BayerFormat::GBRG, 16, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG16), { BayerFormat::GRBG, 16, BayerFormat::None } },
- { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB16), { BayerFormat::RGGB, 16, BayerFormat::None } },
-};
-
/* Define a slightly arbitrary ordering so that we can use a std::map. */
struct BayerFormatComparator {
constexpr bool operator()(const BayerFormat &lhs, const BayerFormat &rhs) const
@@ -138,6 +113,45 @@ const std::map<BayerFormat, V4L2PixelFormat, BayerFormatComparator> bayerToV4l2{
{ { BayerFormat::GBRG, 16, BayerFormat::None }, V4L2PixelFormat(V4L2_PIX_FMT_SGBRG16) },
{ { BayerFormat::GRBG, 16, BayerFormat::None }, V4L2PixelFormat(V4L2_PIX_FMT_SGRBG16) },
{ { BayerFormat::RGGB, 16, BayerFormat::None }, V4L2PixelFormat(V4L2_PIX_FMT_SRGGB16) },
+ { { BayerFormat::MONO, 8, BayerFormat::None }, V4L2PixelFormat(V4L2_PIX_FMT_GREY) },
+ { { BayerFormat::MONO, 10, BayerFormat::CSI2Packed }, V4L2PixelFormat(V4L2_PIX_FMT_Y10P) },
+};
+
+const std::unordered_map<unsigned int, BayerFormat> mbusCodeToBayer{
+ { MEDIA_BUS_FMT_SBGGR8_1X8, { BayerFormat::BGGR, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGBRG8_1X8, { BayerFormat::GBRG, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGRBG8_1X8, { BayerFormat::GRBG, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SRGGB8_1X8, { BayerFormat::RGGB, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SBGGR10_ALAW8_1X8, { BayerFormat::BGGR, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGBRG10_ALAW8_1X8, { BayerFormat::GBRG, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGRBG10_ALAW8_1X8, { BayerFormat::GRBG, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SRGGB10_ALAW8_1X8, { BayerFormat::RGGB, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SBGGR10_DPCM8_1X8, { BayerFormat::BGGR, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGBRG10_DPCM8_1X8, { BayerFormat::GBRG, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGRBG10_DPCM8_1X8, { BayerFormat::GRBG, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SRGGB10_DPCM8_1X8, { BayerFormat::RGGB, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_BE, { BayerFormat::BGGR, 10, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SBGGR10_2X8_PADHI_LE, { BayerFormat::BGGR, 10, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_BE, { BayerFormat::BGGR, 10, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SBGGR10_2X8_PADLO_LE, { BayerFormat::BGGR, 10, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SBGGR10_1X10, { BayerFormat::BGGR, 10, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGBRG10_1X10, { BayerFormat::GBRG, 10, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGRBG10_1X10, { BayerFormat::GRBG, 10, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SRGGB10_1X10, { BayerFormat::RGGB, 10, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SBGGR12_1X12, { BayerFormat::BGGR, 12, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGBRG12_1X12, { BayerFormat::GBRG, 12, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGRBG12_1X12, { BayerFormat::GRBG, 12, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SRGGB12_1X12, { BayerFormat::RGGB, 12, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SBGGR14_1X14, { BayerFormat::BGGR, 14, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGBRG14_1X14, { BayerFormat::GBRG, 14, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGRBG14_1X14, { BayerFormat::GRBG, 14, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SRGGB14_1X14, { BayerFormat::RGGB, 14, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SBGGR16_1X16, { BayerFormat::BGGR, 16, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGBRG16_1X16, { BayerFormat::GBRG, 16, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SGRBG16_1X16, { BayerFormat::GRBG, 16, BayerFormat::None } },
+ { MEDIA_BUS_FMT_SRGGB16_1X16, { BayerFormat::RGGB, 16, BayerFormat::None } },
+ { MEDIA_BUS_FMT_Y8_1X8, { BayerFormat::MONO, 8, BayerFormat::None } },
+ { MEDIA_BUS_FMT_Y10_1X10, { BayerFormat::MONO, 10, BayerFormat::None } },
};
} /* namespace */
@@ -156,17 +170,20 @@ const std::map<BayerFormat, V4L2PixelFormat, BayerFormatComparator> bayerToV4l2{
*/
/**
- * \brief Construct a BayerFormat from a V4L2PixelFormat
- * \param[in] v4l2Format The raw format to convert into a BayerFormat
+ * \brief Retrieve the BayerFormat associated with a media bus code
+ * \param[in] mbusCode The media bus code to convert into a BayerFormat
+ *
+ * The media bus code numeric identifiers are defined by the V4L2 specification.
*/
-BayerFormat::BayerFormat(V4L2PixelFormat v4l2Format)
- : order(BGGR), packing(None)
+const BayerFormat &BayerFormat::fromMbusCode(unsigned int mbusCode)
{
- const auto it = v4l2ToBayer.find(v4l2Format);
- if (it == v4l2ToBayer.end())
- bitDepth = 0;
+ static BayerFormat empty;
+
+ const auto it = mbusCodeToBayer.find(mbusCode);
+ if (it == mbusCodeToBayer.end())
+ return empty;
else
- *this = it->second;
+ return it->second;
}
/**
@@ -187,9 +204,10 @@ std::string BayerFormat::toString() const
"BGGR",
"GBRG",
"GRBG",
- "RGGB"
+ "RGGB",
+ "MONO"
};
- if (isValid() && order <= RGGB)
+ if (isValid() && order <= MONO)
result = orderStrings[order];
else
return "INVALID";
@@ -205,6 +223,23 @@ std::string BayerFormat::toString() const
}
/**
+ * \brief Compare two BayerFormats for equality
+ * \return True if order, bitDepth and packing are equal, or false otherwise
+ */
+bool operator==(const BayerFormat &lhs, const BayerFormat &rhs)
+{
+ return lhs.order == rhs.order && lhs.bitDepth == rhs.bitDepth &&
+ lhs.packing == rhs.packing;
+}
+
+/**
+ * \fn bool operator!=(const BayerFormat &lhs, const BayerFormat &rhs)
+ * \brief Compare two BayerFormats for inequality
+ * \return True if any of order, bitDepth or packing differ between the two
+ * formats, or false otherwise
+ */
+
+/**
* \brief Convert a BayerFormat into the corresponding V4L2PixelFormat
* \return The V4L2PixelFormat corresponding to this BayerFormat
*/
@@ -218,6 +253,23 @@ V4L2PixelFormat BayerFormat::toV4L2PixelFormat() const
}
/**
+ * \brief Convert \a v4l2Format to the corresponding BayerFormat
+ * \param[in] v4l2Format The raw format to convert into a BayerFormat
+ * \return The BayerFormat corresponding to \a v4l2Format
+ */
+BayerFormat BayerFormat::fromV4L2PixelFormat(V4L2PixelFormat v4l2Format)
+{
+ auto it = std::find_if(bayerToV4l2.begin(), bayerToV4l2.end(),
+ [v4l2Format](const auto &i) {
+ return i.second == v4l2Format;
+ });
+ if (it != bayerToV4l2.end())
+ return it->first;
+
+ return BayerFormat();
+}
+
+/**
* \brief Apply a transform to this BayerFormat
* \param[in] t The transform to apply
*
@@ -227,9 +279,7 @@ V4L2PixelFormat BayerFormat::toV4L2PixelFormat() const
* The transformed image would have a GRBG order. The bit depth and modifiers
* are not affected.
*
- * Note that transpositions are ignored as the order of a transpose with
- * respect to the flips would have to be defined, and sensors are not expected
- * to support transposition.
+ * Horizontal and vertical flips are applied before transpose.
*
* \return The transformed Bayer format
*/
@@ -237,6 +287,9 @@ BayerFormat BayerFormat::transform(Transform t) const
{
BayerFormat result = *this;
+ if (order == MONO)
+ return result;
+
/*
* Observe that flipping bit 0 of the Order enum performs a horizontal
* mirror on the Bayer pattern (e.g. RGGB goes to GRBG). Similarly,
@@ -247,6 +300,11 @@ BayerFormat BayerFormat::transform(Transform t) const
if (!!(t & Transform::VFlip))
result.order = static_cast<Order>(result.order ^ 2);
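+
+ /*
+ * Transposing the Bayer pattern swaps the green orders GBRG (1) and
+ * GRBG (2), while BGGR (0) and RGGB (3) are symmetric along the main
+ * diagonal and remain unchanged.
+ */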
+ if (!!(t & Transform::Transpose) && result.order == 1)
+ result.order = static_cast<Order>(2);
+ else if (!!(t & Transform::Transpose) && result.order == 2)
+ result.order = static_cast<Order>(1);
+
return result;
}
diff --git a/src/libcamera/byte_stream_buffer.cpp b/src/libcamera/byte_stream_buffer.cpp
index df7029b0..b67bb928 100644
--- a/src/libcamera/byte_stream_buffer.cpp
+++ b/src/libcamera/byte_stream_buffer.cpp
@@ -10,17 +10,17 @@
#include <stdint.h>
#include <string.h>
-#include "libcamera/internal/log.h"
-
-namespace libcamera {
-
-LOG_DEFINE_CATEGORY(Serialization);
+#include <libcamera/base/log.h>
/**
* \file byte_stream_buffer.h
* \brief Managed memory container for serialized data
*/
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(Serialization)
+
/**
* \class ByteStreamBuffer
* \brief Wrap a memory buffer and provide sequential data read and write
diff --git a/src/libcamera/camera.cpp b/src/libcamera/camera.cpp
index fb76077f..c8858e71 100644
--- a/src/libcamera/camera.cpp
+++ b/src/libcamera/camera.cpp
@@ -7,38 +7,99 @@
#include <libcamera/camera.h>
+#include <array>
#include <atomic>
#include <iomanip>
+#include <libcamera/base/log.h>
+#include <libcamera/base/thread.h>
+
#include <libcamera/framebuffer_allocator.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>
-#include "libcamera/internal/log.h"
#include "libcamera/internal/pipeline_handler.h"
-#include "libcamera/internal/thread.h"
-#include "libcamera/internal/utils.h"
/**
* \file camera.h
* \brief Camera device handling
*
- * At the core of libcamera is the camera device, combining one image source
- * with processing hardware able to provide one or multiple image streams. The
- * Camera class represents a camera device.
- *
- * A camera device contains a single image source, and separate camera device
- * instances relate to different image sources. For instance, a phone containing
- * front and back image sensors will be modelled with two camera devices, one
- * for each sensor. When multiple streams can be produced from the same image
- * source, all those streams are guaranteed to be part of the same camera
- * device.
- *
- * While not sharing image sources, separate camera devices can share other
- * system resources, such as an ISP. For this reason camera device instances may
- * not be fully independent, in which case usage restrictions may apply. For
- * instance, a phone with a front and a back camera device may not allow usage
- * of the two devices simultaneously.
+ * \page camera-model Camera Model
+ *
+ * libcamera acts as a middleware between applications and camera hardware. It
+ * provides a solution to an unsolvable problem: reconciling applications,
+ * which need to run on different systems without dealing with device-specific
+ * details, and camera hardware, which exhibits a wide variety of features,
+ * limitations and architecture variations. In order to do so, it creates an
+ * abstract camera model that hides the camera hardware from applications. The
+ * model is designed to strike the right balance between genericity, to please
+ * generic applications, and flexibility, to expose even the most specific
+ * hardware features to the most demanding applications.
+ *
+ * In libcamera, a Camera is defined as a device that can capture frames
+ * continuously from a camera sensor and store them in memory. If supported by
+ * the device and desired by the application, the camera may store each
+ * captured frame in multiple copies, possibly in different formats and sizes.
+ * Each of these memory outputs of the camera is called a Stream.
+ *
+ * A camera contains a single image source, and separate camera instances
+ * relate to different image sources. For instance, a phone containing front
+ * and back image sensors will be modelled with two cameras, one for each
+ * sensor. When multiple streams can be produced from the same image source,
+ * all those streams are guaranteed to be part of the same camera.
+ *
+ * While not sharing image sources, separate cameras can share other system
+ * resources, such as ISPs. For this reason camera instances may not be fully
+ * independent, in which case usage restrictions may apply. For instance, a
+ * phone with a front and a back camera may not allow usage of the two cameras
+ * simultaneously.
+ *
+ * The camera model defines an implicit pipeline, whose input is the camera
+ * sensor, and whose outputs are the streams. Along the pipeline, the frames
+ * produced by the camera sensor are transformed by the camera into a format
+ * suitable for applications, with image processing that improves the quality
+ * of the captured frames. The camera exposes a set of controls that
+ * applications may use to manually control the processing steps. This
+ * high-level camera model is the minimum baseline that all cameras must
+ * conform to.
+ *
+ * \section camera-pipeline-model Pipeline Model
+ *
+ * Camera hardware differs in the supported image processing operations and the
+ * order in which they are applied. The libcamera pipelines abstract the
+ * hardware differences and expose a logical view of the processing operations
+ * with a fixed order. This offers low-level control of those operations to
+ * applications, while keeping application code generic.
+ *
+ * Starting from the camera sensor, a pipeline applies the following
+ * operations, in that order.
+ *
+ * - Pixel exposure
+ * - Analog to digital conversion and readout
+ * - Black level subtraction
+ * - Defective pixel correction
+ * - Lens shading correction
+ * - Spatial noise filtering
+ * - Per-channel gains (white balance)
+ * - Demosaicing (color filter array interpolation)
+ * - Color correction matrix (typically RGB to RGB)
+ * - Gamma correction
+ * - Color space transformation (typically RGB to YUV)
+ * - Cropping
+ * - Scaling
+ *
+ * Not all cameras implement all operations, and they are not necessarily
+ * implemented in the above order at the hardware level. The libcamera pipeline
+ * handlers translate the pipeline model to the real hardware configuration.
+ *
+ * \subsection digital-zoom Digital Zoom
+ *
+ * Digital zoom is implemented as a combination of the cropping and scaling
+ * stages of the pipeline. Cropping is controlled explicitly through the
+ * controls::ScalerCrop control, while scaling is controlled implicitly based
+ * on the crop rectangle and the output stream size. The crop rectangle is
+ * expressed relative to the full pixel array size and indicates how the field
+ * of view is affected by the pipeline.
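+ *
+ * For example, an application could request a 2x zoom centred on the pixel
+ * array with (a sketch, assuming a hypothetical 4056x3040 pixel array):
+ * \code
+ * Rectangle crop(1014, 760, 2028, 1520);
+ * request->controls().set(controls::ScalerCrop, crop);
+ * \endcode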
*/
namespace libcamera {
@@ -270,23 +331,29 @@ std::size_t CameraConfiguration::size() const
* \brief The vector of stream configurations
*/
-class Camera::Private
+class Camera::Private : public Extensible::Private
{
+ LIBCAMERA_DECLARE_PUBLIC(Camera)
+
public:
enum State {
CameraAvailable,
CameraAcquired,
CameraConfigured,
+ CameraStopping,
CameraRunning,
};
- Private(PipelineHandler *pipe, const std::string &id,
+ Private(Camera *camera, PipelineHandler *pipe, const std::string &id,
const std::set<Stream *> &streams);
~Private();
- int isAccessAllowed(State state, bool allowDisconnected = false) const;
+ bool isRunning() const;
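+
+ /*
+ * The \a from argument defaults to __builtin_FUNCTION() (a GCC/Clang
+ * built-in similar to C++20's std::source_location) so the name of the
+ * calling function is captured automatically for error logging.
+ */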
+ int isAccessAllowed(State state, bool allowDisconnected = false,
+ const char *from = __builtin_FUNCTION()) const;
int isAccessAllowed(State low, State high,
- bool allowDisconnected = false) const;
+ bool allowDisconnected = false,
+ const char *from = __builtin_FUNCTION()) const;
void disconnect();
void setState(State state);
@@ -301,10 +368,11 @@ private:
std::atomic<State> state_;
};
-Camera::Private::Private(PipelineHandler *pipe, const std::string &id,
+Camera::Private::Private(Camera *camera, PipelineHandler *pipe,
+ const std::string &id,
const std::set<Stream *> &streams)
- : pipe_(pipe->shared_from_this()), id_(id), streams_(streams),
- disconnected_(false), state_(CameraAvailable)
+ : Extensible::Private(camera), pipe_(pipe->shared_from_this()), id_(id),
+ streams_(streams), disconnected_(false), state_(CameraAvailable)
{
}
@@ -318,10 +386,17 @@ static const char *const camera_state_names[] = {
"Available",
"Acquired",
"Configured",
+ "Stopping",
"Running",
};
-int Camera::Private::isAccessAllowed(State state, bool allowDisconnected) const
+bool Camera::Private::isRunning() const
+{
+ return state_.load(std::memory_order_acquire) == CameraRunning;
+}
+
+int Camera::Private::isAccessAllowed(State state, bool allowDisconnected,
+ const char *from) const
{
if (!allowDisconnected && disconnected_)
return -ENODEV;
@@ -330,17 +405,18 @@ int Camera::Private::isAccessAllowed(State state, bool allowDisconnected) const
if (currentState == state)
return 0;
- ASSERT(static_cast<unsigned int>(state) < ARRAY_SIZE(camera_state_names));
+ ASSERT(static_cast<unsigned int>(state) < std::size(camera_state_names));
- LOG(Camera, Debug) << "Camera in " << camera_state_names[currentState]
- << " state trying operation requiring state "
+ LOG(Camera, Error) << "Camera in " << camera_state_names[currentState]
+ << " state trying " << from << "() requiring state "
<< camera_state_names[state];
return -EACCES;
}
int Camera::Private::isAccessAllowed(State low, State high,
- bool allowDisconnected) const
+ bool allowDisconnected,
+ const char *from) const
{
if (!allowDisconnected && disconnected_)
return -ENODEV;
@@ -349,11 +425,12 @@ int Camera::Private::isAccessAllowed(State low, State high,
if (currentState >= low && currentState <= high)
return 0;
- ASSERT(static_cast<unsigned int>(low) < ARRAY_SIZE(camera_state_names) &&
- static_cast<unsigned int>(high) < ARRAY_SIZE(camera_state_names));
+ ASSERT(static_cast<unsigned int>(low) < std::size(camera_state_names) &&
+ static_cast<unsigned int>(high) < std::size(camera_state_names));
- LOG(Camera, Debug) << "Camera in " << camera_state_names[currentState]
- << " state trying operation requiring state between "
+ LOG(Camera, Error) << "Camera in " << camera_state_names[currentState]
+ << " state trying " << from
+ << "() requiring state between "
<< camera_state_names[low] << " and "
<< camera_state_names[high];
@@ -424,6 +501,7 @@ void Camera::Private::setState(State state)
* node [shape = doublecircle ]; Available;
* node [shape = circle ]; Acquired;
* node [shape = circle ]; Configured;
+ * node [shape = circle ]; Stopping;
* node [shape = circle ]; Running;
*
* Available -> Available [label = "release()"];
@@ -436,7 +514,8 @@ void Camera::Private::setState(State state)
* Configured -> Configured [label = "configure(), createRequest()"];
* Configured -> Running [label = "start()"];
*
- * Running -> Configured [label = "stop()"];
+ * Running -> Stopping [label = "stop()"];
+ * Stopping -> Configured;
* Running -> Running [label = "createRequest(), queueRequest()"];
* }
* \enddot
@@ -456,6 +535,12 @@ void Camera::Private::setState(State state)
* release() the camera and to get back to the Available state or start()
* it to progress to the Running state.
*
+ * \subsubsection Stopping
+ * The camera has been asked to stop. Pending requests are being completed or
+ * cancelled, and no new requests are permitted to be queued. The camera will
+ * transition to the Configured state when all queued requests have been
+ * returned to the application.
+ *
* \subsubsection Running
* The camera is running and ready to process requests queued by the
* application. The camera remains in this state until it is stopped and moved
@@ -519,7 +604,7 @@ std::shared_ptr<Camera> Camera::create(PipelineHandler *pipe,
*/
const std::string &Camera::id() const
{
- return p_->id_;
+ return _d()->id_;
}
/**
@@ -547,7 +632,7 @@ const std::string &Camera::id() const
Camera::Camera(PipelineHandler *pipe, const std::string &id,
const std::set<Stream *> &streams)
- : p_(new Private(pipe, id, streams))
+ : Extensible(new Private(this, pipe, id, streams))
{
}
@@ -571,26 +656,28 @@ void Camera::disconnect()
{
LOG(Camera, Debug) << "Disconnecting camera " << id();
- p_->disconnect();
+ _d()->disconnect();
disconnected.emit(this);
}
int Camera::exportFrameBuffers(Stream *stream,
std::vector<std::unique_ptr<FrameBuffer>> *buffers)
{
- int ret = p_->isAccessAllowed(Private::CameraConfigured);
+ Private *const d = _d();
+
+ int ret = d->isAccessAllowed(Private::CameraConfigured);
if (ret < 0)
return ret;
if (streams().find(stream) == streams().end())
return -EINVAL;
- if (p_->activeStreams_.find(stream) == p_->activeStreams_.end())
+ if (d->activeStreams_.find(stream) == d->activeStreams_.end())
return -EINVAL;
- return p_->pipe_->invokeMethod(&PipelineHandler::exportFrameBuffers,
- ConnectionTypeBlocking, this, stream,
- buffers);
+ return d->pipe_->invokeMethod(&PipelineHandler::exportFrameBuffers,
+ ConnectionTypeBlocking, this, stream,
+ buffers);
}
/**
@@ -619,21 +706,23 @@ int Camera::exportFrameBuffers(Stream *stream,
*/
int Camera::acquire()
{
+ Private *const d = _d();
+
/*
* No manual locking is required as PipelineHandler::lock() is
* thread-safe.
*/
- int ret = p_->isAccessAllowed(Private::CameraAvailable);
+ int ret = d->isAccessAllowed(Private::CameraAvailable);
if (ret < 0)
return ret == -EACCES ? -EBUSY : ret;
- if (!p_->pipe_->lock()) {
+ if (!d->pipe_->lock()) {
LOG(Camera, Info)
<< "Pipeline handler in use by another process";
return -EBUSY;
}
- p_->setState(Private::CameraAcquired);
+ d->setState(Private::CameraAcquired);
return 0;
}
@@ -654,14 +743,16 @@ int Camera::acquire()
*/
int Camera::release()
{
- int ret = p_->isAccessAllowed(Private::CameraAvailable,
- Private::CameraConfigured, true);
+ Private *const d = _d();
+
+ int ret = d->isAccessAllowed(Private::CameraAvailable,
+ Private::CameraConfigured, true);
if (ret < 0)
return ret == -EACCES ? -EBUSY : ret;
- p_->pipe_->unlock();
+ d->pipe_->unlock();
- p_->setState(Private::CameraAvailable);
+ d->setState(Private::CameraAvailable);
return 0;
}
@@ -678,7 +769,7 @@ int Camera::release()
*/
const ControlInfoMap &Camera::controls() const
{
- return p_->pipe_->controls(this);
+ return _d()->pipe_->controls(this);
}
/**
@@ -691,7 +782,7 @@ const ControlInfoMap &Camera::controls() const
*/
const ControlList &Camera::properties() const
{
- return p_->pipe_->properties(this);
+ return _d()->pipe_->properties(this);
}
/**
@@ -707,7 +798,7 @@ const ControlList &Camera::properties() const
*/
const std::set<Stream *> &Camera::streams() const
{
- return p_->streams_;
+ return _d()->streams_;
}
/**
@@ -728,15 +819,17 @@ const std::set<Stream *> &Camera::streams() const
*/
std::unique_ptr<CameraConfiguration> Camera::generateConfiguration(const StreamRoles &roles)
{
- int ret = p_->isAccessAllowed(Private::CameraAvailable,
- Private::CameraRunning);
+ Private *const d = _d();
+
+ int ret = d->isAccessAllowed(Private::CameraAvailable,
+ Private::CameraRunning);
if (ret < 0)
return nullptr;
if (roles.size() > streams().size())
return nullptr;
- CameraConfiguration *config = p_->pipe_->generateConfiguration(this, roles);
+ CameraConfiguration *config = d->pipe_->generateConfiguration(this, roles);
if (!config) {
LOG(Camera, Debug)
<< "Pipeline handler failed to generate configuration";
@@ -787,8 +880,10 @@ std::unique_ptr<CameraConfiguration> Camera::generateConfiguration(const StreamR
*/
int Camera::configure(CameraConfiguration *config)
{
- int ret = p_->isAccessAllowed(Private::CameraAcquired,
- Private::CameraConfigured);
+ Private *const d = _d();
+
+ int ret = d->isAccessAllowed(Private::CameraAcquired,
+ Private::CameraConfigured);
if (ret < 0)
return ret;
@@ -810,26 +905,26 @@ int Camera::configure(CameraConfiguration *config)
LOG(Camera, Info) << msg.str();
- ret = p_->pipe_->invokeMethod(&PipelineHandler::configure,
- ConnectionTypeBlocking, this, config);
+ ret = d->pipe_->invokeMethod(&PipelineHandler::configure,
+ ConnectionTypeBlocking, this, config);
if (ret)
return ret;
- p_->activeStreams_.clear();
+ d->activeStreams_.clear();
for (const StreamConfiguration &cfg : *config) {
Stream *stream = cfg.stream();
if (!stream) {
LOG(Camera, Fatal)
<< "Pipeline handler failed to update stream configuration";
- p_->activeStreams_.clear();
+ d->activeStreams_.clear();
return -EINVAL;
}
stream->configuration_ = cfg;
- p_->activeStreams_.insert(stream);
+ d->activeStreams_.insert(stream);
}
- p_->setState(Private::CameraConfigured);
+ d->setState(Private::CameraConfigured);
return 0;
}
@@ -847,21 +942,22 @@ int Camera::configure(CameraConfiguration *config)
* handler, and is completely opaque to libcamera.
*
* The ownership of the returned request is passed to the caller, which is
- * responsible for either queueing the request or deleting it.
+ * responsible for deleting it. The request may be deleted in the completion
+ * handler, or reused after resetting its state with Request::reuse().
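+ *
+ * A typical usage pattern (a sketch, error handling omitted) is:
+ * \code
+ * std::unique_ptr<Request> request = camera->createRequest();
+ * request->addBuffer(stream, buffer);
+ * camera->queueRequest(request.get());
+ *
+ * // Once the request has completed it may be reused:
+ * request->reuse(Request::ReuseBuffers);
+ * camera->queueRequest(request.get());
+ * \endcode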
*
* \context This function is \threadsafe. It may only be called when the camera
* is in the Configured or Running state as defined in \ref camera_operation.
*
* \return A pointer to the newly created request, or nullptr on error
*/
-Request *Camera::createRequest(uint64_t cookie)
+std::unique_ptr<Request> Camera::createRequest(uint64_t cookie)
{
- int ret = p_->isAccessAllowed(Private::CameraConfigured,
- Private::CameraRunning);
+ int ret = _d()->isAccessAllowed(Private::CameraConfigured,
+ Private::CameraRunning);
if (ret < 0)
return nullptr;
- return new Request(this, cookie);
+ return std::make_unique<Request>(this, cookie);
}
/**
@@ -877,9 +973,6 @@ Request *Camera::createRequest(uint64_t cookie)
* Once the request has been queued, the camera will notify its completion
* through the \ref requestCompleted signal.
*
- * Ownership of the request is transferred to the camera. It will be deleted
- * automatically after it completes.
- *
* \context This function is \threadsafe. It may only be called when the camera
* is in the Running state as defined in \ref camera_operation.
*
@@ -891,12 +984,14 @@ Request *Camera::createRequest(uint64_t cookie)
*/
int Camera::queueRequest(Request *request)
{
- int ret = p_->isAccessAllowed(Private::CameraRunning);
+ Private *const d = _d();
+
+ int ret = d->isAccessAllowed(Private::CameraRunning);
if (ret < 0)
return ret;
/*
- * The camera state may chance until the end of the function. No locking
+ * The camera state may change until the end of the function. No locking
* is however needed as PipelineHandler::queueRequest() will handle
* this.
*/
@@ -909,22 +1004,26 @@ int Camera::queueRequest(Request *request)
for (auto const &it : request->buffers()) {
const Stream *stream = it.first;
- if (p_->activeStreams_.find(stream) == p_->activeStreams_.end()) {
+ if (d->activeStreams_.find(stream) == d->activeStreams_.end()) {
LOG(Camera, Error) << "Invalid request";
return -EINVAL;
}
}
- return p_->pipe_->invokeMethod(&PipelineHandler::queueRequest,
- ConnectionTypeQueued, this, request);
+ d->pipe_->invokeMethod(&PipelineHandler::queueRequest,
+ ConnectionTypeQueued, request);
+
+ return 0;
}
/**
* \brief Start capture from camera
+ * \param[in] controls Controls to be applied before starting the Camera
*
- * Start the camera capture session. Once the camera is started the application
- * can queue requests to the camera to process and return to the application
- * until the capture session is terminated with \a stop().
+ * Start the camera capture session, optionally providing a list of controls to
+ * apply before starting. Once the camera is started the application can queue
+ * requests to the camera to process and return to the application until the
+ * capture session is terminated with \a stop().
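+ *
+ * A minimal sketch (the control and its value are purely illustrative):
+ * \code
+ * ControlList initControls(camera->controls());
+ * initControls.set(controls::Brightness, 0.5f);
+ * camera->start(&initControls);
+ * \endcode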
*
* \context This function may only be called when the camera is in the
* Configured state as defined in \ref camera_operation, and shall be
@@ -935,20 +1034,22 @@ int Camera::queueRequest(Request *request)
* \retval -ENODEV The camera has been disconnected from the system
* \retval -EACCES The camera is not in a state where it can be started
*/
-int Camera::start()
+int Camera::start(const ControlList *controls)
{
- int ret = p_->isAccessAllowed(Private::CameraConfigured);
+ Private *const d = _d();
+
+ int ret = d->isAccessAllowed(Private::CameraConfigured);
if (ret < 0)
return ret;
LOG(Camera, Debug) << "Starting capture";
- ret = p_->pipe_->invokeMethod(&PipelineHandler::start,
- ConnectionTypeBlocking, this);
+ ret = d->pipe_->invokeMethod(&PipelineHandler::start,
+ ConnectionTypeBlocking, this, controls);
if (ret)
return ret;
- p_->setState(Private::CameraRunning);
+ d->setState(Private::CameraRunning);
return 0;
}
@@ -959,9 +1060,10 @@ int Camera::start()
* This method stops capturing and processing requests immediately. All pending
* requests are cancelled and complete synchronously in an error state.
*
- * \context This function may only be called when the camera is in the Running
- * state as defined in \ref camera_operation, and shall be synchronized by the
- * caller with other functions that affect the camera state.
+ * \context This function may be called in any camera state as defined in \ref
+ * camera_operation, and shall be synchronized by the caller with other
+ * functions that affect the camera state. If called when the camera isn't
+ * running, it is a no-op.
*
* \return 0 on success or a negative error code otherwise
* \retval -ENODEV The camera has been disconnected from the system
@@ -969,16 +1071,29 @@ int Camera::start()
*/
int Camera::stop()
{
- int ret = p_->isAccessAllowed(Private::CameraRunning);
+ Private *const d = _d();
+
+ /*
+ * \todo Make calling stop() when not in 'Running' part of the state
+ * machine rather than take this shortcut
+ */
+ if (!d->isRunning())
+ return 0;
+
+ int ret = d->isAccessAllowed(Private::CameraRunning);
if (ret < 0)
return ret;
LOG(Camera, Debug) << "Stopping capture";
- p_->setState(Private::CameraConfigured);
+ d->setState(Private::CameraStopping);
- p_->pipe_->invokeMethod(&PipelineHandler::stop, ConnectionTypeBlocking,
- this);
+ d->pipe_->invokeMethod(&PipelineHandler::stop, ConnectionTypeBlocking,
+ this);
+
+ ASSERT(!d->pipe_->hasPendingRequests(this));
+
+ d->setState(Private::CameraConfigured);
return 0;
}
@@ -988,13 +1103,16 @@ int Camera::stop()
* \param[in] request The request that has completed
*
* This function is called by the pipeline handler to notify the camera that
- * the request has completed. It emits the requestCompleted signal and deletes
- * the request.
+ * the request has completed. It emits the requestCompleted signal.
*/
void Camera::requestComplete(Request *request)
{
+ /* Disconnected cameras are still able to complete requests. */
+ if (_d()->isAccessAllowed(Private::CameraStopping, Private::CameraRunning,
+ true))
+ LOG(Camera, Fatal) << "Trying to complete a request when stopped";
+
requestCompleted.emit(request);
- delete request;
}
} /* namespace libcamera */
diff --git a/src/libcamera/camera_manager.cpp b/src/libcamera/camera_manager.cpp
index 47d56256..1c79308a 100644
--- a/src/libcamera/camera_manager.cpp
+++ b/src/libcamera/camera_manager.cpp
@@ -11,27 +11,33 @@
#include <map>
#include <libcamera/camera.h>
-#include <libcamera/event_dispatcher.h>
+
+#include <libcamera/base/log.h>
+#include <libcamera/base/thread.h>
+#include <libcamera/base/utils.h>
#include "libcamera/internal/device_enumerator.h"
-#include "libcamera/internal/event_dispatcher_poll.h"
#include "libcamera/internal/ipa_manager.h"
-#include "libcamera/internal/log.h"
#include "libcamera/internal/pipeline_handler.h"
-#include "libcamera/internal/thread.h"
-#include "libcamera/internal/utils.h"
+#include "libcamera/internal/process.h"
/**
* \file camera_manager.h
* \brief The camera manager
*/
+/**
+ * \brief Top-level libcamera namespace
+ */
namespace libcamera {
LOG_DEFINE_CATEGORY(Camera)
-class CameraManager::Private : public Thread
+class CameraManager::Private : public Extensible::Private, public Thread
{
+ LIBCAMERA_DECLARE_PUBLIC(CameraManager)
+
public:
Private(CameraManager *cm);
@@ -46,7 +52,7 @@ public:
* - initialized_ and status_ during initialization
* - cameras_ and camerasByDevnum_ after initialization
*/
- Mutex mutex_;
+ mutable Mutex mutex_;
std::vector<std::shared_ptr<Camera>> cameras_;
std::map<dev_t, std::weak_ptr<Camera>> camerasByDevnum_;
@@ -58,8 +64,6 @@ private:
void createPipelineHandlers();
void cleanup();
- CameraManager *cm_;
-
std::condition_variable cv_;
bool initialized_;
int status_;
@@ -67,10 +71,11 @@ private:
std::unique_ptr<DeviceEnumerator> enumerator_;
IPAManager ipaManager_;
+ ProcessManager processManager_;
};
CameraManager::Private::Private(CameraManager *cm)
- : cm_(cm), initialized_(false)
+ : Extensible::Private(cm), initialized_(false)
{
}
@@ -131,6 +136,8 @@ int CameraManager::Private::init()
void CameraManager::Private::createPipelineHandlers()
{
+ CameraManager *const o = LIBCAMERA_O_PTR();
+
/*
* \todo Try to read handlers and order from configuration
* file and only fallback on all handlers if there is no
@@ -140,12 +147,15 @@ void CameraManager::Private::createPipelineHandlers()
PipelineHandlerFactory::factories();
for (PipelineHandlerFactory *factory : factories) {
+ LOG(Camera, Debug)
+ << "Found registered pipeline handler '"
+ << factory->name() << "'";
/*
* Try each pipeline handler until it exhausts
* all pipelines it can provide.
*/
while (1) {
- std::shared_ptr<PipelineHandler> pipe = factory->create(cm_);
+ std::shared_ptr<PipelineHandler> pipe = factory->create(o);
if (!pipe->match(enumerator_.get()))
break;
@@ -234,12 +244,8 @@ void CameraManager::Private::removeCamera(Camera *camera)
* a time. Attempting to create a second instance without first deleting the
* existing instance results in undefined behaviour.
*
- * The manager is initially stopped, and shall be configured before being
- * started. In particular a custom event dispatcher shall be installed if
- * needed with CameraManager::setEventDispatcher().
- *
- * Once the camera manager is configured, it shall be started with start().
- * This will enumerate all the cameras present in the system, which can then be
+ * The manager is initially stopped, and shall be started with start(). This
+ * will enumerate all the cameras present in the system, which can then be
* listed with list() and retrieved with get().
*
* Cameras are shared through std::shared_ptr<>, ensuring that a camera will
@@ -247,16 +253,12 @@ void CameraManager::Private::removeCamera(Camera *camera)
* action from the application. Once the application has released all the
* references it held to cameras, the camera manager can be stopped with
* stop().
- *
- * \todo Add interface to register a notification callback to the user to be
- * able to inform it new cameras have been hot-plugged or cameras have been
- * removed due to hot-unplug.
*/
CameraManager *CameraManager::self_ = nullptr;
CameraManager::CameraManager()
- : p_(new CameraManager::Private(this))
+ : Extensible(new CameraManager::Private(this))
{
if (self_)
LOG(Camera, Fatal)
@@ -265,6 +267,11 @@ CameraManager::CameraManager()
self_ = this;
}
+/**
+ * \brief Destroy the camera manager
+ *
+ * Destroying the camera manager stops it if it is currently running.
+ */
CameraManager::~CameraManager()
{
stop();
@@ -286,7 +293,7 @@ int CameraManager::start()
{
LOG(Camera, Info) << "libcamera " << version_;
- int ret = p_->start();
+ int ret = _d()->start();
if (ret)
LOG(Camera, Error) << "Failed to start camera manager: "
<< strerror(-ret);
@@ -306,8 +313,9 @@ int CameraManager::start()
*/
void CameraManager::stop()
{
- p_->exit();
- p_->wait();
+ Private *const d = _d();
+ d->exit();
+ d->wait();
}
/**
@@ -323,9 +331,11 @@ void CameraManager::stop()
*/
std::vector<std::shared_ptr<Camera>> CameraManager::cameras() const
{
- MutexLocker locker(p_->mutex_);
+ const Private *const d = _d();
- return p_->cameras_;
+ MutexLocker locker(d->mutex_);
+
+ return d->cameras_;
}
/**
@@ -341,9 +351,11 @@ std::vector<std::shared_ptr<Camera>> CameraManager::cameras() const
*/
std::shared_ptr<Camera> CameraManager::get(const std::string &id)
{
- MutexLocker locker(p_->mutex_);
+ Private *const d = _d();
+
+ MutexLocker locker(d->mutex_);
- for (std::shared_ptr<Camera> camera : p_->cameras_) {
+ for (std::shared_ptr<Camera> camera : d->cameras_) {
if (camera->id() == id)
return camera;
}
@@ -369,10 +381,12 @@ std::shared_ptr<Camera> CameraManager::get(const std::string &id)
*/
std::shared_ptr<Camera> CameraManager::get(dev_t devnum)
{
- MutexLocker locker(p_->mutex_);
+ Private *const d = _d();
- auto iter = p_->camerasByDevnum_.find(devnum);
- if (iter == p_->camerasByDevnum_.end())
+ MutexLocker locker(d->mutex_);
+
+ auto iter = d->camerasByDevnum_.find(devnum);
+ if (iter == d->camerasByDevnum_.end())
return nullptr;
return iter->second.lock();
@@ -423,9 +437,11 @@ std::shared_ptr<Camera> CameraManager::get(dev_t devnum)
void CameraManager::addCamera(std::shared_ptr<Camera> camera,
const std::vector<dev_t> &devnums)
{
- ASSERT(Thread::current() == p_.get());
+ Private *const d = _d();
+
+ ASSERT(Thread::current() == d);
- p_->addCamera(camera, devnums);
+ d->addCamera(camera, devnums);
cameraAdded.emit(camera);
}
@@ -441,9 +457,11 @@ void CameraManager::addCamera(std::shared_ptr<Camera> camera,
*/
void CameraManager::removeCamera(std::shared_ptr<Camera> camera)
{
- ASSERT(Thread::current() == p_.get());
+ Private *const d = _d();
- p_->removeCamera(camera.get());
+ ASSERT(Thread::current() == d);
+
+ d->removeCamera(camera.get());
cameraRemoved.emit(camera);
}
@@ -454,38 +472,4 @@ void CameraManager::removeCamera(std::shared_ptr<Camera> camera)
* \return The libcamera version string
*/
-/**
- * \brief Set the event dispatcher
- * \param[in] dispatcher Pointer to the event dispatcher
- *
- * libcamera requires an event dispatcher to integrate event notification and
- * timers with the application event loop. Applications that want to provide
- * their own event dispatcher shall call this function once and only once before
- * the camera manager is started with start(). If no event dispatcher is
- * provided, a default poll-based implementation will be used.
- *
- * The CameraManager takes ownership of the event dispatcher and will delete it
- * when the application terminates.
- */
-void CameraManager::setEventDispatcher(std::unique_ptr<EventDispatcher> dispatcher)
-{
- thread()->setEventDispatcher(std::move(dispatcher));
-}
-
-/**
- * \brief Retrieve the event dispatcher
- *
- * This function retrieves the event dispatcher set with setEventDispatcher().
- * If no dispatcher has been set, a default poll-based implementation is created
- * and returned, and no custom event dispatcher may be installed anymore.
- *
- * The returned event dispatcher is valid until the camera manager is destroyed.
- *
- * \return Pointer to the event dispatcher
- */
-EventDispatcher *CameraManager::eventDispatcher()
-{
- return thread()->eventDispatcher();
-}
-
} /* namespace libcamera */
diff --git a/src/libcamera/camera_sensor.cpp b/src/libcamera/camera_sensor.cpp
index d2679a4b..cde431cc 100644
--- a/src/libcamera/camera_sensor.cpp
+++ b/src/libcamera/camera_sensor.cpp
@@ -6,6 +6,7 @@
*/
#include "libcamera/internal/camera_sensor.h"
+#include "libcamera/internal/media_device.h"
#include <algorithm>
#include <float.h>
@@ -17,9 +18,12 @@
#include <libcamera/property_ids.h>
+#include <libcamera/base/utils.h>
+
+#include "libcamera/internal/bayer_format.h"
+#include "libcamera/internal/camera_sensor_properties.h"
#include "libcamera/internal/formats.h"
#include "libcamera/internal/sysfs.h"
-#include "libcamera/internal/utils.h"
/**
* \file camera_sensor.h
@@ -28,88 +32,7 @@
namespace libcamera {
-LOG_DEFINE_CATEGORY(CameraSensor);
-
-/**
- * \struct CameraSensorInfo
- * \brief Report the image sensor characteristics
- *
- * The structure reports image sensor characteristics used by IPA modules to
- * tune their algorithms based on the image sensor model currently in use and
- * its configuration.
- *
- * The reported information describes the sensor's intrinsics characteristics,
- * such as its pixel array size and the sensor model name, as well as
- * information relative to the currently configured mode, such as the produced
- * image size and the bit depth of the requested image format.
- *
- * Instances of this structure are meant to be assembled by the CameraSensor
- * class by inspecting the sensor static properties as well as information
- * relative to the current configuration.
- */
-
-/**
- * \var CameraSensorInfo::model
- * \brief The image sensor model name
- *
- * The sensor model name is a free-formed string that uniquely identifies the
- * sensor model.
- */
-
-/**
- * \var CameraSensorInfo::bitsPerPixel
- * \brief The number of bits per pixel of the image format produced by the
- * image sensor
- */
-
-/**
- * \var CameraSensorInfo::activeAreaSize
- * \brief The size of the pixel array active area of the sensor
- */
-
-/**
- * \var CameraSensorInfo::analogCrop
- * \brief The portion of the pixel array active area which is read-out and
- * processed
- *
- * The analog crop rectangle top-left corner is defined as the displacement
- * from the top-left corner of the pixel array active area. The rectangle
- * horizontal and vertical sizes define the portion of the pixel array which
- * is read-out and provided to the sensor's internal processing pipeline, before
- * any pixel sub-sampling method, such as pixel binning, skipping and averaging
- * take place.
- */
-
-/**
- * \var CameraSensorInfo::outputSize
- * \brief The size of the images produced by the camera sensor
- *
- * The output image size defines the horizontal and vertical sizes of the images
- * produced by the image sensor. The output image size is defined as the end
- * result of the sensor's internal image processing pipeline stages, applied on
- * the pixel array portion defined by the analog crop rectangle. Each image
- * processing stage that performs pixel sub-sampling techniques, such as pixel
- * binning or skipping, or perform any additional digital scaling concur in the
- * definition of the output image size.
- */
-
-/**
- * \var CameraSensorInfo::pixelRate
- * \brief The number of pixels produced in a second
- *
- * To obtain the read-out time in seconds of a full line:
- *
- * \verbatim
- lineDuration(s) = lineLength(pixels) / pixelRate(pixels per second)
- \endverbatim
- */
-
-/**
- * \var CameraSensorInfo::lineLength
- * \brief Total line length in pixels
- *
- * The total line length in pixel clock periods, including blanking.
- */
+LOG_DEFINE_CATEGORY(CameraSensor)
/**
* \class CameraSensor
@@ -131,7 +54,8 @@ LOG_DEFINE_CATEGORY(CameraSensor);
* Once constructed the instance must be initialized with init().
*/
CameraSensor::CameraSensor(const MediaEntity *entity)
- : entity_(entity), pad_(UINT_MAX), properties_(properties::properties)
+ : entity_(entity), pad_(UINT_MAX), bayerFormat_(nullptr),
+ properties_(properties::properties)
{
}
@@ -165,13 +89,253 @@ int CameraSensor::init()
return -EINVAL;
}
- if (entity_->function() != MEDIA_ENT_F_CAM_SENSOR) {
+ switch (entity_->function()) {
+ case MEDIA_ENT_F_CAM_SENSOR:
+ case MEDIA_ENT_F_PROC_VIDEO_ISP:
+ break;
+
+ default:
LOG(CameraSensor, Error)
<< "Invalid sensor function "
<< utils::hex(entity_->function());
return -EINVAL;
}
+ /* Create and open the subdev. */
+ subdev_ = std::make_unique<V4L2Subdevice>(entity_);
+ int ret = subdev_->open();
+ if (ret < 0)
+ return ret;
+
+ /* Enumerate, sort and cache media bus codes and sizes. */
+ formats_ = subdev_->formats(pad_);
+ if (formats_.empty()) {
+ LOG(CameraSensor, Error) << "No image format found";
+ return -EINVAL;
+ }
+
+ mbusCodes_ = utils::map_keys(formats_);
+ std::sort(mbusCodes_.begin(), mbusCodes_.end());
+
+ for (const auto &format : formats_) {
+ const std::vector<SizeRange> &ranges = format.second;
+ std::transform(ranges.begin(), ranges.end(), std::back_inserter(sizes_),
+ [](const SizeRange &range) { return range.max; });
+ }
+
+ std::sort(sizes_.begin(), sizes_.end());
+
+ /* Remove duplicates. */
+ auto last = std::unique(sizes_.begin(), sizes_.end());
+ sizes_.erase(last, sizes_.end());
+
+ /*
+ * VIMC is a bit special, as it does not yet support all the mandatory
+ * requirements regular sensors have to respect.
+ *
+ * Do not validate the driver if it's VIMC and initialize the sensor
+ * properties with static information.
+ *
+ * \todo Remove the special case once the VIMC driver has been
+ * updated in all test platforms.
+ */
+ if (entity_->device()->driver() == "vimc") {
+ initVimcDefaultProperties();
+ return initProperties();
+ }
+
+ /* Get the color filter array pattern (only for RAW sensors). */
+ for (unsigned int mbusCode : mbusCodes_) {
+ const BayerFormat &bayerFormat = BayerFormat::fromMbusCode(mbusCode);
+ if (bayerFormat.isValid()) {
+ bayerFormat_ = &bayerFormat;
+ break;
+ }
+ }
+
+ ret = validateSensorDriver();
+ if (ret)
+ return ret;
+
+ ret = initProperties();
+ if (ret)
+ return ret;
+
+ return 0;
+}
+
+int CameraSensor::validateSensorDriver()
+{
+ int err = 0;
+
+ /*
+ * Optional controls are used to register optional sensor properties. If
+ * not present, some values will be defaulted.
+ */
+ static constexpr uint32_t optionalControls[] = {
+ V4L2_CID_CAMERA_SENSOR_ROTATION,
+ };
+
+ const ControlIdMap &controls = subdev_->controls().idmap();
+ for (uint32_t ctrl : optionalControls) {
+ if (!controls.count(ctrl))
+ LOG(CameraSensor, Debug)
+ << "Optional V4L2 control " << utils::hex(ctrl)
+ << " not supported";
+ }
+
+ /*
+ * Recommended controls are similar to optional controls, but will
+ * become mandatory in the near future. Be loud if they're missing.
+ */
+ static constexpr uint32_t recommendedControls[] = {
+ V4L2_CID_CAMERA_ORIENTATION,
+ };
+
+ for (uint32_t ctrl : recommendedControls) {
+ if (!controls.count(ctrl)) {
+ LOG(CameraSensor, Warning)
+ << "Recommended V4L2 control " << utils::hex(ctrl)
+ << " not supported";
+ err = -EINVAL;
+ }
+ }
+
+ /*
+ * Make sure the required selection targets are supported.
+ *
+ * Failures in reading any of the targets are not deemed to be fatal,
+ * but some properties and features, like constructing a
+ * IPACameraSensorInfo for the IPA module, won't be supported.
+ *
+ * \todo Make support for selection targets mandatory as soon as all
+ * test platforms have been updated.
+ */
+ Rectangle rect;
+ int ret = subdev_->getSelection(pad_, V4L2_SEL_TGT_CROP_BOUNDS, &rect);
+ if (ret) {
+ /*
+ * Default the pixel array size to the largest size supported
+ * by the sensor. The sizes_ vector is sorted in ascending
+ * order, the largest size is thus the last element.
+ */
+ pixelArraySize_ = sizes_.back();
+
+ LOG(CameraSensor, Warning)
+ << "The PixelArraySize property has been defaulted to "
+ << pixelArraySize_.toString();
+ err = -EINVAL;
+ } else {
+ pixelArraySize_ = rect.size();
+ }
+
+ ret = subdev_->getSelection(pad_, V4L2_SEL_TGT_CROP_DEFAULT, &activeArea_);
+ if (ret) {
+ activeArea_ = Rectangle(pixelArraySize_);
+ LOG(CameraSensor, Warning)
+ << "The PixelArrayActiveAreas property has been defaulted to "
+ << activeArea_.toString();
+ err = -EINVAL;
+ }
+
+ ret = subdev_->getSelection(pad_, V4L2_SEL_TGT_CROP, &rect);
+ if (ret) {
+ LOG(CameraSensor, Warning)
+ << "Failed to retrieve the sensor crop rectangle";
+ err = -EINVAL;
+ }
+
+ if (err) {
+ LOG(CameraSensor, Warning)
+ << "The sensor kernel driver needs to be fixed";
+ LOG(CameraSensor, Warning)
+ << "See Documentation/sensor_driver_requirements.rst in the libcamera sources for more information";
+ }
+
+ if (!bayerFormat_)
+ return 0;
+
+ /*
+ * For raw sensors, make sure the sensor driver supports the controls
+ * required by the CameraSensor class.
+ */
+ static constexpr uint32_t mandatoryControls[] = {
+ V4L2_CID_EXPOSURE,
+ V4L2_CID_HBLANK,
+ V4L2_CID_PIXEL_RATE,
+ V4L2_CID_VBLANK,
+ };
+
+ err = 0;
+ for (uint32_t ctrl : mandatoryControls) {
+ if (!controls.count(ctrl)) {
+ LOG(CameraSensor, Error)
+ << "Mandatory V4L2 control " << utils::hex(ctrl)
+ << " not available";
+ err = -EINVAL;
+ }
+ }
+
+ if (err) {
+ LOG(CameraSensor, Error)
+ << "The sensor kernel driver needs to be fixed";
+ LOG(CameraSensor, Error)
+ << "See Documentation/sensor_driver_requirements.rst in the libcamera sources for more information";
+ return err;
+ }
+
+ return 0;
+}
+
+/*
+ * \brief Initialize properties that cannot be initialized by the
+ * regular initProperties() function for VIMC
+ */
+void CameraSensor::initVimcDefaultProperties()
+{
+ /* Use the largest supported size. */
+ pixelArraySize_ = sizes_.back();
+ activeArea_ = Rectangle(pixelArraySize_);
+}
+
+void CameraSensor::initStaticProperties()
+{
+ const CameraSensorProperties *props = CameraSensorProperties::get(model_);
+ if (!props)
+ return;
+
+ /* Register the properties retrieved from the sensor database. */
+ properties_.set(properties::UnitCellSize, props->unitCellSize);
+
+ initTestPatternModes(props->testPatternModes);
+}
+
+void CameraSensor::initTestPatternModes(
+ const std::map<int32_t, int32_t> &testPatternModes)
+{
+ const auto &v4l2TestPattern = controls().find(V4L2_CID_TEST_PATTERN);
+ if (v4l2TestPattern == controls().end()) {
+ LOG(CameraSensor, Debug) << "No static test pattern map for \'"
+ << model() << "\'";
+ return;
+ }
+
+ for (const ControlValue &value : v4l2TestPattern->second.values()) {
+ const int32_t index = value.get<int32_t>();
+
+ const auto it = testPatternModes.find(index);
+ if (it == testPatternModes.end()) {
+ LOG(CameraSensor, Debug)
+ << "Test pattern mode " << index << " ignored";
+ continue;
+ }
+
+ testPatternModes_.push_back(it->second);
+ }
+}
+
+int CameraSensor::initProperties()
+{
/*
* Extract the camera sensor model name from the media entity name.
*
@@ -200,22 +364,20 @@ int CameraSensor::init()
else
model_ = entityName;
- /* Create and open the subdev. */
- subdev_ = std::make_unique<V4L2Subdevice>(entity_);
- int ret = subdev_->open();
- if (ret < 0)
- return ret;
+ properties_.set(properties::Model, utils::toAscii(model_));
/* Generate a unique ID for the sensor. */
- ret = generateId();
+ int ret = generateId();
if (ret)
return ret;
- /* Retrieve and store the camera sensor properties. */
+ /* Initialize the static properties from the sensor database. */
+ initStaticProperties();
+
+ /* Retrieve and register properties from the kernel interface. */
const ControlInfoMap &controls = subdev_->controls();
int32_t propertyValue;
- /* Camera Location: default is front location. */
const auto &orientation = controls.find(V4L2_CID_CAMERA_ORIENTATION);
if (orientation != controls.end()) {
int32_t v4l2Orientation = orientation->second.def().get<int32_t>();
@@ -224,58 +386,55 @@ int CameraSensor::init()
default:
LOG(CameraSensor, Warning)
<< "Unsupported camera location "
- << v4l2Orientation << ", setting to Front";
+ << v4l2Orientation << ", setting to External";
/* Fall-through */
+ case V4L2_CAMERA_ORIENTATION_EXTERNAL:
+ propertyValue = properties::CameraLocationExternal;
+ break;
case V4L2_CAMERA_ORIENTATION_FRONT:
propertyValue = properties::CameraLocationFront;
break;
case V4L2_CAMERA_ORIENTATION_BACK:
propertyValue = properties::CameraLocationBack;
break;
- case V4L2_CAMERA_ORIENTATION_EXTERNAL:
- propertyValue = properties::CameraLocationExternal;
- break;
}
+ properties_.set(properties::Location, propertyValue);
} else {
- propertyValue = properties::CameraLocationFront;
+ LOG(CameraSensor, Warning) << "Failed to retrieve the camera location";
}
- properties_.set(properties::Location, propertyValue);
- /* Camera Rotation: default is 0 degrees. */
const auto &rotationControl = controls.find(V4L2_CID_CAMERA_SENSOR_ROTATION);
- if (rotationControl != controls.end())
+ if (rotationControl != controls.end()) {
propertyValue = rotationControl->second.def().get<int32_t>();
- else
- propertyValue = 0;
- properties_.set(properties::Rotation, propertyValue);
-
- /* Enumerate, sort and cache media bus codes and sizes. */
- formats_ = subdev_->formats(pad_);
- if (formats_.empty()) {
- LOG(CameraSensor, Error) << "No image format found";
- return -EINVAL;
+ properties_.set(properties::Rotation, propertyValue);
}
- mbusCodes_ = utils::map_keys(formats_);
- std::sort(mbusCodes_.begin(), mbusCodes_.end());
+ properties_.set(properties::PixelArraySize, pixelArraySize_);
+ properties_.set(properties::PixelArrayActiveAreas, { activeArea_ });
- for (const auto &format : formats_) {
- const std::vector<SizeRange> &ranges = format.second;
- std::transform(ranges.begin(), ranges.end(), std::back_inserter(sizes_),
- [](const SizeRange &range) { return range.max; });
- }
-
- std::sort(sizes_.begin(), sizes_.end());
-
- /* Remove duplicates. */
- auto last = std::unique(sizes_.begin(), sizes_.end());
- sizes_.erase(last, sizes_.end());
+ /* Color filter array pattern, register only for RAW sensors. */
+ if (bayerFormat_) {
+ int32_t cfa;
+ switch (bayerFormat_->order) {
+ case BayerFormat::BGGR:
+ cfa = properties::draft::BGGR;
+ break;
+ case BayerFormat::GBRG:
+ cfa = properties::draft::GBRG;
+ break;
+ case BayerFormat::GRBG:
+ cfa = properties::draft::GRBG;
+ break;
+ case BayerFormat::RGGB:
+ cfa = properties::draft::RGGB;
+ break;
+ case BayerFormat::MONO:
+ cfa = properties::draft::MONO;
+ break;
+ }
- /*
- * The sizes_ vector is sorted in ascending order, the resolution is
- * thus the last element of the vector.
- */
- resolution_ = sizes_.back();
+ properties_.set(properties::draft::ColorFilterArrangement, cfa);
+ }
return 0;
}
@@ -323,10 +482,30 @@ int CameraSensor::init()
*/
/**
- * \fn CameraSensor::resolution()
* \brief Retrieve the camera sensor resolution
+ *
+ * The camera sensor resolution is the active pixel area size, clamped to the
+ * maximum frame size the sensor can produce if it is smaller than the active
+ * pixel area.
+ *
+ * \todo Consider if it is desirable to distinguish between the maximum resolution
+ * the sensor can produce (also including upscaled ones) and the actual pixel
+ * array size by splitting this method in two.
+ *
* \return The camera sensor resolution in pixels
*/
+Size CameraSensor::resolution() const
+{
+ return std::min(sizes_.back(), activeArea_.size());
+}
+
+/**
+ * \fn CameraSensor::testPatternModes()
+ * \brief Retrieve all the supported test pattern modes of the camera sensor
+ *
+ * The test pattern mode values correspond to the controls::TestPattern control.
+ *
+ * \return The list of test pattern modes
+ */
/**
* \brief Retrieve the best sensor format for a desired output
@@ -418,15 +597,30 @@ V4L2SubdeviceFormat CameraSensor::getFormat(const std::vector<unsigned int> &mbu
/**
* \brief Set the sensor output format
* \param[in] format The desired sensor output format
+ *
+ * The ranges of any controls associated with the sensor are also updated.
+ *
* \return 0 on success or a negative error code otherwise
*/
int CameraSensor::setFormat(V4L2SubdeviceFormat *format)
{
- return subdev_->setFormat(pad_, format);
+ int ret = subdev_->setFormat(pad_, format);
+ if (ret)
+ return ret;
+
+ updateControlInfo();
+ return 0;
}
/**
* \brief Retrieve the supported V4L2 controls and their information
+ *
+ * Control information is updated automatically to reflect the current sensor
+ * configuration when the setFormat() function is called, without invalidating
+ * any iterator on the ControlInfoMap. A manual update can also be forced by
+ * calling the updateControlInfo() function for pipeline handlers that change
+ * the sensor configuration without using setFormat().
+ *
* \return A map of the V4L2 controls supported by the sensor
*/
const ControlInfoMap &CameraSensor::controls() const
@@ -435,11 +629,12 @@ const ControlInfoMap &CameraSensor::controls() const
}
/**
- * \brief Read controls from the sensor
+ * \brief Read V4L2 controls from the sensor
* \param[in] ids The list of controls to read, specified by their ID
*
* This method reads the value of all controls contained in \a ids, and returns
- * their values as a ControlList.
+ * their values as a ControlList. The control identifiers are defined by the
+ * V4L2 specification (V4L2_CID_*).
*
* If any control in \a ids is not supported by the device, is disabled (i.e.
* has the V4L2_CTRL_FLAG_DISABLED flag set), or if any other error occurs
@@ -457,18 +652,13 @@ ControlList CameraSensor::getControls(const std::vector<uint32_t> &ids)
}
/**
- * \fn CameraSensor::properties()
- * \brief Retrieve the camera sensor properties
- * \return The list of camera sensor properties
- */
-
-/**
- * \brief Write controls to the sensor
+ * \brief Write V4L2 controls to the sensor
* \param[in] ctrls The list of controls to write
*
* This method writes the value of all controls contained in \a ctrls, and
- * stores the values actually applied to the device in the corresponding
- * \a ctrls entry.
+ * stores the values actually applied to the device in the corresponding
+ * \a ctrls entry. The control identifiers are defined by the V4L2
+ * specification (V4L2_CID_*).
*
* If any control in \a ctrls is not supported by the device, is disabled (i.e.
* has the V4L2_CTRL_FLAG_DISABLED flag set), is read-only, or if any other
@@ -492,39 +682,72 @@ int CameraSensor::setControls(ControlList *ctrls)
}
/**
+ * \fn CameraSensor::device()
+ * \brief Retrieve the camera sensor device
+ * \todo Remove this function by integrating DelayedControl with CameraSensor
+ * \return The camera sensor device
+ */
+
+/**
+ * \fn CameraSensor::properties()
+ * \brief Retrieve the camera sensor properties
+ * \return The list of camera sensor properties
+ */
+
+/**
* \brief Assemble and return the camera sensor info
* \param[out] info The camera sensor info
*
- * The CameraSensorInfo content is assembled by inspecting the currently
- * applied sensor configuration and the sensor static properties.
+ * This function fills \a info with information that describes the camera sensor
+ * and its current configuration. The information combines static data (such as
+ * the sensor model or active pixel array size) and data specific to the
+ * current sensor configuration (such as the line length and pixel rate).
+ *
+ * Sensor information is only available for raw sensors. When called for a YUV
+ * sensor, this function returns -EINVAL.
+ *
+ * Pipeline handlers that do not change the sensor format using the setFormat()
+ * method may need to call updateControlInfo() beforehand, to ensure all the
+ * control ranges are up to date.
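+ *
+ * A typical call sequence from a pipeline handler (a sketch; \a sensor_ is an
+ * assumed CameraSensor member) is:
+ * \code
+ * IPACameraSensorInfo sensorInfo{};
+ * int ret = sensor_->sensorInfo(&sensorInfo);
+ * if (ret)
+ * return ret;
+ * \endcode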
*
* \return 0 on success, a negative error code otherwise
*/
-int CameraSensor::sensorInfo(CameraSensorInfo *info) const
+int CameraSensor::sensorInfo(IPACameraSensorInfo *info) const
{
- info->model = model();
+ if (!bayerFormat_)
+ return -EINVAL;
- /* Get the active area size. */
- Rectangle rect;
- int ret = subdev_->getSelection(pad_, V4L2_SEL_TGT_CROP_DEFAULT, &rect);
- if (ret) {
- LOG(CameraSensor, Error)
- << "Failed to construct camera sensor info: "
- << "the camera sensor does not report the active area";
+ info->model = model();
- return ret;
- }
- info->activeAreaSize = { rect.width, rect.height };
+ /*
+ * The active area size is a static property, while the crop
+ * rectangle needs to be re-read as it depends on the sensor
+ * configuration.
+ */
+ info->activeAreaSize = { activeArea_.width, activeArea_.height };
- /* It's mandatory for the subdevice to report its crop rectangle. */
- ret = subdev_->getSelection(pad_, V4L2_SEL_TGT_CROP, &info->analogCrop);
+ /*
+ * \todo Support for retrieving the crop rectangle is scheduled to
+ * become mandatory. For the time being use the default value if it has
+ * been initialized at sensor driver validation time.
+ */
+ int ret = subdev_->getSelection(pad_, V4L2_SEL_TGT_CROP, &info->analogCrop);
if (ret) {
- LOG(CameraSensor, Error)
- << "Failed to construct camera sensor info: "
- << "the camera sensor does not report the crop rectangle";
- return ret;
+ info->analogCrop = activeArea_;
+ LOG(CameraSensor, Warning)
+ << "The analogue crop rectangle has been defaulted to the active area size";
}
+ /*
+ * IPACameraSensorInfo::analogCrop::x and IPACameraSensorInfo::analogCrop::y
+ * are defined relative to the active pixel area, while V4L2's
+ * TGT_CROP target is defined with respect to the full pixel array.
+ *
+ * Compensate for this by subtracting the active area offset.
+ */
+ info->analogCrop.x -= activeArea_.x;
+ info->analogCrop.y -= activeArea_.y;
+
/* The bit depth and image size depend on the currently applied format. */
V4L2SubdeviceFormat format{};
ret = subdev_->getFormat(pad_, &format);
@@ -534,12 +757,13 @@ int CameraSensor::sensorInfo(CameraSensorInfo *info) const
info->outputSize = format.size;
/*
- * Retrieve the pixel rate and the line length through V4L2 controls.
- * Support for the V4L2_CID_PIXEL_RATE and V4L2_CID_HBLANK controls is
- * mandatory.
+ * Retrieve the pixel rate, line length and minimum/maximum frame
+ * duration through V4L2 controls. Support for the V4L2_CID_PIXEL_RATE,
+ * V4L2_CID_HBLANK and V4L2_CID_VBLANK controls is mandatory.
*/
ControlList ctrls = subdev_->getControls({ V4L2_CID_PIXEL_RATE,
- V4L2_CID_HBLANK });
+ V4L2_CID_HBLANK,
+ V4L2_CID_VBLANK });
if (ctrls.empty()) {
LOG(CameraSensor, Error)
<< "Failed to retrieve camera info controls";
@@ -550,9 +774,23 @@ int CameraSensor::sensorInfo(CameraSensorInfo *info) const
info->lineLength = info->outputSize.width + hblank;
info->pixelRate = ctrls.get(V4L2_CID_PIXEL_RATE).get<int64_t>();
+ const ControlInfo vblank = ctrls.infoMap()->at(V4L2_CID_VBLANK);
+ info->minFrameLength = info->outputSize.height + vblank.min().get<int32_t>();
+ info->maxFrameLength = info->outputSize.height + vblank.max().get<int32_t>();
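+
+ /*
+ * A frame duration in seconds can then be derived as
+ * frameLength (lines) * lineLength (pixels) / pixelRate (pixels/s).
+ */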
+
return 0;
}
+/**
+ * \fn void CameraSensor::updateControlInfo()
+ * \brief Update the sensor's ControlInfoMap in case it has changed
+ * \sa V4L2Device::updateControlInfo()
+ */
+void CameraSensor::updateControlInfo()
+{
+ subdev_->updateControlInfo();
+}
+
std::string CameraSensor::logPrefix() const
{
return "'" + entity_->name() + "'";
diff --git a/src/libcamera/camera_sensor_properties.cpp b/src/libcamera/camera_sensor_properties.cpp
new file mode 100644
index 00000000..7d8ba9e9
--- /dev/null
+++ b/src/libcamera/camera_sensor_properties.cpp
@@ -0,0 +1,119 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_sensor_properties.cpp - Database of camera sensor properties
+ */
+
+#include "libcamera/internal/camera_sensor_properties.h"
+
+#include <map>
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/control_ids.h>
+
+/**
+ * \file camera_sensor_properties.h
+ * \brief Database of camera sensor properties
+ *
+ * The database of camera sensor properties collects static information about
+ * camera sensors that is not possible or desirable to retrieve from the device
+ * at run time.
+ *
+ * The database is indexed using the camera sensor model, as reported by the
+ * properties::Model property, and for each supported sensor it contains a
+ * list of properties.
+ */
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(CameraSensorProperties)
+
+/**
+ * \struct CameraSensorProperties
+ * \brief Database of camera sensor properties
+ *
+ * \var CameraSensorProperties::unitCellSize
+ * \brief The physical size of a pixel, including pixel edges, in nanometers.
+ *
+ * \var CameraSensorProperties::testPatternModes
+ * \brief Map that associates the indexes of the sensor test pattern modes as
+ * returned by V4L2_CID_TEST_PATTERN with the corresponding TestPattern
+ * control value
+ */
+
+/**
+ * \brief Retrieve the properties associated with a sensor
+ * \param sensor The sensor model name as reported by properties::Model
+ * \return A pointer to the CameraSensorProperties instance associated with a sensor
+ * or nullptr if the sensor is not supported
+ */
+const CameraSensorProperties *CameraSensorProperties::get(const std::string &sensor)
+{
+ static const std::map<std::string, const CameraSensorProperties> sensorProps = {
+ { "imx219", {
+ .unitCellSize = { 1120, 1120 },
+ .testPatternModes = {
+ { 0, controls::draft::TestPatternModeOff },
+ { 1, controls::draft::TestPatternModeColorBars },
+ { 2, controls::draft::TestPatternModeSolidColor },
+ { 3, controls::draft::TestPatternModeColorBarsFadeToGray },
+ { 4, controls::draft::TestPatternModePn9 },
+ },
+ } },
+ { "imx258", {
+ .unitCellSize = { 1120, 1120 },
+ .testPatternModes = {
+ { 0, controls::draft::TestPatternModeOff },
+ { 1, controls::draft::TestPatternModeSolidColor },
+ { 2, controls::draft::TestPatternModeColorBars },
+ { 3, controls::draft::TestPatternModeColorBarsFadeToGray },
+ { 4, controls::draft::TestPatternModePn9 },
+ },
+ } },
+ { "ov5670", {
+ .unitCellSize = { 1120, 1120 },
+ .testPatternModes = {
+ { 0, controls::draft::TestPatternModeOff },
+ { 1, controls::draft::TestPatternModeColorBars },
+ },
+ } },
+ { "ov13858", {
+ .unitCellSize = { 1120, 1120 },
+ .testPatternModes = {
+ { 0, controls::draft::TestPatternModeOff },
+ { 1, controls::draft::TestPatternModeColorBars },
+ },
+ } },
+ { "ov5647", {
+ .unitCellSize = { 1400, 1400 },
+ .testPatternModes = {},
+ } },
+ { "ov5693", {
+ .unitCellSize = { 1400, 1400 },
+ .testPatternModes = {
+ { 0, controls::draft::TestPatternModeOff },
+ { 2, controls::draft::TestPatternModeColorBars },
+ /*
+ * No corresponding test pattern mode for
+ * 1: "Random data" and 3: "Colour Bars with
+ * Rolling Bar".
+ */
+ },
+ } },
+ };
+
+ const auto it = sensorProps.find(sensor);
+ if (it == sensorProps.end()) {
+ LOG(CameraSensorProperties, Warning)
+ << "No static properties available for '" << sensor << "'";
+ LOG(CameraSensorProperties, Warning)
+ << "Please consider updating the camera sensor properties database";
+ return nullptr;
+ }
+
+ return &it->second;
+}
+
+} /* namespace libcamera */
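A minimal lookup sketch for the new database (the caller context is hypothetical):

    /* Sketch: query static properties for a sensor model. */
    const CameraSensorProperties *props = CameraSensorProperties::get("imx219");
    if (props) {
        const Size &cellSize = props->unitCellSize; /* { 1120, 1120 } nm */
        /* ... translate to camera properties as appropriate ... */
    }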
diff --git a/src/libcamera/control_ids.cpp.in b/src/libcamera/control_ids.cpp.in
index cba6258d..5fb1c2c3 100644
--- a/src/libcamera/control_ids.cpp.in
+++ b/src/libcamera/control_ids.cpp.in
@@ -8,6 +8,7 @@
*/
#include <libcamera/control_ids.h>
+#include <libcamera/controls.h>
/**
* \file control_ids.h
@@ -23,12 +24,27 @@ namespace controls {
${controls_doc}
+/**
+ * \brief Namespace for libcamera draft controls
+ */
+namespace draft {
+
+${draft_controls_doc}
+
+} /* namespace draft */
+
#ifndef __DOXYGEN__
/*
* Keep the controls definitions hidden from doxygen as it incorrectly parses
* them as functions.
*/
${controls_def}
+
+namespace draft {
+
+${draft_controls_def}
+
+} /* namespace draft */
#endif
/**
diff --git a/src/libcamera/control_ids.yaml b/src/libcamera/control_ids.yaml
index 3560d4a8..d92f29f5 100644
--- a/src/libcamera/control_ids.yaml
+++ b/src/libcamera/control_ids.yaml
@@ -48,9 +48,6 @@ controls:
- name: MeteringCustom
value: 3
description: Custom metering mode.
- - name: MeteringModeMax
- value: 3
- description: Maximum allowed value (place any new values above here).
# AeConstraintMode needs further attention:
# - Auto-generate max enum value.
@@ -85,9 +82,6 @@ controls:
- name: ConstraintCustom
value: 3
description: Custom constraint mode.
- - name: ConstraintModeMax
- value: 3
- description: Maximum allowed value (place any new values above here).
# AeExposureMode needs further attention:
# - Auto-generate max enum value.
@@ -112,9 +106,6 @@ controls:
- name: ExposureCustom
value: 3
description: Custom exposure mode.
- - name: ExposureModeMax
- value: 3
- description: Maximum allowed value (place any new values above here).
- ExposureValue:
type: float
@@ -134,8 +125,18 @@ controls:
Exposure time (shutter speed) for the frame applied in the sensor
device. This value is specified in micro-seconds.
+ Setting this value means that it is now fixed and the AE algorithm may
+ not change it. Setting it back to zero returns it to the control of the
+ AE algorithm.
+
\sa AnalogueGain AeEnable
+ \todo Document the interactions between AeEnable and setting a fixed
+ value for this control. Consider interactions with other AE features,
+ such as aperture and aperture/shutter priority mode, and decide
+ whether the selection of which features are automatically adjusted
+ would be better handled through a separate AE mode control.
+
- AnalogueGain:
type: float
description: |
@@ -143,8 +144,18 @@ controls:
The value of the control specifies the gain multiplier applied to all
colour channels. This value cannot be lower than 1.0.
+ Setting this value means that it is now fixed and the AE algorithm may
+ not change it. Setting it back to zero returns it to the control of the
+ AE algorithm.
+
\sa ExposureTime AeEnable
+ \todo Document the interactions between AeEnable and setting a fixed
+ value for this control. Consider interactions with other AE features,
+ such as aperture and aperture/shutter priority mode, and decide
+ whether the selection of which features are automatically adjusted
+ would be better handled through a separate AE mode control.
+
- Brightness:
type: float
description: |
@@ -204,9 +215,17 @@ controls:
- name: AwbCustom
value: 7
description: Custom AWB mode.
- - name: AwbModeMax
- value: 7
- description: Maximum allowed value (place any new values above here).
+
+ - AwbLocked:
+ type: bool
+ description: |
+ Report the lock status of a running AWB algorithm.
+
+ If the AWB algorithm is locked, the value shall be set to true; if it
+ is still converging, it shall be set to false. If the AWB algorithm is
+ not running, the control shall not be present in the metadata control
+ list.
+
+ \sa AwbEnable
- ColourGains:
type: float
@@ -273,4 +292,392 @@ controls:
order in an array of 9 floating point values.
size: [3x3]
+
+ - ScalerCrop:
+ type: Rectangle
+ description: |
+ Sets the image portion that will be scaled to form the whole of
+ the final output image. The (x,y) location of this rectangle is
+ relative to the PixelArrayActiveAreas entry that is being used. The units
+ remain native sensor pixels, even if the sensor is being used in
+ a binning or skipping mode.
+
+ This control is only present when the pipeline supports scaling. Its
+ maximum valid value is given by the properties::ScalerCropMaximum
+ property, and the two can be used to implement digital zoom.
+
+ - DigitalGain:
+ type: float
+ description: |
+ Digital gain value applied during the processing steps applied
+ to the image as captured from the sensor.
+
+ The global digital gain factor is applied to all the colour channels
+ of the RAW image. Different pipeline models are free to
+ specify how the global gain factor applies to each separate
+ channel.
+
+ If an imaging pipeline applies digital gain in distinct
+ processing steps, this value indicates their total sum.
+ Pipelines are free to decide how to adjust each processing
+ step to respect the received gain factor and shall report
+ their total value in the request metadata.
+
+ - FrameDuration:
+ type: int64_t
+ description: |
+ The instantaneous frame duration from start of frame exposure to start
+ of next exposure, expressed in microseconds. This control is meant to
+ be returned in metadata.
+
+ - FrameDurationLimits:
+ type: int64_t
+ description: |
+ The minimum and maximum (in that order) frame duration,
+ expressed in microseconds.
+
+ When provided by applications, the control specifies the sensor frame
+ duration interval the pipeline has to use. This limits the largest
+ exposure time the sensor can use. For example, if a maximum frame
+ duration of 33ms is requested (corresponding to 30 frames per second),
+ the sensor will not be able to raise the exposure time above 33ms.
+ A fixed frame duration is achieved by setting the minimum and maximum
+ values to be the same. Setting both values to 0 reverts to using the
+ IPA provided defaults.
+
+ The maximum frame duration provides the absolute limit to the shutter
+ speed computed by the AE algorithm and it overrides any exposure mode
+ setting specified with controls::AeExposureMode. Similarly, when a
+ manual exposure time is set through controls::ExposureTime, it also
+ gets clipped to the limits set by this control. When reported in
+ metadata, the control expresses the minimum and maximum frame
+ durations used after being clipped to the sensor provided frame
+ duration limits.
+
+ \sa AeExposureMode
+ \sa ExposureTime
+
+ \todo Define how to calculate the capture frame rate by
+ defining controls to report additional delays introduced by
+ the capture pipeline or post-processing stages (e.g. JPEG
+ conversion, frame scaling).
+
+ \todo Provide an explicit definition of default control values, for
+ this and all other controls.
+
+ size: [2]
+
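As a sketch of how an application would use this control from C++ (assuming the generated controls::FrameDurationLimits identifier and an initializer-list overload of ControlList::set()):

    /* Sketch: lock capture to ~30 fps with equal min/max durations (µs). */
    ControlList &ctrls = request->controls();
    ctrls.set(controls::FrameDurationLimits, { 33333, 33333 });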
+ - SensorTimestamp:
+ type: int64_t
+ description: |
+ The time when the first row of the image sensor active array is exposed.
+
+ The timestamp, expressed in nanoseconds, represents a monotonically
+ increasing counter since the system boot time, as defined by the
+ Linux-specific CLOCK_BOOTTIME clock id.
+
+ The SensorTimestamp control can only be returned in metadata.
+
+ \todo Define how the sensor timestamp has to be used in the reprocessing
+ use case.
+
+ # ----------------------------------------------------------------------------
+ # Draft controls section
+
+ - AePrecaptureTrigger:
+ type: int32_t
+ draft: true
+ description: |
+ Control for AE metering trigger. Currently identical to
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER.
+
+ Whether the camera device will trigger a precapture metering sequence
+ when it processes this request.
+ enum:
+ - name: AePrecaptureTriggerIdle
+ value: 0
+ description: The trigger is idle.
+ - name: AePrecaptureTriggerStart
+ value: 1
+ description: The pre-capture AE metering is started by the camera.
+ - name: AePrecaptureTriggerCancel
+ value: 2
+ description: |
+ The camera will cancel any active or completed metering sequence.
+ The AE algorithm is reset to its initial state.
+
+ - AfTrigger:
+ type: int32_t
+ draft: true
+ description: |
+ Control for AF trigger. Currently identical to
+ ANDROID_CONTROL_AF_TRIGGER.
+
+ Whether the camera device will trigger autofocus for this request.
+ enum:
+ - name: AfTriggerIdle
+ value: 0
+ description: The trigger is idle.
+ - name: AfTriggerStart
+ value: 1
+ description: The AF routine is started by the camera.
+ - name: AfTriggerCancel
+ value: 2
+ description: |
+ The camera will cancel any active trigger and the AF routine is
+ reset to its initial state.
+
+ - NoiseReductionMode:
+ type: int32_t
+ draft: true
+ description: |
+ Control to select the noise reduction algorithm mode. Currently
+ identical to ANDROID_NOISE_REDUCTION_MODE.
+
+ Mode of operation for the noise reduction algorithm.
+ enum:
+ - name: NoiseReductionModeOff
+ value: 0
+ description: No noise reduction is applied
+ - name: NoiseReductionModeFast
+ value: 1
+ description: |
+ Noise reduction is applied without reducing the frame rate.
+ - name: NoiseReductionModeHighQuality
+ value: 2
+ description: |
+ High quality noise reduction at the expense of frame rate.
+ - name: NoiseReductionModeMinimal
+ value: 3
+ description: |
+ Minimal noise reduction is applied without reducing the frame rate.
+ - name: NoiseReductionModeZSL
+ value: 4
+ description: |
+ Noise reduction is applied at different levels to different streams.
+
+ - ColorCorrectionAberrationMode:
+ type: int32_t
+ draft: true
+ description: |
+ Control to select the color correction aberration mode. Currently
+ identical to ANDROID_COLOR_CORRECTION_ABERRATION_MODE.
+
+ Mode of operation for the chromatic aberration correction algorithm.
+ enum:
+ - name: ColorCorrectionAberrationOff
+ value: 0
+ description: No aberration correction is applied.
+ - name: ColorCorrectionAberrationFast
+ value: 1
+ description: Aberration correction will not slow down the frame rate.
+ - name: ColorCorrectionAberrationHighQuality
+ value: 2
+ description: |
+ High quality aberration correction which might reduce the frame
+ rate.
+
+ - AeState:
+ type: int32_t
+ draft: true
+ description: |
+ Control to report the current AE algorithm state. Currently identical to
+ ANDROID_CONTROL_AE_STATE.
+
+ Current state of the AE algorithm.
+ enum:
+ - name: AeStateInactive
+ value: 0
+ description: The AE algorithm is inactive.
+ - name: AeStateSearching
+ value: 1
+ description: The AE algorithm has not converged yet.
+ - name: AeStateConverged
+ value: 2
+ description: The AE algorithm has converged.
+ - name: AeStateLocked
+ value: 3
+ description: The AE algorithm is locked.
+ - name: AeStateFlashRequired
+ value: 4
+ description: The AE algorithm would need a flash for good results.
+ - name: AeStatePrecapture
+ value: 5
+ description: |
+ The AE algorithm has started a pre-capture metering session.
+ \sa AePrecaptureTrigger
+
+ - AfState:
+ type: int32_t
+ draft: true
+ description: |
+ Control to report the current AF algorithm state. Currently identical to
+ ANDROID_CONTROL_AF_STATE.
+
+ Current state of the AF algorithm.
+ enum:
+ - name: AfStateInactive
+ value: 0
+ description: The AF algorithm is inactive.
+ - name: AfStatePassiveScan
+ value: 1
+ description: |
+ AF is performing a passive scan of the scene in continuous
+ auto-focus mode.
+ - name: AfStatePassiveFocused
+ value: 2
+ description: |
+ AF believes the scene is in focus, but might restart scanning.
+ - name: AfStateActiveScan
+ value: 3
+ description: |
+ AF is performing a scan triggered by an AF trigger request.
+ \sa AfTrigger
+ - name: AfStateFocusedLock
+ value: 4
+ description: |
+ AF believes it has focused correctly and has locked focus.
+ - name: AfStateNotFocusedLock
+ value: 5
+ description: |
+ AF has not been able to focus and has locked.
+ - name: AfStatePassiveUnfocused
+ value: 6
+ description: |
+ AF has completed a passive scan without finding focus.
+
+ - AwbState:
+ type: int32_t
+ draft: true
+ description: |
+ Control to report the current AWB algorithm state. Currently identical
+ to ANDROID_CONTROL_AWB_STATE.
+
+ Current state of the AWB algorithm.
+ enum:
+ - name: AwbStateInactive
+ value: 0
+ description: The AWB algorithm is inactive.
+ - name: AwbStateSearching
+ value: 1
+ description: The AWB algorithm has not converged yet.
+ - name: AwbConverged
+ value: 2
+ description: The AWB algorithm has converged.
+ - name: AwbLocked
+ value: 3
+ description: The AWB algorithm is locked.
+
+ - SensorRollingShutterSkew:
+ type: int64_t
+ draft: true
+ description: |
+ Control to report the time between the start of exposure of the first
+ row and the start of exposure of the last row. Currently identical to
+ ANDROID_SENSOR_ROLLING_SHUTTER_SKEW.
+
+ - LensShadingMapMode:
+ type: int32_t
+ draft: true
+ description: |
+ Control to report if the lens shading map is available. Currently
+ identical to ANDROID_STATISTICS_LENS_SHADING_MAP_MODE.
+ enum:
+ - name: LensShadingMapModeOff
+ value: 0
+ description: No lens shading map mode is available.
+ - name: LensShadingMapModeOn
+ value: 1
+ description: The lens shading map mode is available.
+
+ - SceneFlicker:
+ type: int32_t
+ draft: true
+ description: |
+ Control to report the detected scene light frequency. Currently
+ identical to ANDROID_STATISTICS_SCENE_FLICKER.
+ enum:
+ - name: SceneFlickerOff
+ value: 0
+ description: No flickering detected.
+ - name: SceneFlicker50Hz
+ value: 1
+ description: 50Hz flickering detected.
+ - name: SceneFlicker60Hz
+ value: 2
+ description: 60Hz flickering detected.
+
+ - PipelineDepth:
+ type: int32_t
+ draft: true
+ description: |
+ Specifies the number of pipeline stages the frame went through from when
+ it was exposed to when the final completed result was available to the
+ framework. Always less than or equal to PipelineMaxDepth. Currently
+ identical to ANDROID_REQUEST_PIPELINE_DEPTH.
+
+ The typical value for this control is 3, as a frame is first exposed,
+ then captured and processed in a single pass through the ISP. Any
+ additional processing step performed after the ISP pass (for example
+ face detection, additional format conversions, etc.) counts as an
+ additional pipeline stage.
+
+ - TestPatternMode:
+ type: int32_t
+ draft: true
+ description: |
+ Control to select the test pattern mode. Currently identical to
+ ANDROID_SENSOR_TEST_PATTERN_MODE.
+ enum:
+ - name: TestPatternModeOff
+ value: 0
+ description: |
+ No test pattern mode is used. The camera device returns frames from
+ the image sensor.
+ - name: TestPatternModeSolidColor
+ value: 1
+ description: |
+ Each pixel in [R, G_even, G_odd, B] is replaced by its respective
+ color channel provided in test pattern data.
+ \todo Add control for test pattern data.
+ - name: TestPatternModeColorBars
+ value: 2
+ description: |
+ All pixel data is replaced with an 8-bar color pattern. The vertical
+ bars (left-to-right) are as follows: white, yellow, cyan, green,
+ magenta, red, blue and black. Each bar should take up 1/8 of the
+ sensor pixel array width. When this is not possible, the bar size
+ should be rounded down to the nearest integer and the pattern can
+ repeat on the right side. Each bar's height must always take up the
+ full sensor pixel array height.
+ - name: TestPatternModeColorBarsFadeToGray
+ value: 3
+ description: |
+ The test pattern is similar to TestPatternModeColorBars,
+ except that each bar should start at its specified color at the top
+ and fade to gray at the bottom. Furthermore, each bar is
+ subdivided into a left and a right half. The left half should have a
+ smooth gradient, and the right half should have a quantized
+ gradient. In particular, the right half should consist of blocks of
+ the same color, each 1/16 of the active sensor pixel array width
+ wide. The least significant bits in the quantized gradient should be
+ copied from the most significant bits of the smooth gradient. The
+ height of each bar should always be a multiple of 128. When this is
+ not the case, the pattern should repeat at the bottom of the image.
+ - name: TestPatternModePn9
+ value: 4
+ description: |
+ All pixel data is replaced by a pseudo-random sequence generated
+ from a PN9 512-bit sequence (typically implemented in hardware with
+ a linear feedback shift register). The generator should be reset at
+ the beginning of each frame, and thus each subsequent raw frame with
+ this test pattern should be exactly the same as the last.
+ - name: TestPatternModeCustom1
+ value: 256
+ description: |
+ The first custom test pattern. All custom patterns that are
+ available only on this camera device are at least this numeric
+ value. All of the custom test patterns will be static (that is, the
+ raw image must not vary from frame to frame).
+
...
diff --git a/src/libcamera/control_serializer.cpp b/src/libcamera/control_serializer.cpp
index 258db6df..30046628 100644
--- a/src/libcamera/control_serializer.cpp
+++ b/src/libcamera/control_serializer.cpp
@@ -11,13 +11,14 @@
#include <memory>
#include <vector>
+#include <libcamera/base/log.h>
+#include <libcamera/base/span.h>
+
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/ipa/ipa_controls.h>
-#include <libcamera/span.h>
#include "libcamera/internal/byte_stream_buffer.h"
-#include "libcamera/internal/log.h"
/**
* \file control_serializer.h
@@ -173,6 +174,12 @@ void ControlSerializer::store(const ControlInfo &info, ByteStreamBuffer &buffer)
int ControlSerializer::serialize(const ControlInfoMap &infoMap,
ByteStreamBuffer &buffer)
{
+ if (isCached(infoMap)) {
+ LOG(Serializer, Debug)
+ << "Skipping already serialized ControlInfoMap";
+ return 0;
+ }
+
/* Compute entries and data required sizes. */
size_t entriesSize = infoMap.size()
* sizeof(struct ipa_control_info_entry);
@@ -347,6 +354,12 @@ ControlInfoMap ControlSerializer::deserialize<ControlInfoMap>(ByteStreamBuffer &
return {};
}
+ auto iter = infoMaps_.find(hdr->handle);
+ if (iter != infoMaps_.end()) {
+ LOG(Serializer, Debug) << "Use cached ControlInfoMap";
+ return iter->second;
+ }
+
if (hdr->version != IPA_CONTROLS_FORMAT_VERSION) {
LOG(Serializer, Error)
<< "Unsupported controls format version "
@@ -485,4 +498,18 @@ ControlList ControlSerializer::deserialize<ControlList>(ByteStreamBuffer &buffer
return ctrls;
}
+/**
+ * \brief Check if a ControlInfoMap is cached
+ * \param[in] infoMap The ControlInfoMap to check
+ *
+ * The ControlSerializer caches all ControlInfoMaps that it has (de)serialized.
+ * This function checks if \a infoMap is in the cache.
+ *
+ * \return True if \a infoMap is in the cache or false otherwise
+ */
+bool ControlSerializer::isCached(const ControlInfoMap &infoMap)
+{
+ return infoMapHandles_.count(&infoMap);
+}
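A rough sketch of the caching behaviour on the serialization side (construction details and error handling are schematic assumptions):

    /* Sketch: the second serialize() call is a cached no-op. */
    ControlSerializer serializer;
    std::vector<uint8_t> data(serializer.binarySize(infoMap));
    ByteStreamBuffer buffer(data.data(), data.size());
    serializer.serialize(infoMap, buffer); /* stored and cached */
    serializer.serialize(infoMap, buffer); /* skipped: already cached */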
+
} /* namespace libcamera */
diff --git a/src/libcamera/controls.cpp b/src/libcamera/controls.cpp
index dca78266..34317fa0 100644
--- a/src/libcamera/controls.cpp
+++ b/src/libcamera/controls.cpp
@@ -12,9 +12,10 @@
#include <string>
#include <string.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include "libcamera/internal/control_validator.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/utils.h"
/**
* \file controls.h
@@ -437,8 +438,8 @@ void ControlValue::reserve(ControlType type, bool isArray, std::size_t numElemen
* the control value. See for instance how the ControlList::get() method
* returns a value corresponding to the type of the requested control.
*
- * While this class is the main mean to refer to a control, the control
- * identifying information are stored in the non-template base ControlId class.
+ * While this class is the main means to refer to a control, the control
+ * identifying information is stored in the non-template base ControlId class.
* This allows code that operates on a set of controls of different types to
* reference those controls through a ControlId instead of a Control. For
* instance, the list of controls supported by a camera is exposed as ControlId
@@ -492,6 +493,28 @@ ControlInfo::ControlInfo(const ControlValue &min,
}
/**
+ * \brief Construct a ControlInfo from the list of valid values
+ * \param[in] values The control valid values
+ * \param[in] def The control default value
+ *
+ * Construct a ControlInfo from a list of valid values. The ControlInfo
+ * minimum and maximum values are set to the first and last members of the
+ * values list respectively. The default value is set to \a def if provided, or
+ * to the minimum value otherwise.
+ */
+ControlInfo::ControlInfo(Span<const ControlValue> values,
+ const ControlValue &def)
+{
+ min_ = values.front();
+ max_ = values.back();
+ def_ = !def.isNone() ? def : values.front();
+
+ values_.reserve(values.size());
+ for (const ControlValue &value : values)
+ values_.push_back(value);
+}
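A usage sketch for the new constructor (the values are arbitrary):

    /* Sketch: valid values 0..2, with the first doubling as the default. */
    std::vector<ControlValue> values;
    for (int32_t mode : { 0, 1, 2 })
        values.push_back(ControlValue(mode));
    ControlInfo info(Span<const ControlValue>(values), values[0]);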
+
+/**
* \fn ControlInfo::min()
* \brief Retrieve the minimum value of the control
*
@@ -520,6 +543,17 @@ ControlInfo::ControlInfo(const ControlValue &min,
*/
/**
+ * \fn ControlInfo::values()
+ * \brief Retrieve the list of valid values
+ *
+ * For controls that support a pre-defined number of values, the enumeration of
+ * those is reported through a vector of ControlValue instances accessible with
+ * this method.
+ *
+ * \return A vector of ControlValue representing the control valid values
+ */
+
+/**
* \brief Provide a string representation of the ControlInfo
*/
std::string ControlInfo::toString() const
@@ -842,6 +876,46 @@ ControlList::ControlList(const ControlInfoMap &infoMap, ControlValidator *valida
*/
/**
+ * \brief Merge the \a source into the ControlList
+ * \param[in] source The ControlList to merge into this object
+ *
+ * Merging two control lists copies elements from the \a source and inserts
+ * them in *this. If the \a source contains elements whose key is already
+ * present in *this, then those elements are not overwritten.
+ *
+ * Only control lists created from the same ControlIdMap or ControlInfoMap may
+ * be merged. Attempting to do otherwise results in undefined behaviour.
+ *
+ * \todo Reimplement or implement an overloaded version which internally uses
+ * std::unordered_map::merge() and accepts a non-const argument.
+ */
+void ControlList::merge(const ControlList &source)
+{
+ /*
+ * \todo ASSERT that the current and source ControlList are derived
+ * from a compatible ControlIdMap, to prevent undefined behaviour due to
+ * id collisions.
+ *
+ * This cannot currently be a direct pointer comparison due to the
+ * duplication of the ControlIdMaps in the isolated IPA use cases.
+ * Furthermore, manually checking that each entry of the id map is
+ * identical is expensive.
+ * See https://bugs.libcamera.org/show_bug.cgi?id=31 for further details.
+ */
+
+ for (const auto &ctrl : source) {
+ if (contains(ctrl.first)) {
+ const ControlId *id = idmap_->at(ctrl.first);
+ LOG(Controls, Warning)
+ << "Control " << id->name() << " not overwritten";
+ continue;
+ }
+
+ set(ctrl.first, ctrl.second);
+ }
+}
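A behavioural sketch of merge() (assuming both lists are created from the global controls::controls id map; the values are arbitrary):

    /* Sketch: entries already present in 'metadata' are preserved. */
    ControlList metadata(controls::controls);
    metadata.set(controls::ExposureTime, 10000);

    ControlList ipaMetadata(controls::controls);
    ipaMetadata.set(controls::ExposureTime, 20000);
    ipaMetadata.set(controls::AnalogueGain, 2.0f);

    metadata.merge(ipaMetadata);
    /* metadata: ExposureTime == 10000 (kept), AnalogueGain == 2.0 */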
+
+/**
* \brief Check if the list contains a control with the specified \a id
* \param[in] id The control ID
*
diff --git a/src/libcamera/delayed_controls.cpp b/src/libcamera/delayed_controls.cpp
new file mode 100644
index 00000000..90ce7e0b
--- /dev/null
+++ b/src/libcamera/delayed_controls.cpp
@@ -0,0 +1,293 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Raspberry Pi (Trading) Ltd.
+ *
+ * delayed_controls.h - Helper to deal with controls that take effect with a delay
+ */
+
+#include "libcamera/internal/delayed_controls.h"
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/controls.h>
+
+#include "libcamera/internal/v4l2_device.h"
+
+/**
+ * \file delayed_controls.h
+ * \brief Helper to deal with controls that take effect with a delay
+ */
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(DelayedControls)
+
+/**
+ * \class DelayedControls
+ * \brief Helper to deal with controls that take effect with a delay
+ *
+ * Some sensor controls take effect with a delay as the sensor needs time to
+ * adjust, for example exposure and analog gain. This is a helper class to deal
+ * with such controls and the intended users are pipeline handlers.
+ *
+ * The idea is to extend the concept of the buffer depth that an application
+ * needs to maintain for a pipeline to also cover controls. Just as with
+ * buffer depth, if the application keeps the number of queued requests above
+ * the control depth, the controls are guaranteed to take effect for the
+ * correct request. The control depth is determined by the control with the
+ * greatest delay.
+ */
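A condensed pipeline-handler lifecycle sketch (the control ids, delay values and the v4l2Ctrls list are illustrative assumptions):

    /* Sketch: two-frame exposure delay, VBLANK written with priority. */
    std::unordered_map<uint32_t, DelayedControls::ControlParams> params = {
        { V4L2_CID_ANALOGUE_GAIN, { 1, false } },
        { V4L2_CID_EXPOSURE, { 2, false } },
        { V4L2_CID_VBLANK, { 1, true } },
    };
    DelayedControls delayed(device, params);

    delayed.push(v4l2Ctrls);             /* per queued request */
    delayed.applyControls(seq);          /* on the frame start (SOE) event */
    ControlList used = delayed.get(seq); /* controls in effect for frame */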
+
+/**
+ * \struct DelayedControls::ControlParams
+ * \brief Parameters associated with controls handled by the \a DelayedControls
+ * helper class
+ *
+ * \var ControlParams::delay
+ * \brief Frame delay from setting the control on a sensor device to when it is
+ * consumed during framing.
+ *
+ * \var ControlParams::priorityWrite
+ * \brief Flag to indicate that this control must be applied ahead of, and
+ * separately from, the other controls.
+ *
+ * Typically set for the \a V4L2_CID_VBLANK control so that the device driver
+ * does not reject \a V4L2_CID_EXPOSURE control values that may be outside of
+ * the existing vertical blanking specified bounds, but are within the new
+ * blanking bounds.
+ */
+
+/**
+ * \brief Construct a DelayedControls instance
+ * \param[in] device The V4L2 device the controls have to be applied to
+ * \param[in] controlParams Map of the numerical V4L2 control ids to their
+ * associated control parameters.
+ *
+ * The control parameters consist of a delay (in frames) and a priority write
+ * flag. If this flag is set, the relevant control is written separately from,
+ * and ahead of, the rest of the batched controls.
+ *
+ * Only controls specified in \a controlParams are handled. To mix delayed
+ * controls with controls that take effect immediately, the immediate controls
+ * must be listed in the \a controlParams map with a delay value of 0.
+ */
+DelayedControls::DelayedControls(V4L2Device *device,
+ const std::unordered_map<uint32_t, ControlParams> &controlParams)
+ : device_(device), maxDelay_(0)
+{
+ const ControlInfoMap &controls = device_->controls();
+
+ /*
+ * Create a map of control ids to delays for controls exposed by the
+ * device.
+ */
+ for (auto const &param : controlParams) {
+ auto it = controls.find(param.first);
+ if (it == controls.end()) {
+ LOG(DelayedControls, Error)
+ << "Delay request for control id "
+ << utils::hex(param.first)
+ << " but control is not exposed by device "
+ << device_->deviceNode();
+ continue;
+ }
+
+ const ControlId *id = it->first;
+
+ controlParams_[id] = param.second;
+
+ LOG(DelayedControls, Debug)
+ << "Set a delay of " << controlParams_[id].delay
+ << " and priority write flag " << controlParams_[id].priorityWrite
+ << " for " << id->name();
+
+ maxDelay_ = std::max(maxDelay_, controlParams_[id].delay);
+ }
+
+ reset();
+}
+
+/**
+ * \brief Reset state machine
+ *
+ * Resets the state machine to a starting position based on control values
+ * retrieved from the device.
+ */
+void DelayedControls::reset()
+{
+ running_ = false;
+ firstSequence_ = 0;
+ queueCount_ = 1;
+ writeCount_ = 0;
+
+ /* Retrieve the controls as reported by the device. */
+ std::vector<uint32_t> ids;
+ for (auto const &param : controlParams_)
+ ids.push_back(param.first->id());
+
+ ControlList controls = device_->getControls(ids);
+
+ /* Seed the control queue with the controls reported by the device. */
+ values_.clear();
+ for (const auto &ctrl : controls) {
+ const ControlId *id = device_->controls().idmap().at(ctrl.first);
+ /*
+ * Do not mark this control value as updated, as it does not need
+ * to be written to the device on startup.
+ */
+ values_[id][0] = Info(ctrl.second, false);
+ }
+}
+
+/**
+ * \brief Push a set of controls on the queue
+ * \param[in] controls List of controls to add to the device queue
+ *
+ * Push a set of controls to the control queue. This increases the control queue
+ * depth by one.
+ *
+ * \return True if \a controls are accepted, or false otherwise
+ */
+bool DelayedControls::push(const ControlList &controls)
+{
+ /* Copy state from previous frame. */
+ for (auto &ctrl : values_) {
+ Info &info = ctrl.second[queueCount_];
+ info = values_[ctrl.first][queueCount_ - 1];
+ info.updated = false;
+ }
+
+ /* Update with new controls. */
+ const ControlIdMap &idmap = device_->controls().idmap();
+ for (const auto &control : controls) {
+ const auto &it = idmap.find(control.first);
+ if (it == idmap.end()) {
+ LOG(DelayedControls, Warning)
+ << "Unknown control " << control.first;
+ return false;
+ }
+
+ const ControlId *id = it->second;
+
+ if (controlParams_.find(id) == controlParams_.end())
+ return false;
+
+ Info &info = values_[id][queueCount_];
+
+ info = Info(control.second);
+
+ LOG(DelayedControls, Debug)
+ << "Queuing " << id->name()
+ << " to " << info.toString()
+ << " at index " << queueCount_;
+ }
+
+ queueCount_++;
+
+ return true;
+}
+
+/**
+ * \brief Read back controls in effect at a sequence number
+ * \param[in] sequence The sequence number to get controls for
+ *
+ * Read back which controls were in effect at a specific sequence number. The
+ * history is a ring buffer of 16 entries where new and old values coexist. It
+ * is the caller's responsibility not to read sequence numbers so old that they
+ * have been pushed out of the history.
+ *
+ * Historic values are evicted by pushing new values onto the queue using
+ * push(). The maximum history depth from the current sequence number that
+ * yields valid values is thus 16 minus the number of entries pushed.
+ *
+ * \return The controls at \a sequence number
+ */
+ControlList DelayedControls::get(uint32_t sequence)
+{
+ uint32_t adjustedSeq = sequence - firstSequence_;
+ unsigned int index = std::max<int>(0, adjustedSeq - maxDelay_);
+
+ ControlList out(device_->controls());
+ for (const auto &ctrl : values_) {
+ const ControlId *id = ctrl.first;
+ const Info &info = ctrl.second[index];
+
+ out.set(id->id(), info);
+
+ LOG(DelayedControls, Debug)
+ << "Reading " << id->name()
+ << " to " << info.toString()
+ << " at index " << index;
+ }
+
+ return out;
+}
+
+/**
+ * \brief Inform DelayedControls of the start of a new frame
+ * \param[in] sequence Sequence number of the frame that started
+ *
+ * Inform the state machine that a new frame has started and of its sequence
+ * number. Any user of this helper is responsible for informing it about the
+ * start of every frame. This can easily be connected to the start of exposure
+ * (SOE) V4L2 event.
+ */
+void DelayedControls::applyControls(uint32_t sequence)
+{
+ LOG(DelayedControls, Debug) << "frame " << sequence << " started";
+
+ if (!running_) {
+ firstSequence_ = sequence;
+ running_ = true;
+ }
+
+ /*
+ * Create control list peeking ahead in the value queue to ensure
+ * values are set in time to satisfy the sensor delay.
+ */
+ ControlList out(device_->controls());
+ for (auto &ctrl : values_) {
+ const ControlId *id = ctrl.first;
+ unsigned int delayDiff = maxDelay_ - controlParams_[id].delay;
+ unsigned int index = std::max<int>(0, writeCount_ - delayDiff);
+ Info &info = ctrl.second[index];
+
+ if (info.updated) {
+ if (controlParams_[id].priorityWrite) {
+ /*
+ * This control must be written now, it could
+ * affect validity of the other controls.
+ */
+ ControlList priority(device_->controls());
+ priority.set(id->id(), info);
+ device_->setControls(&priority);
+ } else {
+ /*
+ * Batch up the list of controls and write them
+ * at the end of the function.
+ */
+ out.set(id->id(), info);
+ }
+
+ LOG(DelayedControls, Debug)
+ << "Setting " << id->name()
+ << " to " << info.toString()
+ << " at index " << index;
+
+ /* Done with this update, so mark as completed. */
+ info.updated = false;
+ }
+ }
+
+ writeCount_++;
+
+ while (writeCount_ > queueCount_) {
+ LOG(DelayedControls, Debug)
+ << "Queue is empty, auto queue no-op.";
+ push({});
+ }
+
+ device_->setControls(&out);
+}
+
+} /* namespace libcamera */
diff --git a/src/libcamera/device_enumerator.cpp b/src/libcamera/device_enumerator.cpp
index 647974b1..cfd1e6b2 100644
--- a/src/libcamera/device_enumerator.cpp
+++ b/src/libcamera/device_enumerator.cpp
@@ -6,12 +6,13 @@
*/
#include "libcamera/internal/device_enumerator.h"
-#include "libcamera/internal/device_enumerator_sysfs.h"
-#include "libcamera/internal/device_enumerator_udev.h"
#include <string.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
+
+#include "libcamera/internal/device_enumerator_sysfs.h"
+#include "libcamera/internal/device_enumerator_udev.h"
#include "libcamera/internal/media_device.h"
/**
@@ -246,7 +247,7 @@ std::unique_ptr<MediaDevice> DeviceEnumerator::createDevice(const std::string &d
* This method shall be called after all members of the entities of the
* media graph have been confirmed to be initialized.
*/
-void DeviceEnumerator::addDevice(std::unique_ptr<MediaDevice> &&media)
+void DeviceEnumerator::addDevice(std::unique_ptr<MediaDevice> media)
{
LOG(DeviceEnumerator, Debug)
<< "Added device " << media->deviceNode() << ": " << media->driver();
diff --git a/src/libcamera/device_enumerator_sysfs.cpp b/src/libcamera/device_enumerator_sysfs.cpp
index ff728852..686bb809 100644
--- a/src/libcamera/device_enumerator_sysfs.cpp
+++ b/src/libcamera/device_enumerator_sysfs.cpp
@@ -17,7 +17,8 @@
#include <sys/types.h>
#include <unistd.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
+
#include "libcamera/internal/media_device.h"
namespace libcamera {
diff --git a/src/libcamera/device_enumerator_udev.cpp b/src/libcamera/device_enumerator_udev.cpp
index c6e23a1a..37a2c5aa 100644
--- a/src/libcamera/device_enumerator_udev.cpp
+++ b/src/libcamera/device_enumerator_udev.cpp
@@ -17,9 +17,9 @@
#include <sys/sysmacros.h>
#include <unistd.h>
-#include <libcamera/event_notifier.h>
+#include <libcamera/base/event_notifier.h>
+#include <libcamera/base/log.h>
-#include "libcamera/internal/log.h"
#include "libcamera/internal/media_device.h"
namespace libcamera {
diff --git a/src/libcamera/file_descriptor.cpp b/src/libcamera/file_descriptor.cpp
index 640e66e6..638b3bbe 100644
--- a/src/libcamera/file_descriptor.cpp
+++ b/src/libcamera/file_descriptor.cpp
@@ -11,7 +11,7 @@
#include <unistd.h>
#include <utility>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
/**
* \file file_descriptor.h
@@ -42,7 +42,7 @@ LOG_DEFINE_CATEGORY(FileDescriptor)
* constructor.
*
* - The FileDescriptor(int &&) constructor takes over the numerical file
- * descriptor and wraps it in a Descriptor. The caller is shall not touch the
+ * descriptor and wraps it in a Descriptor. The caller shall not touch the
* original file descriptor once the function returns, and the value returned
* by fd() will be identical to the value passed to the constructor.
*
diff --git a/src/libcamera/formats.cpp b/src/libcamera/formats.cpp
index cc6e5790..59a34853 100644
--- a/src/libcamera/formats.cpp
+++ b/src/libcamera/formats.cpp
@@ -10,9 +10,9 @@
#include <algorithm>
#include <errno.h>
-#include <libcamera/formats.h>
+#include <libcamera/base/log.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/formats.h>
/**
* \file internal/formats.h
@@ -155,6 +155,16 @@ const std::map<PixelFormat, PixelFormatInfo> pixelFormatInfo{
.pixelsPerGroup = 1,
.planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }},
} },
+ { formats::RGB565_BE, {
+ .name = "RGB565_BE",
+ .format = formats::RGB565_BE,
+ .v4l2Format = V4L2PixelFormat(V4L2_PIX_FMT_RGB565X),
+ .bitsPerPixel = 16,
+ .colourEncoding = PixelFormatInfo::ColourEncodingRGB,
+ .packed = false,
+ .pixelsPerGroup = 1,
+ .planes = {{ { 3, 1 }, { 0, 0 }, { 0, 0 } }},
+ } },
{ formats::BGR888, {
.name = "BGR888",
.format = formats::BGR888,
diff --git a/src/libcamera/formats.yaml b/src/libcamera/formats.yaml
index 6a2fb721..43b5877e 100644
--- a/src/libcamera/formats.yaml
+++ b/src/libcamera/formats.yaml
@@ -10,6 +10,9 @@ formats:
- RGB565:
fourcc: DRM_FORMAT_RGB565
+ - RGB565_BE:
+ fourcc: DRM_FORMAT_RGB565
+ big_endian: true
- RGB888:
fourcc: DRM_FORMAT_RGB888
diff --git a/src/libcamera/buffer.cpp b/src/libcamera/framebuffer.cpp
index 75b26932..40bf64b0 100644
--- a/src/libcamera/buffer.cpp
+++ b/src/libcamera/framebuffer.cpp
@@ -2,25 +2,25 @@
/*
* Copyright (C) 2019, Google Inc.
*
- * buffer.cpp - Buffer handling
+ * framebuffer.cpp - Frame buffer handling
*/
-#include <libcamera/buffer.h>
-#include "libcamera/internal/buffer.h"
+#include <libcamera/framebuffer.h>
+#include "libcamera/internal/framebuffer.h"
#include <errno.h>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
/**
- * \file libcamera/buffer.h
- * \brief Buffer handling
+ * \file libcamera/framebuffer.h
+ * \brief Frame buffer handling
*
- * \file libcamera/internal/buffer.h
- * \brief Internal buffer handling support
+ * \file libcamera/internal/framebuffer.h
+ * \brief Internal frame buffer handling support
*/
namespace libcamera {
@@ -100,6 +100,21 @@ LOG_DEFINE_CATEGORY(Buffer)
* \brief Array of per-plane metadata
*/
+FrameBuffer::Private::Private(FrameBuffer *buffer)
+ : Extensible::Private(buffer), request_(nullptr)
+{
+}
+
+/**
+ * \fn FrameBuffer::Private::setRequest()
+ * \brief Set the request this buffer belongs to
+ * \param[in] request Request to set
+ *
+ * For buffers added to requests by applications, this method is called by
+ * Request::addBuffer() or Request::reuse(). For buffers internal to pipeline
+ * handlers, it is called by the pipeline handlers themselves.
+ */
+
/**
* \class FrameBuffer
* \brief Frame buffer data and its associated dynamic metadata
@@ -161,7 +176,7 @@ LOG_DEFINE_CATEGORY(Buffer)
* \param[in] cookie Cookie
*/
FrameBuffer::FrameBuffer(const std::vector<Plane> &planes, unsigned int cookie)
- : planes_(planes), request_(nullptr), cookie_(cookie)
+ : Extensible(new Private(this)), planes_(planes), cookie_(cookie)
{
}
@@ -172,7 +187,6 @@ FrameBuffer::FrameBuffer(const std::vector<Plane> &planes, unsigned int cookie)
*/
/**
- * \fn FrameBuffer::request()
* \brief Retrieve the request this buffer belongs to
*
* The intended callers of this method are buffer completion handlers that
@@ -185,17 +199,10 @@ FrameBuffer::FrameBuffer(const std::vector<Plane> &planes, unsigned int cookie)
* \return The Request the FrameBuffer belongs to, or nullptr if the buffer is
* not associated with a request
*/
-
-/**
- * \fn FrameBuffer::setRequest()
- * \brief Set the request this buffer belongs to
- * \param[in] request Request to set
- *
- * The intended callers of this method are pipeline handlers and only for
- * buffers that are internal to the pipeline.
- *
- * \todo Shall be hidden from applications with a d-pointer design.
- */
+Request *FrameBuffer::request() const
+{
+ return _d()->request_;
+}
/**
* \fn FrameBuffer::metadata()
@@ -227,6 +234,14 @@ FrameBuffer::FrameBuffer(const std::vector<Plane> &planes, unsigned int cookie)
*/
/**
+ * \fn FrameBuffer::cancel()
+ * \brief Mark the buffer as cancelled
+ *
+ * If a buffer is not used by a request, it shall be marked as cancelled to
+ * indicate that the metadata is invalid.
+ */
+
+/**
* \class MappedBuffer
* \brief Provide an interface to support managing memory mapped buffers
*
diff --git a/src/libcamera/framebuffer_allocator.cpp b/src/libcamera/framebuffer_allocator.cpp
index 2fbba37a..695073fd 100644
--- a/src/libcamera/framebuffer_allocator.cpp
+++ b/src/libcamera/framebuffer_allocator.cpp
@@ -9,11 +9,12 @@
#include <errno.h>
-#include <libcamera/buffer.h>
+#include <libcamera/base/log.h>
+
#include <libcamera/camera.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/stream.h>
-#include "libcamera/internal/log.h"
#include "libcamera/internal/pipeline_handler.h"
/**
diff --git a/src/libcamera/geometry.cpp b/src/libcamera/geometry.cpp
index b12e1a62..9bbef0b5 100644
--- a/src/libcamera/geometry.cpp
+++ b/src/libcamera/geometry.cpp
@@ -10,6 +10,8 @@
#include <sstream>
#include <stdint.h>
+#include <libcamera/base/log.h>
+
/**
* \file geometry.h
* \brief Data structures related to geometric objects
@@ -18,6 +20,70 @@
namespace libcamera {
/**
+ * \class Point
+ * \brief Describe a point in two-dimensional space
+ *
+ * The Point structure defines a point in two-dimensional space with integer
+ * precision. The coordinates of a Point may be negative as well as positive.
+ */
+
+/**
+ * \fn Point::Point()
+ * \brief Construct a Point with x and y set to 0
+ */
+
+/**
+ * \fn Point::Point(int xpos, int ypos)
+ * \brief Construct a Point at given \a xpos and \a ypos values
+ * \param[in] xpos The x-coordinate
+ * \param[in] ypos The y-coordinate
+ */
+
+/**
+ * \var Point::x
+ * \brief The x-coordinate of the Point
+ */
+
+/**
+ * \var Point::y
+ * \brief The y-coordinate of the Point
+ */
+
+/**
+ * \brief Assemble and return a string describing the point
+ * \return A string describing the point
+ */
+const std::string Point::toString() const
+{
+ std::stringstream ss;
+
+ ss << "(" << x << "," << y << ")";
+
+ return ss.str();
+}
+
+/**
+ * \fn Point Point::operator-() const
+ * \brief Negate a Point by negating both its x and y coordinates
+ * \return The negated point
+ */
+
+/**
+ * \brief Compare points for equality
+ * \return True if the two points are equal, false otherwise
+ */
+bool operator==(const Point &lhs, const Point &rhs)
+{
+ return lhs.x == rhs.x && lhs.y == rhs.y;
+}
+
+/**
+ * \fn bool operator!=(const Point &lhs, const Point &rhs)
+ * \brief Compare points for inequality
+ * \return True if the two points are not equal, false otherwise
+ */
+
+/**
* \struct Size
* \brief Describe a two-dimensional size
*
@@ -144,6 +210,117 @@ const std::string Size::toString() const
*/
/**
+ * \brief Bound the size down to match the aspect ratio given by \a ratio
+ * \param[in] ratio The size whose aspect ratio must be matched
+ *
+ * The behaviour of this function is undefined if either the width or the
+ * height of the \a ratio is zero.
+ *
+ * \return A Size whose width and height are equal to the width and height
+ * of this Size aligned down to the aspect ratio of \a ratio
+ */
+Size Size::boundedToAspectRatio(const Size &ratio) const
+{
+ ASSERT(ratio.width && ratio.height);
+
+ uint64_t ratio1 = static_cast<uint64_t>(width) *
+ static_cast<uint64_t>(ratio.height);
+ uint64_t ratio2 = static_cast<uint64_t>(ratio.width) *
+ static_cast<uint64_t>(height);
+
+ if (ratio1 > ratio2)
+ return { static_cast<unsigned int>(ratio2 / ratio.height), height };
+ else
+ return { width, static_cast<unsigned int>(ratio1 / ratio.width) };
+}
+
+/**
+ * \brief Expand the size to match the aspect ratio given by \a ratio
+ * \param[in] ratio The size whose aspect ratio must be matched
+ *
+ * The behaviour of this function is undefined if either the width or the
+ * height of the \a ratio is zero.
+ *
+ * \return A Size whose width and height are equal to the width and height
+ * of this Size expanded up to the aspect ratio of \a ratio
+ */
+Size Size::expandedToAspectRatio(const Size &ratio) const
+{
+ ASSERT(ratio.width && ratio.height);
+
+ uint64_t ratio1 = static_cast<uint64_t>(width) *
+ static_cast<uint64_t>(ratio.height);
+ uint64_t ratio2 = static_cast<uint64_t>(ratio.width) *
+ static_cast<uint64_t>(height);
+
+ if (ratio1 < ratio2)
+ return { static_cast<unsigned int>(ratio2 / ratio.height), height };
+ else
+ return { width, static_cast<unsigned int>(ratio1 / ratio.width) };
+}
+
+/**
+ * \brief Center a rectangle of this size at a given Point
+ * \param[in] center The center point the Rectangle is to have
+ *
+ * A Rectangle of this object's size is positioned so that its center
+ * is at the given Point.
+ *
+ * \return A Rectangle of this size, centered at the given Point.
+ */
+Rectangle Size::centeredTo(const Point &center) const
+{
+ int x = center.x - width / 2;
+ int y = center.y - height / 2;
+
+ return { x, y, width, height };
+}
+
+/**
+ * \brief Scale size up by the given factor
+ * \param[in] factor The factor
+ * \return The scaled Size
+ */
+Size Size::operator*(float factor) const
+{
+ return Size(width * factor, height * factor);
+}
+
+/**
+ * \brief Scale size down by the given factor
+ * \param[in] factor The factor
+ * \return The scaled Size
+ */
+Size Size::operator/(float factor) const
+{
+ return Size(width / factor, height / factor);
+}
+
+/**
+ * \brief Scale this size up by the given factor in place
+ * \param[in] factor The factor
+ * \return A reference to this object
+ */
+Size &Size::operator*=(float factor)
+{
+ width *= factor;
+ height *= factor;
+ return *this;
+}
+
+/**
+ * \brief Scale this size down by the given factor in place
+ * \param[in] factor The factor
+ * \return A reference to this object
+ */
+Size &Size::operator/=(float factor)
+{
+ width /= factor;
+ height /= factor;
+ return *this;
+}
+
+/**
* \brief Compare sizes for equality
* \return True if the two sizes are equal, false otherwise
*/
@@ -366,6 +543,13 @@ bool operator==(const SizeRange &lhs, const SizeRange &rhs)
*/
/**
+ * \fn Rectangle::Rectangle(const Size &size)
+ * \brief Construct a Rectangle of \a size with its top left corner located
+ * at (0,0)
+ * \param[in] size The desired Rectangle size
+ */
+
+/**
* \var Rectangle::x
* \brief The horizontal coordinate of the rectangle's top-left corner
*/
@@ -405,6 +589,156 @@ const std::string Rectangle::toString() const
}
/**
+ * \brief Retrieve the center point of this rectangle
+ * \return The center Point
+ */
+Point Rectangle::center() const
+{
+ return { x + static_cast<int>(width / 2), y + static_cast<int>(height / 2) };
+}
+
+/**
+ * \fn Size Rectangle::size() const
+ * \brief Retrieve the size of this rectangle
+ * \return The Rectangle size
+ */
+
+/**
+ * \fn Point Rectangle::topLeft() const
+ * \brief Retrieve the coordinates of the top left corner of this Rectangle
+ * \return The Rectangle's top left corner
+ */
+
+/**
+ * \brief Apply a non-uniform rational scaling in place to this Rectangle
+ * \param[in] numerator The numerators of the x and y scaling factors
+ * \param[in] denominator The denominators of the x and y scaling factors
+ *
+ * A non-uniform scaling is applied in place such that the resulting x
+ * coordinates are multiplied by numerator.width / denominator.width,
+ * and similarly for the y coordinates (using height in place of width).
+ *
+ * \return A reference to this object
+ */
+Rectangle &Rectangle::scaleBy(const Size &numerator, const Size &denominator)
+{
+ x = static_cast<int64_t>(x) * numerator.width / denominator.width;
+ y = static_cast<int64_t>(y) * numerator.height / denominator.height;
+ width = static_cast<uint64_t>(width) * numerator.width / denominator.width;
+ height = static_cast<uint64_t>(height) * numerator.height / denominator.height;
+
+ return *this;
+}
+
+/**
+ * \brief Translate this Rectangle in place by the given Point
+ * \param[in] point The amount to translate the Rectangle by
+ *
+ * The Rectangle is translated in the x-direction by the point's x coordinate
+ * and in the y-direction by the point's y coordinate.
+ *
+ * \return A reference to this object
+ */
+Rectangle &Rectangle::translateBy(const Point &point)
+{
+ x += point.x;
+ y += point.y;
+
+ return *this;
+}
+
+/**
+ * \brief Calculate the intersection of this Rectangle with another
+ * \param[in] bound The Rectangle that is intersected with this Rectangle
+ *
+ * This method calculates the standard intersection of two rectangles. If the
+ * rectangles do not overlap in either the x or y direction, then the size
+ * of that dimension in the result (its width or height) is set to zero. Even
+ * when one dimension is set to zero, note that the other dimension may still
+ * have a positive value if there was some overlap.
+ *
+ * \return A Rectangle that is the intersection of the input rectangles
+ */
+Rectangle Rectangle::boundedTo(const Rectangle &bound) const
+{
+ int topLeftX = std::max(x, bound.x);
+ int topLeftY = std::max(y, bound.y);
+ int bottomRightX = std::min<int>(x + width, bound.x + bound.width);
+ int bottomRightY = std::min<int>(y + height, bound.y + bound.height);
+
+ unsigned int newWidth = std::max(bottomRightX - topLeftX, 0);
+ unsigned int newHeight = std::max(bottomRightY - topLeftY, 0);
+
+ return { topLeftX, topLeftY, newWidth, newHeight };
+}
+
+/**
+ * \brief Enclose a Rectangle so as not to exceed another Rectangle
+ * \param[in] boundary The limit that the returned Rectangle will not exceed
+ *
+ * The Rectangle is modified so that it does not exceed the given \a boundary.
+ * This process involves translating the Rectangle if any of its edges
+ * lie beyond \a boundary, so that those edges then lie along the boundary
+ * instead.
+ *
+ * If either the width or height of this Rectangle is larger than that of
+ * \a boundary, then the returned Rectangle is clipped to be no larger. But
+ * other than this, the Rectangle is not clipped or reduced in size, merely
+ * translated.
+ *
+ * Note that this is not a conventional Rectangle intersection function
+ * which is provided by boundedTo().
+ *
+ * \return A Rectangle that does not extend beyond a boundary Rectangle
+ */
+Rectangle Rectangle::enclosedIn(const Rectangle &boundary) const
+{
+ /* We can't be bigger than the boundary rectangle. */
+ Rectangle result = boundedTo(Rectangle{ x, y, boundary.size() });
+
+ result.x = std::clamp<int>(result.x, boundary.x,
+ boundary.x + boundary.width - result.width);
+ result.y = std::clamp<int>(result.y, boundary.y,
+ boundary.y + boundary.height - result.height);
+
+ return result;
+}
+
+/**
+ * \brief Apply a non-uniform rational scaling to this Rectangle
+ * \param[in] numerator The numerators of the x and y scaling factors
+ * \param[in] denominator The denominators of the x and y scaling factors
+ *
+ * A non-uniform scaling is applied such that the resulting x
+ * coordinates are multiplied by numerator.width / denominator.width,
+ * and similarly for the y coordinates (using height in place of width).
+ *
+ * \return The non-uniformly scaled Rectangle
+ */
+Rectangle Rectangle::scaledBy(const Size &numerator, const Size &denominator) const
+{
+ int scaledX = static_cast<int64_t>(x) * numerator.width / denominator.width;
+ int scaledY = static_cast<int64_t>(y) * numerator.height / denominator.height;
+ unsigned int scaledWidth = static_cast<uint64_t>(width) * numerator.width / denominator.width;
+ unsigned int scaledHeight = static_cast<uint64_t>(height) * numerator.height / denominator.height;
+
+ return { scaledX, scaledY, scaledWidth, scaledHeight };
+}
+
+/**
+ * \brief Translate a Rectangle by the given amounts
+ * \param[in] point The amount to translate the Rectangle by
+ *
+ * The Rectangle is translated in the x-direction by the point's x coordinate
+ * and in the y-direction by the point's y coordinate.
+ *
+ * \return The translated Rectangle
+ */
+Rectangle Rectangle::translatedBy(const Point &point) const
+{
+ return { x + point.x, y + point.y, width, height };
+}
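Taken together, these helpers cover the ScalerCrop/digital-zoom arithmetic; a minimal sketch (the sensor size and zoom factor are arbitrary):

    /* Sketch: a 2x zoom crop centred in the sensor's active area. */
    Rectangle sensorArea(Size(4056, 3040));
    Size cropSize = sensorArea.size() / 2.0f;
    Rectangle crop = cropSize.boundedToAspectRatio(sensorArea.size())
                             .centeredTo(sensorArea.center());
    crop = crop.enclosedIn(sensorArea); /* never exceed the active area */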
+
+/**
* \brief Compare rectangles for equality
* \return True if the two rectangles are equal, false otherwise
*/
diff --git a/src/libcamera/ipa/meson.build b/src/libcamera/ipa/meson.build
new file mode 100644
index 00000000..44695240
--- /dev/null
+++ b/src/libcamera/ipa/meson.build
@@ -0,0 +1,17 @@
+# SPDX-License-Identifier: CC0-1.0
+
+libcamera_ipa_interfaces = []
+
+foreach file : ipa_mojom_files
+ name = '@0@'.format(file).split('/')[-1].split('.')[0]
+
+ # {pipeline}_ipa_interface.cpp
+ libcamera_ipa_interfaces += \
+ custom_target(name + '_ipa_interface_cpp',
+ input : file,
+ output : name + '_ipa_interface.cpp',
+ command : [
+ mojom_docs_extractor,
+ '-o', '@OUTPUT@', '@INPUT@'
+ ])
+endforeach
diff --git a/src/libcamera/ipa_context_wrapper.cpp b/src/libcamera/ipa_context_wrapper.cpp
deleted file mode 100644
index 231300ce..00000000
--- a/src/libcamera/ipa_context_wrapper.cpp
+++ /dev/null
@@ -1,297 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2019, Google Inc.
- *
- * ipa_context_wrapper.cpp - Image Processing Algorithm context wrapper
- */
-
-#include "libcamera/internal/ipa_context_wrapper.h"
-
-#include <vector>
-
-#include <libcamera/controls.h>
-
-#include "libcamera/internal/byte_stream_buffer.h"
-#include "libcamera/internal/camera_sensor.h"
-#include "libcamera/internal/utils.h"
-
-/**
- * \file ipa_context_wrapper.h
- * \brief Image Processing Algorithm context wrapper
- */
-
-namespace libcamera {
-
-/**
- * \class IPAContextWrapper
- * \brief Wrap an ipa_context and expose it as an IPAInterface
- *
- * The IPAContextWrapper class wraps an ipa_context, provided by an IPA module, and
- * exposes an IPAInterface. This mechanism is used for IPAs that are not
- * isolated in a separate process to allow direct calls from pipeline handler
- * using the IPAInterface API instead of the lower-level ipa_context API.
- *
- * The IPAInterface methods are converted to the ipa_context API by translating
- * all C++ arguments into plain C structures or byte arrays that contain no
- * pointer, as required by the ipa_context API.
- */
-
-/**
- * \brief Construct an IPAContextWrapper instance that wraps the \a context
- * \param[in] context The IPA module context
- *
- * Ownership of the \a context is passed to the IPAContextWrapper. The context remains
- * valid for the whole lifetime of the wrapper and is destroyed automatically
- * with it.
- */
-IPAContextWrapper::IPAContextWrapper(struct ipa_context *context)
- : ctx_(context), intf_(nullptr)
-{
- if (!ctx_)
- return;
-
- bool forceCApi = !!utils::secure_getenv("LIBCAMERA_IPA_FORCE_C_API");
-
- if (!forceCApi && ctx_ && ctx_->ops->get_interface) {
- intf_ = reinterpret_cast<IPAInterface *>(ctx_->ops->get_interface(ctx_));
- intf_->queueFrameAction.connect(this, &IPAContextWrapper::doQueueFrameAction);
- return;
- }
-
- ctx_->ops->register_callbacks(ctx_, &IPAContextWrapper::callbacks_,
- this);
-}
-
-IPAContextWrapper::~IPAContextWrapper()
-{
- if (!ctx_)
- return;
-
- ctx_->ops->destroy(ctx_);
-}
-
-int IPAContextWrapper::init(const IPASettings &settings)
-{
- if (intf_)
- return intf_->init(settings);
-
- if (!ctx_)
- return 0;
-
- struct ipa_settings c_settings;
- c_settings.configuration_file = settings.configurationFile.c_str();
-
- ctx_->ops->init(ctx_, &c_settings);
-
- return 0;
-}
-
-int IPAContextWrapper::start()
-{
- if (intf_)
- return intf_->start();
-
- if (!ctx_)
- return 0;
-
- return ctx_->ops->start(ctx_);
-}
-
-void IPAContextWrapper::stop()
-{
- if (intf_)
- return intf_->stop();
-
- if (!ctx_)
- return;
-
- ctx_->ops->stop(ctx_);
-}
-
-void IPAContextWrapper::configure(const CameraSensorInfo &sensorInfo,
- const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- const IPAOperationData &ipaConfig,
- IPAOperationData *result)
-{
- if (intf_)
- return intf_->configure(sensorInfo, streamConfig,
- entityControls, ipaConfig, result);
-
- if (!ctx_)
- return;
-
- serializer_.reset();
-
- /* Translate the camera sensor info. */
- struct ipa_sensor_info sensor_info = {};
- sensor_info.model = sensorInfo.model.c_str();
- sensor_info.bits_per_pixel = sensorInfo.bitsPerPixel;
- sensor_info.active_area.width = sensorInfo.activeAreaSize.width;
- sensor_info.active_area.height = sensorInfo.activeAreaSize.height;
- sensor_info.analog_crop.left = sensorInfo.analogCrop.x;
- sensor_info.analog_crop.top = sensorInfo.analogCrop.y;
- sensor_info.analog_crop.width = sensorInfo.analogCrop.width;
- sensor_info.analog_crop.height = sensorInfo.analogCrop.height;
- sensor_info.output_size.width = sensorInfo.outputSize.width;
- sensor_info.output_size.height = sensorInfo.outputSize.height;
- sensor_info.pixel_rate = sensorInfo.pixelRate;
- sensor_info.line_length = sensorInfo.lineLength;
-
- /* Translate the IPA stream configurations map. */
- struct ipa_stream c_streams[streamConfig.size()];
-
- unsigned int i = 0;
- for (const auto &stream : streamConfig) {
- struct ipa_stream *c_stream = &c_streams[i];
- unsigned int id = stream.first;
- const IPAStream &ipaStream = stream.second;
-
- c_stream->id = id;
- c_stream->pixel_format = ipaStream.pixelFormat;
- c_stream->width = ipaStream.size.width;
- c_stream->height = ipaStream.size.height;
-
- ++i;
- }
-
- /* Translate the IPA entity controls map. */
- struct ipa_control_info_map c_info_maps[entityControls.size()];
- std::vector<std::vector<uint8_t>> data(entityControls.size());
-
- i = 0;
- for (const auto &info : entityControls) {
- struct ipa_control_info_map &c_info_map = c_info_maps[i];
- unsigned int id = info.first;
- const ControlInfoMap &infoMap = info.second;
-
- size_t infoMapSize = serializer_.binarySize(infoMap);
- data[i].resize(infoMapSize);
- ByteStreamBuffer byteStream(data[i].data(), data[i].size());
- serializer_.serialize(infoMap, byteStream);
-
- c_info_map.id = id;
- c_info_map.data = byteStream.base();
- c_info_map.size = byteStream.size();
-
- ++i;
- }
-
- /* \todo Translate the ipaConfig and response */
- ctx_->ops->configure(ctx_, &sensor_info, c_streams, streamConfig.size(),
- c_info_maps, entityControls.size());
-}
-
-void IPAContextWrapper::mapBuffers(const std::vector<IPABuffer> &buffers)
-{
- if (intf_)
- return intf_->mapBuffers(buffers);
-
- if (!ctx_)
- return;
-
- struct ipa_buffer c_buffers[buffers.size()];
-
- for (unsigned int i = 0; i < buffers.size(); ++i) {
- struct ipa_buffer &c_buffer = c_buffers[i];
- const IPABuffer &buffer = buffers[i];
- const std::vector<FrameBuffer::Plane> &planes = buffer.planes;
-
- c_buffer.id = buffer.id;
- c_buffer.num_planes = planes.size();
-
- for (unsigned int j = 0; j < planes.size(); ++j) {
- const FrameBuffer::Plane &plane = planes[j];
- c_buffer.planes[j].dmabuf = plane.fd.fd();
- c_buffer.planes[j].length = plane.length;
- }
- }
-
- ctx_->ops->map_buffers(ctx_, c_buffers, buffers.size());
-}
-
-void IPAContextWrapper::unmapBuffers(const std::vector<unsigned int> &ids)
-{
- if (intf_)
- return intf_->unmapBuffers(ids);
-
- if (!ctx_)
- return;
-
- ctx_->ops->unmap_buffers(ctx_, ids.data(), ids.size());
-}
-
-void IPAContextWrapper::processEvent(const IPAOperationData &data)
-{
- if (intf_)
- return intf_->processEvent(data);
-
- if (!ctx_)
- return;
-
- struct ipa_operation_data c_data;
- c_data.operation = data.operation;
- c_data.data = data.data.data();
- c_data.num_data = data.data.size();
-
- struct ipa_control_list control_lists[data.controls.size()];
- c_data.lists = control_lists;
- c_data.num_lists = data.controls.size();
-
- std::size_t listsSize = 0;
- for (const auto &list : data.controls)
- listsSize += serializer_.binarySize(list);
-
- std::vector<uint8_t> binaryData(listsSize);
- ByteStreamBuffer byteStreamBuffer(binaryData.data(), listsSize);
-
- unsigned int i = 0;
- for (const auto &list : data.controls) {
- struct ipa_control_list &c_list = control_lists[i];
- c_list.size = serializer_.binarySize(list);
- ByteStreamBuffer b = byteStreamBuffer.carveOut(c_list.size);
-
- serializer_.serialize(list, b);
-
- c_list.data = b.base();
- }
-
- ctx_->ops->process_event(ctx_, &c_data);
-}
-
-void IPAContextWrapper::doQueueFrameAction(unsigned int frame,
- const IPAOperationData &data)
-{
- IPAInterface::queueFrameAction.emit(frame, data);
-}
-
-void IPAContextWrapper::queue_frame_action(void *ctx, unsigned int frame,
- struct ipa_operation_data &data)
-{
- IPAContextWrapper *_this = static_cast<IPAContextWrapper *>(ctx);
- IPAOperationData opData;
-
- opData.operation = data.operation;
- for (unsigned int i = 0; i < data.num_data; ++i)
- opData.data.push_back(data.data[i]);
-
- for (unsigned int i = 0; i < data.num_lists; ++i) {
- const struct ipa_control_list &c_list = data.lists[i];
- ByteStreamBuffer b(c_list.data, c_list.size);
- opData.controls.push_back(_this->serializer_.deserialize<ControlList>(b));
- }
-
- _this->doQueueFrameAction(frame, opData);
-}
-
-#ifndef __DOXYGEN__
-/*
- * This construct confuses Doxygen and makes it believe that all members of
- * the operations structure are members of IPAContextWrapper. It must thus be
- * hidden.
- */
-const struct ipa_callback_ops IPAContextWrapper::callbacks_ = {
- .queue_frame_action = &IPAContextWrapper::queue_frame_action,
-};
-#endif
-
-} /* namespace libcamera */
diff --git a/src/libcamera/ipa_data_serializer.cpp b/src/libcamera/ipa_data_serializer.cpp
new file mode 100644
index 00000000..fb941e6b
--- /dev/null
+++ b/src/libcamera/ipa_data_serializer.cpp
@@ -0,0 +1,615 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * ipa_data_serializer.cpp - Image Processing Algorithm data serializer
+ */
+
+#include "libcamera/internal/ipa_data_serializer.h"
+
+#include <libcamera/base/log.h>
+
+/**
+ * \file ipa_data_serializer.h
+ * \brief IPA Data Serializer
+ */
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(IPADataSerializer)
+
+/**
+ * \class IPADataSerializer
+ * \brief IPA Data Serializer
+ *
+ * Static template class that provides functions for serializing and
+ * deserializing IPA data.
+ *
+ * \todo Switch to Span instead of byte and fd vector
+ *
+ * \todo Harden the vector and map deserializer
+ *
+ * \todo For FileDescriptors, instead of storing a validity flag, store an
+ * index into the fd array. This will allow us to use views instead of copying.
+ */
+
+namespace {
+
+/**
+ * \fn template<typename T> void appendPOD(std::vector<uint8_t> &vec, T val)
+ * \brief Append POD to end of byte vector, in little-endian order
+ * \tparam T Type of POD to append
+ * \param[in] vec Byte vector to append to
+ * \param[in] val Value to append
+ *
+ * This function is meant to be used by the IPA data serializer, and the
+ * generated IPA proxies.
+ */
+
+/**
+ * \fn template<typename T> T readPOD(std::vector<uint8_t>::iterator it, size_t pos,
+ * std::vector<uint8_t>::iterator end)
+ * \brief Read POD from byte vector, in little-endian order
+ * \tparam T Type of POD to read
+ * \param[in] it Iterator of byte vector to read from
+ * \param[in] pos Index in byte vector to read from
+ * \param[in] end Iterator marking end of byte vector
+ *
+ * This function is meant to be used by the IPA data serializer, and the
+ * generated IPA proxies.
+ *
+ * If the \a pos plus the byte-width of the desired POD is past \a end, a
+ * fatal error will occur, as it means there is insufficient data for
+ * deserialization, which should never happen.
+ *
+ * \return The POD read from \a it at index \a pos
+ */
+
+/**
+ * \fn template<typename T> T readPOD(std::vector<uint8_t> &vec, size_t pos)
+ * \brief Read POD from byte vector, in little-endian order
+ * \tparam T Type of POD to read
+ * \param[in] vec Byte vector to read from
+ * \param[in] pos Index in vec to start reading from
+ *
+ * This function is meant to be used by the IPA data serializer, and the
+ * generated IPA proxies.
+ *
+ * If the \a pos plus the byte-width of the desired POD is past the end of
+ * \a vec, a fatal error will occur, as it means there is insufficient data
+ * for deserialization, which should never happen.
+ *
+ * \return The POD read from \a vec at index \a pos
+ */
+
+} /* namespace */
+
+/**
+ * \fn template<typename T> IPADataSerializer<T>::serialize(
+ * T data,
+ * ControlSerializer *cs = nullptr)
+ * \brief Serialize an object into byte vector and fd vector
+ * \tparam T Type of object to serialize
+ * \param[in] data Object to serialize
+ * \param[in] cs ControlSerializer
+ *
+ * \a cs is only necessary if the object type \a T or its members contain
+ * ControlList or ControlInfoMap.
+ *
+ * \return A tuple of byte vector and fd vector containing the serialized
+ * form of \a data
+ */
+
+/**
+ * \fn template<typename T> IPADataSerializer<T>::deserialize(
+ * const std::vector<uint8_t> &data,
+ * ControlSerializer *cs = nullptr)
+ * \brief Deserialize byte vector into an object
+ * \tparam T Type of object to deserialize to
+ * \param[in] data Byte vector to deserialize from
+ * \param[in] cs ControlSerializer
+ *
+ * This version of deserialize() can be used if the object type \a T and its
+ * members don't have any FileDescriptor.
+ *
+ * \a cs is only necessary if the object type \a T or its members contain
+ * ControlList or ControlInfoMap.
+ *
+ * \return The deserialized object
+ */
+
+/**
+ * \fn template<typename T> IPADataSerializer<T>::deserialize(
+ * std::vector<uint8_t>::const_iterator dataBegin,
+ * std::vector<uint8_t>::const_iterator dataEnd,
+ * ControlSerializer *cs = nullptr)
+ * \brief Deserialize byte vector into an object
+ * \tparam T Type of object to deserialize to
+ * \param[in] dataBegin Begin iterator of byte vector to deserialize from
+ * \param[in] dataEnd End iterator of byte vector to deserialize from
+ * \param[in] cs ControlSerializer
+ *
+ * This version of deserialize() can be used if the object type \a T and its
+ * members don't have any FileDescriptor.
+ *
+ * \a cs is only necessary if the object type \a T or its members contain
+ * ControlList or ControlInfoMap.
+ *
+ * \return The deserialized object
+ */
+
+/**
+ * \fn template<typename T> IPADataSerializer<T>::deserialize(
+ * const std::vector<uint8_t> &data,
+ * const std::vector<int32_t> &fds,
+ * ControlSerializer *cs = nullptr)
+ * \brief Deserialize byte vector and fd vector into an object
+ * \tparam T Type of object to deserialize to
+ * \param[in] data Byte vector to deserialize from
+ * \param[in] fds Fd vector to deserialize from
+ * \param[in] cs ControlSerializer
+ *
+ * This version of deserialize() (or the iterator version) must be used if
+ * the object type \a T or its members contain FileDescriptor.
+ *
+ * \a cs is only necessary if the object type \a T or its members contain
+ * ControlList or ControlInfoMap.
+ *
+ * \return The deserialized object
+ */
+
+/**
+ * \fn template<typename T> IPADataSerializer<T>::deserialize(
+ * std::vector<uint8_t>::const_iterator dataBegin,
+ * std::vector<uint8_t>::const_iterator dataEnd,
+ * std::vector<int32_t>::const_iterator fdsBegin,
+ * std::vector<int32_t>::const_iterator fdsEnd,
+ * ControlSerializer *cs = nullptr)
+ * \brief Deserialize byte vector and fd vector into an object
+ * \tparam T Type of object to deserialize to
+ * \param[in] dataBegin Begin iterator of byte vector to deserialize from
+ * \param[in] dataEnd End iterator of byte vector to deserialize from
+ * \param[in] fdsBegin Begin iterator of fd vector to deserialize from
+ * \param[in] fdsEnd End iterator of fd vector to deserialize from
+ * \param[in] cs ControlSerializer
+ *
+ * This version of deserialize() (or the vector version) must be used if
+ * the object type \a T or its members contain FileDescriptor.
+ *
+ * \a cs is only necessary if the object type \a T or its members contain
+ * ControlList or ControlInfoMap.
+ *
+ * \return The deserialized object
+ */
+
+#ifndef __DOXYGEN__
+
+#define DEFINE_POD_SERIALIZER(type) \
+ \
+template<> \
+std::tuple<std::vector<uint8_t>, std::vector<int32_t>> \
+IPADataSerializer<type>::serialize(const type &data, \
+ [[maybe_unused]] ControlSerializer *cs) \
+{ \
+ std::vector<uint8_t> dataVec; \
+ dataVec.reserve(sizeof(type)); \
+ appendPOD<type>(dataVec, data); \
+ \
+ return { dataVec, {} }; \
+} \
+ \
+template<> \
+type IPADataSerializer<type>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, \
+ std::vector<uint8_t>::const_iterator dataEnd, \
+ [[maybe_unused]] ControlSerializer *cs) \
+{ \
+ return readPOD<type>(dataBegin, 0, dataEnd); \
+} \
+ \
+template<> \
+type IPADataSerializer<type>::deserialize(const std::vector<uint8_t> &data, \
+ ControlSerializer *cs) \
+{ \
+ return deserialize(data.cbegin(), data.cend(), cs); \
+} \
+ \
+template<> \
+type IPADataSerializer<type>::deserialize(const std::vector<uint8_t> &data, \
+ [[maybe_unused]] const std::vector<int32_t> &fds, \
+ ControlSerializer *cs) \
+{ \
+ return deserialize(data.cbegin(), data.cend(), cs); \
+} \
+ \
+template<> \
+type IPADataSerializer<type>::deserialize(std::vector<uint8_t>::const_iterator dataBegin, \
+ std::vector<uint8_t>::const_iterator dataEnd, \
+ [[maybe_unused]] std::vector<int32_t>::const_iterator fdsBegin, \
+ [[maybe_unused]] std::vector<int32_t>::const_iterator fdsEnd, \
+ ControlSerializer *cs) \
+{ \
+ return deserialize(dataBegin, dataEnd, cs); \
+}
+
+DEFINE_POD_SERIALIZER(bool)
+DEFINE_POD_SERIALIZER(uint8_t)
+DEFINE_POD_SERIALIZER(uint16_t)
+DEFINE_POD_SERIALIZER(uint32_t)
+DEFINE_POD_SERIALIZER(uint64_t)
+DEFINE_POD_SERIALIZER(int8_t)
+DEFINE_POD_SERIALIZER(int16_t)
+DEFINE_POD_SERIALIZER(int32_t)
+DEFINE_POD_SERIALIZER(int64_t)
+DEFINE_POD_SERIALIZER(float)
+DEFINE_POD_SERIALIZER(double)
+
+/*
+ * Strings are serialized simply by copying the byte range {string.cbegin(), string.cend()}.
+ * The size of the string is recorded by the container (struct, vector, map, or
+ * function parameter serdes).
+ */
+template<>
+std::tuple<std::vector<uint8_t>, std::vector<int32_t>>
+IPADataSerializer<std::string>::serialize(const std::string &data,
+ [[maybe_unused]] ControlSerializer *cs)
+{
+ return { { data.cbegin(), data.cend() }, {} };
+}
+
+template<>
+std::string
+IPADataSerializer<std::string>::deserialize(const std::vector<uint8_t> &data,
+ [[maybe_unused]] ControlSerializer *cs)
+{
+ return { data.cbegin(), data.cend() };
+}
+
+template<>
+std::string
+IPADataSerializer<std::string>::deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+ [[maybe_unused]] ControlSerializer *cs)
+{
+ return { dataBegin, dataEnd };
+}
+
+template<>
+std::string
+IPADataSerializer<std::string>::deserialize(const std::vector<uint8_t> &data,
+ [[maybe_unused]] const std::vector<int32_t> &fds,
+ [[maybe_unused]] ControlSerializer *cs)
+{
+ return { data.cbegin(), data.cend() };
+}
+
+template<>
+std::string
+IPADataSerializer<std::string>::deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+ [[maybe_unused]] std::vector<int32_t>::const_iterator fdsBegin,
+ [[maybe_unused]] std::vector<int32_t>::const_iterator fdsEnd,
+ [[maybe_unused]] ControlSerializer *cs)
+{
+ return { dataBegin, dataEnd };
+}
+
+/*
+ * ControlList is serialized as:
+ *
+ * 4 bytes - uint32_t Size of serialized ControlInfoMap, in bytes
+ * 4 bytes - uint32_t Size of serialized ControlList, in bytes
+ * X bytes - Serialized ControlInfoMap (using ControlSerializer)
+ * X bytes - Serialized ControlList (using ControlSerializer)
+ *
+ * If data.infoMap() is nullptr, then the default controls::controls will
+ * be used. The serialized ControlInfoMap will have zero length.
+ */
+template<>
+std::tuple<std::vector<uint8_t>, std::vector<int32_t>>
+IPADataSerializer<ControlList>::serialize(const ControlList &data, ControlSerializer *cs)
+{
+ if (!cs)
+ LOG(IPADataSerializer, Fatal)
+ << "ControlSerializer not provided for serialization of ControlList";
+
+ size_t size;
+ std::vector<uint8_t> infoData;
+ int ret;
+
+ /*
+ * \todo Revisit this opportunistic serialization of the
+ * ControlInfoMap, as it could be fragile
+ */
+ if (data.infoMap() && !cs->isCached(*data.infoMap())) {
+ size = cs->binarySize(*data.infoMap());
+ infoData.resize(size);
+ ByteStreamBuffer buffer(infoData.data(), infoData.size());
+ ret = cs->serialize(*data.infoMap(), buffer);
+
+ if (ret < 0 || buffer.overflow()) {
+ LOG(IPADataSerializer, Error) << "Failed to serialize ControlList's ControlInfoMap";
+ return { {}, {} };
+ }
+ }
+
+ size = cs->binarySize(data);
+ std::vector<uint8_t> listData(size);
+ ByteStreamBuffer buffer(listData.data(), listData.size());
+ ret = cs->serialize(data, buffer);
+
+ if (ret < 0 || buffer.overflow()) {
+ LOG(IPADataSerializer, Error) << "Failed to serialize ControlList";
+ return { {}, {} };
+ }
+
+ std::vector<uint8_t> dataVec;
+ dataVec.reserve(8 + infoData.size() + listData.size());
+ appendPOD<uint32_t>(dataVec, infoData.size());
+ appendPOD<uint32_t>(dataVec, listData.size());
+ dataVec.insert(dataVec.end(), infoData.begin(), infoData.end());
+ dataVec.insert(dataVec.end(), listData.begin(), listData.end());
+
+ return { dataVec, {} };
+}
+
+template<>
+ControlList
+IPADataSerializer<ControlList>::deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+ ControlSerializer *cs)
+{
+ if (!cs)
+ LOG(IPADataSerializer, Fatal)
+ << "ControlSerializer not provided for deserialization of ControlList";
+
+ if (std::distance(dataBegin, dataEnd) < 8)
+ return {};
+
+ uint32_t infoDataSize = readPOD<uint32_t>(dataBegin, 0, dataEnd);
+ uint32_t listDataSize = readPOD<uint32_t>(dataBegin, 4, dataEnd);
+
+ std::vector<uint8_t>::const_iterator it = dataBegin + 8;
+
+ if (infoDataSize + listDataSize < infoDataSize ||
+ static_cast<uint32_t>(std::distance(it, dataEnd)) < infoDataSize + listDataSize)
+ return {};
+
+ if (infoDataSize > 0) {
+ ByteStreamBuffer buffer(&*it, infoDataSize);
+ ControlInfoMap map = cs->deserialize<ControlInfoMap>(buffer);
+ /* It's fine if map is empty. */
+ if (buffer.overflow()) {
+ LOG(IPADataSerializer, Error)
+ << "Failed to deserialize ControlList's ControlInfoMap: buffer overflow";
+ return ControlList();
+ }
+ }
+
+ it += infoDataSize;
+ ByteStreamBuffer buffer(&*it, listDataSize);
+ ControlList list = cs->deserialize<ControlList>(buffer);
+ if (buffer.overflow())
+ LOG(IPADataSerializer, Error) << "Failed to deserialize ControlList: buffer overflow";
+
+ return list;
+}
+
+template<>
+ControlList
+IPADataSerializer<ControlList>::deserialize(const std::vector<uint8_t> &data,
+ ControlSerializer *cs)
+{
+ return deserialize(data.cbegin(), data.cend(), cs);
+}
+
+template<>
+ControlList
+IPADataSerializer<ControlList>::deserialize(const std::vector<uint8_t> &data,
+ [[maybe_unused]] const std::vector<int32_t> &fds,
+ ControlSerializer *cs)
+{
+ return deserialize(data.cbegin(), data.cend(), cs);
+}
+
+template<>
+ControlList
+IPADataSerializer<ControlList>::deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+ [[maybe_unused]] std::vector<int32_t>::const_iterator fdsBegin,
+ [[maybe_unused]] std::vector<int32_t>::const_iterator fdsEnd,
+ ControlSerializer *cs)
+{
+ return deserialize(dataBegin, dataEnd, cs);
+}
+
+/*
+ * const ControlInfoMap is serialized as:
+ *
+ * 4 bytes - uint32_t Size of serialized ControlInfoMap, in bytes
+ * X bytes - Serialized ControlInfoMap (using ControlSerializer)
+ */
+template<>
+std::tuple<std::vector<uint8_t>, std::vector<int32_t>>
+IPADataSerializer<ControlInfoMap>::serialize(const ControlInfoMap &map,
+ ControlSerializer *cs)
+{
+ if (!cs)
+ LOG(IPADataSerializer, Fatal)
+ << "ControlSerializer not provided for serialization of ControlInfoMap";
+
+ size_t size = cs->binarySize(map);
+ std::vector<uint8_t> infoData(size);
+ ByteStreamBuffer buffer(infoData.data(), infoData.size());
+ int ret = cs->serialize(map, buffer);
+
+ if (ret < 0 || buffer.overflow()) {
+ LOG(IPADataSerializer, Error) << "Failed to serialize ControlInfoMap";
+ return { {}, {} };
+ }
+
+ std::vector<uint8_t> dataVec;
+ appendPOD<uint32_t>(dataVec, infoData.size());
+ dataVec.insert(dataVec.end(), infoData.begin(), infoData.end());
+
+ return { dataVec, {} };
+}
+
+template<>
+ControlInfoMap
+IPADataSerializer<ControlInfoMap>::deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+ ControlSerializer *cs)
+{
+ if (!cs)
+ LOG(IPADataSerializer, Fatal)
+ << "ControlSerializer not provided for deserialization of ControlInfoMap";
+
+ if (std::distance(dataBegin, dataEnd) < 4)
+ return {};
+
+ uint32_t infoDataSize = readPOD<uint32_t>(dataBegin, 0, dataEnd);
+
+ std::vector<uint8_t>::const_iterator it = dataBegin + 4;
+
+ if (static_cast<uint32_t>(std::distance(it, dataEnd)) < infoDataSize)
+ return {};
+
+ ByteStreamBuffer buffer(&*it, infoDataSize);
+ ControlInfoMap map = cs->deserialize<ControlInfoMap>(buffer);
+
+ return map;
+}
+
+template<>
+ControlInfoMap
+IPADataSerializer<ControlInfoMap>::deserialize(const std::vector<uint8_t> &data,
+ ControlSerializer *cs)
+{
+ return deserialize(data.cbegin(), data.cend(), cs);
+}
+
+template<>
+ControlInfoMap
+IPADataSerializer<ControlInfoMap>::deserialize(const std::vector<uint8_t> &data,
+ [[maybe_unused]] const std::vector<int32_t> &fds,
+ ControlSerializer *cs)
+{
+ return deserialize(data.cbegin(), data.cend(), cs);
+}
+
+template<>
+ControlInfoMap
+IPADataSerializer<ControlInfoMap>::deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+ [[maybe_unused]] std::vector<int32_t>::const_iterator fdsBegin,
+ [[maybe_unused]] std::vector<int32_t>::const_iterator fdsEnd,
+ ControlSerializer *cs)
+{
+ return deserialize(dataBegin, dataEnd, cs);
+}
+
+/*
+ * FileDescriptors are serialized into a single byte that records whether the
+ * FileDescriptor is valid or not. If it is valid, the fd is appended to the
+ * fd vector during serialization, and during deserialization the fd vector
+ * const_iterator is expected to point to a valid fd.
+ *
+ * This validity is necessary so that we don't send -1 fd over sendmsg(). It
+ * also allows us to simply send the entire fd vector into the deserializer
+ * and it will be recursively consumed as necessary.
+ *
+ * \todo Consider serializing the FileDescriptor in 4 bytes to ensure
+ * 32-bit alignment of all serialized data
+ */
+template<>
+std::tuple<std::vector<uint8_t>, std::vector<int32_t>>
+IPADataSerializer<FileDescriptor>::serialize(const FileDescriptor &data,
+ [[maybe_unused]] ControlSerializer *cs)
+{
+ std::vector<uint8_t> dataVec = { data.isValid() };
+ std::vector<int32_t> fdVec;
+ if (data.isValid())
+ fdVec.push_back(data.fd());
+
+ return { dataVec, fdVec };
+}
+
+template<>
+FileDescriptor IPADataSerializer<FileDescriptor>::deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+ std::vector<int32_t>::const_iterator fdsBegin,
+ std::vector<int32_t>::const_iterator fdsEnd,
+ [[maybe_unused]] ControlSerializer *cs)
+{
+ ASSERT(std::distance(dataBegin, dataEnd) >= 1);
+
+ bool valid = !!(*dataBegin);
+
+ ASSERT(!(valid && std::distance(fdsBegin, fdsEnd) < 1));
+
+ return valid ? FileDescriptor(*fdsBegin) : FileDescriptor();
+}
+
+template<>
+FileDescriptor IPADataSerializer<FileDescriptor>::deserialize(const std::vector<uint8_t> &data,
+ const std::vector<int32_t> &fds,
+ [[maybe_unused]] ControlSerializer *cs)
+{
+ return deserialize(data.cbegin(), data.cend(), fds.cbegin(), fds.cend());
+}
+
+/*
+ * FrameBuffer::Plane is serialized as:
+ *
+ * 1 byte - FileDescriptor
+ * 4 bytes - uint32_t Length
+ */
+template<>
+std::tuple<std::vector<uint8_t>, std::vector<int32_t>>
+IPADataSerializer<FrameBuffer::Plane>::serialize(const FrameBuffer::Plane &data,
+ [[maybe_unused]] ControlSerializer *cs)
+{
+ std::vector<uint8_t> dataVec;
+ std::vector<int32_t> fdsVec;
+
+ std::vector<uint8_t> fdBuf;
+ std::vector<int32_t> fdFds;
+ std::tie(fdBuf, fdFds) =
+ IPADataSerializer<FileDescriptor>::serialize(data.fd);
+ dataVec.insert(dataVec.end(), fdBuf.begin(), fdBuf.end());
+ fdsVec.insert(fdsVec.end(), fdFds.begin(), fdFds.end());
+
+ appendPOD<uint32_t>(dataVec, data.length);
+
+ return { dataVec, fdsVec };
+}
+
+template<>
+FrameBuffer::Plane
+IPADataSerializer<FrameBuffer::Plane>::deserialize(std::vector<uint8_t>::const_iterator dataBegin,
+ std::vector<uint8_t>::const_iterator dataEnd,
+ std::vector<int32_t>::const_iterator fdsBegin,
+ [[maybe_unused]] std::vector<int32_t>::const_iterator fdsEnd,
+ [[maybe_unused]] ControlSerializer *cs)
+{
+ FrameBuffer::Plane ret;
+
+ ret.fd = IPADataSerializer<FileDescriptor>::deserialize(dataBegin, dataBegin + 1,
+ fdsBegin, fdsBegin + 1);
+ ret.length = readPOD<uint32_t>(dataBegin, 1, dataEnd);
+
+ return ret;
+}
+
+template<>
+FrameBuffer::Plane
+IPADataSerializer<FrameBuffer::Plane>::deserialize(const std::vector<uint8_t> &data,
+ const std::vector<int32_t> &fds,
+ ControlSerializer *cs)
+{
+ return deserialize(data.cbegin(), data.cend(), fds.cbegin(), fds.cend(), cs);
+}
+
+#endif /* __DOXYGEN__ */
+
+} /* namespace libcamera */
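
To make the wire formats above concrete, here is a minimal round-trip sketch
for FrameBuffer::Plane, assuming a valid dmabuf fd is passed in (dmabufFd is
a placeholder) and that the internal header pulls in the required libcamera
types:

    #include <tuple>
    #include <vector>

    #include "libcamera/internal/ipa_data_serializer.h"

    using namespace libcamera;

    void planeRoundTrip(int dmabufFd)
    {
            FrameBuffer::Plane plane;
            plane.fd = FileDescriptor(dmabufFd);
            plane.length = 4096;

            /* One validity byte, then the length in little-endian order. */
            std::vector<uint8_t> data;
            std::vector<int32_t> fds;
            std::tie(data, fds) =
                    IPADataSerializer<FrameBuffer::Plane>::serialize(plane);
            /* data == { 0x01, 0x00, 0x10, 0x00, 0x00 },
             * fds holds the duplicate of dmabufFd owned by plane.fd. */

            FrameBuffer::Plane out =
                    IPADataSerializer<FrameBuffer::Plane>::deserialize(data, fds);
            /* out.length == 4096; out.fd wraps a further duplicate of the fd. */
    }
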
diff --git a/src/libcamera/ipa_interface.cpp b/src/libcamera/ipa_interface.cpp
index 23fc56d7..c44581b2 100644
--- a/src/libcamera/ipa_interface.cpp
+++ b/src/libcamera/ipa_interface.cpp
@@ -15,371 +15,51 @@
* an Image Processing Algorithm (IPA) module. An IPA module is developed for a
* specific pipeline handler and each pipeline handler may be compatible with
* multiple IPA implementations, both open and closed source. To support this,
- * libcamera communicates with IPA modules through a standard plain C interface.
+ * libcamera communicates with IPA modules through a per-pipeline C++ interface.
*
- * IPA modules shall expose a public function named ipaCreate() with the
- * following prototype.
+ * IPA modules shall provide an ipaCreate() function exported as a public C
+ * symbol with the following prototype:
*
* \code{.c}
- * struct ipa_context *ipaCreate();
+ * IPAInterface *ipaCreate();
* \endcode
*
- * The ipaCreate() function creates an instance of an IPA context, which models
+ * The ipaCreate() function creates an instance of an IPA interface, which models
* a context of execution for the IPA. IPA modules shall support creating one
* context per camera, as required by their associated pipeline handler.
*
- * The IPA module context operations are defined in the struct ipa_context_ops.
- * They model a low-level interface to configure the IPA, notify it of events,
- * and receive IPA actions through callbacks. An IPA module stores a pointer to
- * the operations corresponding to its context in the ipa_context::ops field.
- * That pointer is immutable for the lifetime of the context, and may differ
- * between different contexts created by the same IPA module.
+ * The IPA module interface operations are defined in the mojom file
+ * corresponding to the pipeline handler, in
+ * include/libcamera/ipa/{pipeline_name}.mojom.
*
- * The IPA interface defines base data types and functions to exchange data. On
- * top of this, each pipeline handler is responsible for defining the set of
- * events and actions used to communicate with their IPA. These are collectively
- * referred to as IPA operations and define the pipeline handler-specific IPA
- * protocol. Each operation defines the data that it carries, and how that data
- * is encoded in the ipa_context_ops functions arguments.
+ * The IPA interface is specific to each pipeline handler. The pipeline handlers
+ * define a set of operations used to communicate with their IPA modules. The
+ * operations, along with the data structures they use, are collectively
+ * referred to as the IPA protocol.
+ *
+ * The IPA protocol is defined using the
+ * <a href="https://chromium.googlesource.com/chromium/src/+/master/mojo/public/tools/bindings/README.md">Mojo interface definition language</a>,
+ * in a Mojo module file stored in include/libcamera/ipa/{pipeline_name}.mojom.
+ * The Mojo module contains two Mojo interfaces: IPAInterface defines the
+ * operations exposed by the IPA and called by the pipeline handler, and
+ * IPAEventInterface defines the events generated by the IPA and received by the
+ * pipeline handler.
*
* \todo Add reference to how pipelines shall document their protocol.
*
* IPAs can be isolated in a separate process. This implies that arguments to
- * the IPA interface functions may need to be transferred over IPC. All
- * arguments use Plain Old Data types and are documented either in the form of C
- * data types, or as a textual description of byte arrays for types that can't
- * be expressed using C data types (such as arrays of mixed data types). IPA
- * modules can thus use the C API without calling into libcamera to access the
- * data passed to the IPA context operations.
+ * the IPA interface functions may need to be transferred over IPC. An IPA
+ * proxy is auto-generated based on the mojom file, which abstracts away the
+ * (de)serialization from the pipeline handler and the IPA implementation. Thus
+ * any C++ structure that is defined in the mojom file, or the C++ libcamera
+ * objects that are listed in core.mojom, can be used directly.
*
* Due to IPC, synchronous communication between pipeline handlers and IPAs can
- * be costly. For that reason, the interface operates asynchronously. This
- * implies that methods don't return a status, and that all methods may copy
- * their arguments.
- *
- * The IPAInterface class is a C++ representation of the ipa_context_ops, using
- * C++ data classes provided by libcamera. This is the API exposed to pipeline
- * handlers to communicate with IPA modules. IPA modules may use the
- * IPAInterface API internally if they want to benefit from the data and helper
- * classes offered by libcamera.
- *
- * When an IPA module is loaded directly into the libcamera process and uses
- * the IPAInterface API internally, short-circuiting the path to the
- * ipa_context_ops and back to IPAInterface is desirable. To support this, IPA
- * modules may implement the ipa_context_ops::get_interface function to return a
- * pointer to their internal IPAInterface.
- */
-
-/**
- * \struct ipa_context
- * \brief IPA module context of execution
- *
- * This structure models a context of execution for an IPA module. It is
- * instantiated by the IPA module ipaCreate() function. IPA modules allocate
- * context instances in an implementation-defined way, contexts shall thus be
- * destroyed using the ipa_operation::destroy function only.
- *
- * The ipa_context structure provides a pointer to the IPA context operations.
- * It shall otherwise be treated as a constant black-box cookie and passed
- * unmodified to the functions defined in struct ipa_context_ops.
- *
- * IPA modules are expected to extend struct ipa_context by inheriting from it,
- * either through structure embedding to model inheritance in plain C, or
- * through C++ class inheritance. A simple example of the latter is available
- * in the IPAContextWrapper class implementation.
- *
- * \var ipa_context::ops
- * \brief The IPA context operations
- */
-
-/**
- * \struct ipa_settings
- * \brief IPA initialization settings for the IPA context operations
- * \sa IPASettings
- *
- * \var ipa_settings::configuration_file
- * \brief The name of the IPA configuration file (may be null or point to an
- * empty string)
- */
-
-/**
- * \struct ipa_sensor_info
- * \brief Camera sensor information for the IPA context operations
- * \sa libcamera::CameraSensorInfo
- *
- * \var ipa_sensor_info::model
- * \brief The camera sensor model name
- * \todo Remove this field as soon as no IPA depends on it anymore
- *
- * \var ipa_sensor_info::bits_per_pixel
- * \brief The camera sensor image format bit depth
- * \sa libcamera::CameraSensorInfo::bitsPerPixel
- *
- * \var ipa_sensor_info::active_area.width
- * \brief The camera sensor pixel array active area width
- * \sa libcamera::CameraSensorInfo::activeAreaSize
- *
- * \var ipa_sensor_info::active_area.height
- * \brief The camera sensor pixel array active area height
- * \sa libcamera::CameraSensorInfo::activeAreaSize
- *
- * \var ipa_sensor_info::active_area
- * \brief The camera sensor pixel array active size
- * \sa libcamera::CameraSensorInfo::activeAreaSize
- *
- * \var ipa_sensor_info::analog_crop.left
- * \brief The left coordinate of the analog crop rectangle, relative to the
- * pixel array active area
- * \sa libcamera::CameraSensorInfo::analogCrop
- *
- * \var ipa_sensor_info::analog_crop.top
- * \brief The top coordinate of the analog crop rectangle, relative to the pixel
- * array active area
- * \sa libcamera::CameraSensorInfo::analogCrop
- *
- * \var ipa_sensor_info::analog_crop.width
- * \brief The horizontal size of the analog crop rectangle
- * \sa libcamera::CameraSensorInfo::analogCrop
- *
- * \var ipa_sensor_info::analog_crop.height
- * \brief The vertical size of the analog crop rectangle
- * \sa libcamera::CameraSensorInfo::analogCrop
- *
- * \var ipa_sensor_info::analog_crop
- * \brief The analog crop rectangle
- * \sa libcamera::CameraSensorInfo::analogCrop
- *
- * \var ipa_sensor_info::output_size.width
- * \brief The horizontal size of the output image
- * \sa libcamera::CameraSensorInfo::outputSize
- *
- * \var ipa_sensor_info::output_size.height
- * \brief The vertical size of the output image
- * \sa libcamera::CameraSensorInfo::outputSize
- *
- * \var ipa_sensor_info::output_size
- * \brief The size of the output image
- * \sa libcamera::CameraSensorInfo::outputSize
- *
- * \var ipa_sensor_info::pixel_rate
- * \brief The number of pixels produced per second
- * \sa libcamera::CameraSensorInfo::pixelRate
- *
- * \var ipa_sensor_info::line_length
- * \brief The full line length, including blanking, in pixel units
- * \sa libcamera::CameraSensorInfo::lineLength
- */
-
-/**
- * \struct ipa_stream
- * \brief Stream information for the IPA context operations
- *
- * \var ipa_stream::id
- * \brief Identifier for the stream, defined by the IPA protocol
- *
- * \var ipa_stream::pixel_format
- * \brief The stream pixel format, as defined by the PixelFormat class
- *
- * \var ipa_stream::width
- * \brief The stream width in pixels
- *
- * \var ipa_stream::height
- * \brief The stream height in pixels
- */
-
-/**
- * \struct ipa_control_info_map
- * \brief ControlInfoMap description for the IPA context operations
- *
- * \var ipa_control_info_map::id
- * \brief Identifier for the ControlInfoMap, defined by the IPA protocol
- *
- * \var ipa_control_info_map::data
- * \brief Pointer to a control packet for the ControlInfoMap
- * \sa ipa_controls.h
- *
- * \var ipa_control_info_map::size
- * \brief The size of the control packet in bytes
- */
-
-/**
- * \struct ipa_buffer_plane
- * \brief A plane for an ipa_buffer
- *
- * \var ipa_buffer_plane::dmabuf
- * \brief The dmabuf file descriptor for the plane (-1 for unused planes)
- *
- * \var ipa_buffer_plane::length
- * \brief The plane length in bytes (0 for unused planes)
- */
-
-/**
- * \struct ipa_buffer
- * \brief Buffer information for the IPA context operations
- *
- * \var ipa_buffer::id
- * \brief The buffer unique ID (see \ref libcamera::IPABuffer::id)
- *
- * \var ipa_buffer::num_planes
- * \brief The number of used planes in the ipa_buffer::planes array
- *
- * \var ipa_buffer::planes
- * \brief The buffer planes (up to 3)
- */
-
-/**
- * \struct ipa_control_list
- * \brief ControlList description for the IPA context operations
- *
- * \var ipa_control_list::data
- * \brief Pointer to a control packet for the ControlList
- * \sa ipa_controls.h
- *
- * \var ipa_control_list::size
- * \brief The size of the control packet in bytes
- */
-
-/**
- * \struct ipa_operation_data
- * \brief IPA operation data for the IPA context operations
- * \sa libcamera::IPAOperationData
- *
- * \var ipa_operation_data::operation
- * \brief IPA protocol operation
- *
- * \var ipa_operation_data::data
- * \brief Pointer to the operation data array
- *
- * \var ipa_operation_data::num_data
- * \brief Number of entries in the ipa_operation_data::data array
- *
- * \var ipa_operation_data::lists
- * \brief Pointer to an array of ipa_control_list
- *
- * \var ipa_operation_data::num_lists
- * \brief Number of entries in the ipa_control_list array
- */
-
-/**
- * \struct ipa_callback_ops
- * \brief IPA context operations as a set of function pointers
- */
-
-/**
- * \var ipa_callback_ops::queue_frame_action
- * \brief Queue an action associated with a frame to the pipeline handler
- * \param[in] cb_ctx The callback context registered with
- * ipa_context_ops::register_callbacks
- * \param[in] frame The frame number
- *
- * \sa libcamera::IPAInterface::queueFrameAction
- */
-
-/**
- * \struct ipa_context_ops
- * \brief IPA context operations as a set of function pointers
- *
- * To allow for isolation of IPA modules in separate processes, the functions
- * defined in the ipa_context_ops structure return only data related to the
- * libcamera side of the operations. In particular, error related to the
- * libcamera side of the IPC may be returned. Data returned by the IPA,
- * including status information, shall be provided through callbacks from the
- * IPA to libcamera.
- */
-
-/**
- * \var ipa_context_ops::destroy
- * \brief Destroy the IPA context created by the module's ipaCreate() function
- * \param[in] ctx The IPA context
- */
-
-/**
- * \var ipa_context_ops::get_interface
- * \brief Retrieve the IPAInterface implemented by the ipa_context (optional)
- * \param[in] ctx The IPA context
- *
- * IPA modules may implement this function to expose their internal
- * IPAInterface, if any. When implemented, libcamera may at its sole discretion
- * call it and then bypass the ipa_context_ops API by calling the IPAInterface
- * methods directly. IPA modules shall still implement and support the full
- * ipa_context_ops API.
- */
-
-/**
- * \var ipa_context_ops::init
- * \brief Initialise the IPA context
- * \param[in] ctx The IPA context
- * \param[in] settings The IPA initialization settings
- *
- * \sa libcamera::IPAInterface::init()
- */
-
-/**
- * \var ipa_context_ops::start
- * \brief Start the IPA context
- *
- * \sa libcamera::IPAInterface::start()
- */
-
-/**
- * \var ipa_context_ops::stop
- * \brief Stop the IPA context
- *
- * \sa libcamera::IPAInterface::stop()
- */
-
-/**
- * \var ipa_context_ops::register_callbacks
- * \brief Register callback operation from the IPA to the pipeline handler
- * \param[in] ctx The IPA context
- * \param[in] callback The IPA callback operations
- * \param[in] cb_ctx The callback context, passed to all callback operations
- */
-
-/**
- * \var ipa_context_ops::configure
- * \brief Configure the IPA stream and sensor settings
- * \param[in] ctx The IPA context
- * \param[in] sensor_info Camera sensor information
- * \param[in] streams Configuration of all active streams
- * \param[in] num_streams The number of entries in the \a streams array
- * \param[in] maps Controls provided by the pipeline entities
- * \param[in] num_maps The number of entries in the \a maps array
- *
- * \sa libcamera::IPAInterface::configure()
- */
-
-/**
- * \var ipa_context_ops::map_buffers
- * \brief Map buffers shared between the pipeline handler and the IPA
- * \param[in] ctx The IPA context
- * \param[in] buffers The buffers to map
- * \param[in] num_buffers The number of entries in the \a buffers array
- *
- * The dmabuf file descriptors provided in \a buffers are borrowed from the
- * caller and are only guaranteed to be valid during the map_buffers() call.
- * Should the callee need to store a copy of the file descriptors, it shall
- * duplicate them first with ::%dup().
- *
- * \sa libcamera::IPAInterface::mapBuffers()
- */
-
-/**
- * \var ipa_context_ops::unmap_buffers
- * \brief Unmap buffers shared by the pipeline to the IPA
- * \param[in] ctx The IPA context
- * \param[in] ids The IDs of the buffers to unmap
- * \param[in] num_buffers The number of entries in the \a ids array
- *
- * \sa libcamera::IPAInterface::unmapBuffers()
- */
-
-/**
- * \var ipa_context_ops::process_event
- * \brief Process an event from the pipeline handler
- * \param[in] ctx The IPA context
- *
- * \sa libcamera::IPAInterface::processEvent()
+ * be costly. For that reason, functions that cannot afford the high cost
+ * should be marked as [async] in the mojom file, and they will operate
+ * asynchronously. This implies that these methods don't return a status, and
+ * that all methods may copy their arguments. Synchronous functions are still
+ * allowed, but should be used with caution.
*/
/**
@@ -387,128 +67,22 @@
* \brief Entry point to the IPA modules
*
* This function is the entry point to the IPA modules. It is implemented by
- * every IPA module, and called by libcamera to create a new IPA context.
+ * every IPA module, and called by libcamera to create a new IPA interface
+ * instance.
*
- * \return A newly created IPA context
+ * \return A newly created IPA interface instance
*/
namespace libcamera {
/**
- * \struct IPASettings
- * \brief IPA interface initialization settings
- *
- * The IPASettings structure stores data passed to the IPAInterface::init()
- * function. The data contains settings that don't depend on a particular camera
- * or pipeline configuration and are valid for the whole life time of the IPA
- * interface.
- */
-
-/**
- * \var IPASettings::configurationFile
- * \brief The name of the IPA configuration file
- *
- * This field may be an empty string if the IPA doesn't require a configuration
- * file.
- */
-
-/**
- * \struct IPAStream
- * \brief Stream configuration for the IPA interface
- *
- * The IPAStream structure stores stream configuration parameters needed by the
- * IPAInterface::configure() method. It mirrors the StreamConfiguration class
- * that is not suitable for this purpose due to not being serializable.
- */
-
-/**
- * \var IPAStream::pixelFormat
- * \brief The stream pixel format
- */
-
-/**
- * \var IPAStream::size
- * \brief The stream size in pixels
- */
-
-/**
- * \struct IPABuffer
- * \brief Buffer information for the IPA interface
- *
- * The IPABuffer structure associates buffer memory with a unique ID. It is
- * used to map buffers to the IPA with IPAInterface::mapBuffers(), after which
- * buffers will be identified by their ID in the IPA interface.
- */
-
-/**
- * \var IPABuffer::id
- * \brief The buffer unique ID
- *
- * Buffers mapped to the IPA are identified by numerical unique IDs. The IDs
- * are chosen by the pipeline handler to fulfil the following constraints:
- *
- * - IDs shall be positive integers different than zero
- * - IDs shall be unique among all mapped buffers
- *
- * When buffers are unmapped with IPAInterface::unmapBuffers() their IDs are
- * freed and may be reused for new buffer mappings.
- */
-
-/**
- * \var IPABuffer::planes
- * \brief The buffer planes description
- *
- * Stores the dmabuf handle and length for each plane of the buffer.
- */
-
-/**
- * \struct IPAOperationData
- * \brief Parameters for IPA operations
- *
- * The IPAOperationData structure carries parameters for the IPA operations
- * performed through the IPAInterface::processEvent() method and the
- * IPAInterface::queueFrameAction signal.
- */
-
-/**
- * \var IPAOperationData::operation
- * \brief IPA protocol operation
- *
- * The operation field describes which operation the receiver shall perform. It
- * defines, through the IPA protocol, how the other fields of the structure are
- * interpreted. The protocol freely assigns numerical values to operations.
- */
-
-/**
- * \var IPAOperationData::data
- * \brief Operation integer data
- *
- * The interpretation and position of different values in the array are defined
- * by the IPA protocol.
- */
-
-/**
- * \var IPAOperationData::controls
- * \brief Operation controls data
- *
- * The interpretation and position of different values in the array are defined
- * by the IPA protocol.
- */
-
-/**
* \class IPAInterface
* \brief C++ Interface for IPA implementation
*
- * This pure virtual class defines a C++ API corresponding to the ipa_context,
- * ipa_context_ops and ipa_callback_ops API. It is used by pipeline handlers to
- * interact with IPA modules, and may be used internally in IPA modules if
- * desired to benefit from the data and helper classes provided by libcamera.
- *
- * Functions defined in the ipa_context_ops structure are mapped to IPAInterface
- * methods, while functions defined in the ipa_callback_ops are mapped to
- * IPAInterface signals. As with the C API, the IPA C++ interface uses
- * serializable data types only. It reuses structures defined by the C API, or
- * defines corresponding classes using C++ containers when required.
+ * This pure virtual class defines a skeletal C++ API for IPA modules.
+ * Specializations of this class must be defined in a mojom file in
+ * include/libcamera/ipa/ (see the IPA Writers Guide for details
+ * on how to do so).
*
* Due to process isolation all arguments to the IPAInterface methods and
* signals may need to be transferred over IPC. The class thus uses serializable
@@ -516,140 +90,15 @@ namespace libcamera {
* mirror core libcamera structures when the latter are not suitable, such as
* IPAStream to carry StreamConfiguration data.
*
- * As for the functions defined in struct ipa_context_ops, the methods defined
- * by this class shall not return data from the IPA.
+ * Custom data structures may also be defined in the mojom file, in which case
+ * the (de)serialization will automatically be generated. If any other libcamera
+ * structures are to be used as parameters, then a (de)serializer for them must
+ * be implemented in IPADataSerializer.
*
- * The pipeline handler shall use the IPAManager to locate a compatible
+ * The pipeline handlers shall use the IPAManager to locate a compatible
* IPAInterface. The interface may then be used to interact with the IPA module.
- */
-
-/**
- * \fn IPAInterface::init()
- * \brief Initialise the IPAInterface
- * \param[in] settings The IPA initialization settings
- *
- * This function initializes the IPA interface. It shall be called before any
- * other function of the IPAInterface. The \a settings carry initialization
- * parameters that are valid for the whole life time of the IPA interface.
- */
-
-/**
- * \fn IPAInterface::start()
- * \brief Start the IPA
- *
- * This method informs the IPA module that the camera is about to be started.
- * The IPA module shall prepare any resources it needs to operate.
- *
- * \return 0 on success or a negative error code otherwise
- */
-
-/**
- * \fn IPAInterface::stop()
- * \brief Stop the IPA
- *
- * This method informs the IPA module that the camera is stopped. The IPA module
- * shall release resources prepared in start().
- */
-
-/**
- * \fn IPAInterface::configure()
- * \brief Configure the IPA stream and sensor settings
- * \param[in] sensorInfo Camera sensor information
- * \param[in] streamConfig Configuration of all active streams
- * \param[in] entityControls Controls provided by the pipeline entities
- * \param[in] ipaConfig Pipeline-handler-specific configuration data
- * \param[out] result Pipeline-handler-specific configuration result
- *
- * This method shall be called when the camera is started to inform the IPA of
- * the camera's streams and the sensor settings. The meaning of the numerical
- * keys in the \a streamConfig and \a entityControls maps is defined by the IPA
- * protocol.
- *
- * The \a sensorInfo conveys information about the camera sensor settings that
- * the pipeline handler has selected for the configuration. The IPA may use
- * that information to tune its algorithms.
- *
- * The \a ipaConfig and \a result parameters carry custom data passed by the
- * pipeline handler to the IPA and back. The pipeline handler may set the \a
- * result parameter to null if the IPA protocol doesn't need to pass a result
- * back through the configure() function.
- */
-
-/**
- * \fn IPAInterface::mapBuffers()
- * \brief Map buffers shared between the pipeline handler and the IPA
- * \param[in] buffers List of buffers to map
- *
- * This method informs the IPA module of memory buffers set up by the pipeline
- * handler that the IPA needs to access. It provides dmabuf file handles for
- * each buffer, and associates the buffers with unique numerical IDs.
- *
- * IPAs shall map the dmabuf file handles to their address space and keep a
- * cache of the mappings, indexed by the buffer numerical IDs. The IDs are used
- * in all other IPA interface methods to refer to buffers, including the
- * unmapBuffers() method.
- *
- * All buffers that the pipeline handler wishes to share with an IPA shall be
- * mapped with this method. Buffers may be mapped all at once with a single
- * call, or mapped and unmapped dynamically at runtime, depending on the IPA
- * protocol. Regardless of the protocol, all buffers mapped at a given time
- * shall have unique numerical IDs.
- *
- * The numerical IDs have no meaning defined by the IPA interface, and IPA
- * protocols shall not give them any specific meaning either. They should be
- * treated as opaque handles by IPAs, with the only exception that ID zero is
- * invalid.
- *
- * \sa unmapBuffers()
- *
- * \todo Provide a generic implementation of mapBuffers and unmapBuffers for
- * IPAs
- */
-
-/**
- * \fn IPAInterface::unmapBuffers()
- * \brief Unmap buffers shared by the pipeline to the IPA
- * \param[in] ids List of buffer IDs to unmap
- *
- * This method removes mappings set up with mapBuffers(). Buffers may be
- * unmapped all at once with a single call, or selectively at runtime, depending
- * on the IPA protocol. Numerical IDs of unmapped buffers may be reused when
- * mapping new buffers.
- *
- * \sa mapBuffers()
- */
-
-/**
- * \fn IPAInterface::processEvent()
- * \brief Process an event from the pipeline handler
- * \param[in] data IPA operation data
- *
- * This operation is used by pipeline handlers to inform the IPA module of
- * events that occurred during the on-going capture operation.
- *
- * The event notified by the pipeline handler with this method is handled by the
- * IPA, which interprets the operation parameters according to the separately
- * documented IPA protocol.
- */
-
-/**
- * \var IPAInterface::queueFrameAction
- * \brief Queue an action associated with a frame to the pipeline handler
- * \param[in] frame The frame number for the action
- * \param[in] data IPA operation data
- *
- * This signal is emitted when the IPA wishes to queue a FrameAction on the
- * pipeline. The pipeline is still responsible for the scheduling of the action
- * on its timeline.
- *
- * This signal is emitted by the IPA to queue an action to be executed by the
- * pipeline handler on a frame. The type of action is identified by the
- * \a data.operation field, as defined by the IPA protocol, and the rest of the
- * \a data is interpreted accordingly. The pipeline handler shall queue the
- * action and execute it as appropriate.
*
- * The signal is only emitted when the IPA is running, that is after start() and
- * before stop() have been called.
+ * \todo Figure out how to generate IPAInterface documentation.
*/
} /* namespace libcamera */
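
As a module-side illustration of the prototype above, a minimal sketch of an
entry point, where MyPipelineIPA is a hypothetical class implementing the
pipeline's generated IPAInterface specialization:

    #include <libcamera/ipa/ipa_interface.h>

    extern "C" libcamera::IPAInterface *ipaCreate()
    {
            /* MyPipelineIPA is a stand-in for a real IPA implementation. */
            return new MyPipelineIPA();
    }
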
diff --git a/src/libcamera/ipa_manager.cpp b/src/libcamera/ipa_manager.cpp
index 046fd5c6..b4606c61 100644
--- a/src/libcamera/ipa_manager.cpp
+++ b/src/libcamera/ipa_manager.cpp
@@ -12,12 +12,13 @@
#include <string.h>
#include <sys/types.h>
-#include "libcamera/internal/file.h"
+#include <libcamera/base/file.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include "libcamera/internal/ipa_module.h"
#include "libcamera/internal/ipa_proxy.h"
-#include "libcamera/internal/log.h"
#include "libcamera/internal/pipeline_handler.h"
-#include "libcamera/internal/utils.h"
/**
* \file ipa_manager.h
@@ -43,7 +44,7 @@ LOG_DEFINE_CATEGORY(IPAManager)
* The isolation mechanism ensures that no code from a closed-source module is
* ever run in the libcamera process.
*
- * To create an IPA context, pipeline handlers call the IPAManager::ipaCreate()
+ * To create an IPA context, pipeline handlers call the IPAManager::createIPA()
* method. For a directly loaded module, the manager calls the module's
* ipaCreate() function directly and wraps the returned context in an
* IPAContextWrapper that exposes an IPAInterface.
@@ -245,6 +246,7 @@ unsigned int IPAManager::addDir(const char *libDir, unsigned int maxDepth)
}
/**
+ * \fn IPAManager::createIPA()
* \brief Create an IPA proxy that matches a given pipeline handler
* \param[in] pipe The pipeline handler that wants a matching IPA proxy
* \param[in] minVersion Minimum acceptable version of IPA module
@@ -253,52 +255,6 @@ unsigned int IPAManager::addDir(const char *libDir, unsigned int maxDepth)
* \return A newly created IPA proxy, or nullptr if no matching IPA module is
* found or if the IPA proxy fails to initialize
*/
-std::unique_ptr<IPAProxy> IPAManager::createIPA(PipelineHandler *pipe,
- uint32_t maxVersion,
- uint32_t minVersion)
-{
- IPAModule *m = nullptr;
-
- for (IPAModule *module : self_->modules_) {
- if (module->match(pipe, minVersion, maxVersion)) {
- m = module;
- break;
- }
- }
-
- if (!m)
- return nullptr;
-
- /*
- * Load and run the IPA module in a thread if it has a valid signature,
- * or isolate it in a separate process otherwise.
- *
- * \todo Implement a better proxy selection
- */
- const char *proxyName = self_->isSignatureValid(m)
- ? "IPAProxyThread" : "IPAProxyLinux";
- IPAProxyFactory *pf = nullptr;
-
- for (IPAProxyFactory *factory : IPAProxyFactory::factories()) {
- if (!strcmp(factory->name().c_str(), proxyName)) {
- pf = factory;
- break;
- }
- }
-
- if (!pf) {
- LOG(IPAManager, Error) << "Failed to get proxy factory";
- return nullptr;
- }
-
- std::unique_ptr<IPAProxy> proxy = pf->create(m);
- if (!proxy->isValid()) {
- LOG(IPAManager, Error) << "Failed to load proxy";
- return nullptr;
- }
-
- return proxy;
-}
bool IPAManager::isSignatureValid([[maybe_unused]] IPAModule *ipa) const
{
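
A sketch of the call site from a pipeline handler, assuming a signature
matching the documented parameters (the exact declaration lives in
ipa_manager.h; the version bounds here are illustrative):

    /* Inside a PipelineHandler member function. */
    std::unique_ptr<IPAProxy> ipa = IPAManager::createIPA(this, 1, 1);
    if (!ipa)
            return false;
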
diff --git a/src/libcamera/ipa_module.cpp b/src/libcamera/ipa_module.cpp
index de512a7f..adfb8d40 100644
--- a/src/libcamera/ipa_module.cpp
+++ b/src/libcamera/ipa_module.cpp
@@ -21,12 +21,12 @@
#include <sys/types.h>
#include <unistd.h>
-#include <libcamera/span.h>
+#include <libcamera/base/file.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/span.h>
+#include <libcamera/base/utils.h>
-#include "libcamera/internal/file.h"
-#include "libcamera/internal/log.h"
#include "libcamera/internal/pipeline_handler.h"
-#include "libcamera/internal/utils.h"
/**
* \file ipa_module.h
@@ -391,13 +391,13 @@ const std::string &IPAModule::path() const
/**
* \brief Load the IPA implementation factory from the shared object
*
- * The IPA module shared object implements an ipa_context object to be used
+ * The IPA module shared object implements an IPAInterface object to be used
* by pipeline handlers. This method loads the factory function from the
- * shared object. Later, createContext() can be called to instantiate the
- * ipa_context.
+ * shared object. Later, createInterface() can be called to instantiate the
+ * IPAInterface.
*
* This method only needs to be called successfully once, after which
- * createContext() can be called as many times as ipa_context instances are
+ * createInterface() can be called as many times as IPAInterface instances are
* needed.
*
* Calling this function on an invalid module (as returned by isValid()) is
@@ -439,20 +439,18 @@ bool IPAModule::load()
}
/**
- * \brief Instantiate an IPA context
+ * \brief Instantiate an IPA interface
*
* After loading the IPA module with load(), this method creates an instance of
- * the IPA module context. Ownership of the context is passed to the caller, and
- * the context shall be destroyed by calling the \ref ipa_context_ops::destroy
- * "ipa_context::ops::destroy()" function.
+ * the IPA module interface.
*
* Calling this function on a module that has not yet been loaded, or an
* invalid module (as returned by load() and isValid(), respectively) is
* an error.
*
- * \return The IPA context on success, or nullptr on error
+ * \return The IPA interface on success, or nullptr on error
*/
-struct ipa_context *IPAModule::createContext()
+IPAInterface *IPAModule::createInterface()
{
if (!valid_ || !loaded_)
return nullptr;
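
The load-then-instantiate contract documented above reduces to the following minimal sketch; the module path is hypothetical and only for illustration:

	/* Load the module once, then create interfaces as needed. */
	IPAModule module("/usr/lib/libcamera/ipa_example.so");
	if (!module.isValid() || !module.load())
		return nullptr;

	/* createInterface() may now be called repeatedly. */
	IPAInterface *interface = module.createInterface();
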
diff --git a/src/libcamera/ipa_proxy.cpp b/src/libcamera/ipa_proxy.cpp
index ff4d7fd1..babcc48b 100644
--- a/src/libcamera/ipa_proxy.cpp
+++ b/src/libcamera/ipa_proxy.cpp
@@ -12,9 +12,10 @@
#include <sys/types.h>
#include <unistd.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include "libcamera/internal/ipa_module.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/utils.h"
/**
* \file ipa_proxy.h
@@ -30,20 +31,27 @@ LOG_DEFINE_CATEGORY(IPAProxy)
* \brief IPA Proxy
*
* Isolate IPA into separate process.
+ */
+
+/**
+ * \enum IPAProxy::ProxyState
+ * \brief Identifies the available operational states of the proxy
*
- * Every subclass of proxy shall be registered with libcamera using
- * the REGISTER_IPA_PROXY() macro.
+ * \var IPAProxy::ProxyStopped
+ * \brief The proxy is not active and only synchronous operations are permitted
+ * \var IPAProxy::ProxyStopping
+ * \brief No new tasks can be submitted to the proxy, but existing events
+ * can still be completed
+ * \var IPAProxy::ProxyRunning
+ * \brief The proxy is active and asynchronous tasks may be queued
*/
/**
* \brief Construct an IPAProxy instance
* \param[in] ipam The IPA module
- *
- * IPAProxy instances shall be constructed through the IPAProxyFactory::create()
- * method implemented by the respective factories.
*/
IPAProxy::IPAProxy(IPAModule *ipam)
- : valid_(false), ipam_(ipam)
+ : valid_(false), state_(ProxyStopped), ipam_(ipam)
{
}
@@ -146,16 +154,6 @@ std::string IPAProxy::configurationFile(const std::string &name) const
}
/**
- * \fn IPAProxy::stop()
- * \brief Stop the IPA proxy
- *
- * This function stops the IPA and releases all the resources acquired by the
- * proxy in start(). Calling stop() when the IPA proxy hasn't been started or
- * has already been stopped is valid, the proxy shall treat this as a no-op and
- * shall not forward the call to the IPA.
- */
-
-/**
* \brief Find a valid full path for a proxy worker for a given executable name
* \param[in] file File name of proxy worker executable
*
@@ -230,88 +228,14 @@ std::string IPAProxy::resolvePath(const std::string &file) const
*/
/**
- * \class IPAProxyFactory
- * \brief Registration of IPAProxy classes and creation of instances
- *
- * To facilitate discovery and instantiation of IPAProxy classes, the
- * IPAProxyFactory class maintains a registry of IPAProxy classes. Each
- * IPAProxy subclass shall register itself using the REGISTER_IPA_PROXY()
- * macro, which will create a corresponding instance of a IPAProxyFactory
- * subclass and register it with the static list of factories.
- */
-
-/**
- * \brief Construct a IPAProxy factory
- * \param[in] name Name of the IPAProxy class
- *
- * Creating an instance of the factory registers is with the global list of
- * factories, accessible through the factories() function.
- *
- * The factory \a name is used for debugging and IPAProxy matching purposes
- * and shall be unique.
- */
-IPAProxyFactory::IPAProxyFactory(const char *name)
- : name_(name)
-{
- registerType(this);
-}
-
-/**
- * \fn IPAProxyFactory::create()
- * \brief Create an instance of the IPAProxy corresponding to the factory
- * \param[in] ipam The IPA module
- *
- * This virtual function is implemented by the REGISTER_IPA_PROXY() macro.
- * It creates a IPAProxy instance that isolates an IPA interface designated
- * by the IPA module \a ipam.
+ * \var IPAProxy::state_
+ * \brief Current state of the IPAProxy
*
- * \return A pointer to a newly constructed instance of the IPAProxy subclass
- * corresponding to the factory
- */
-
-/**
- * \fn IPAProxyFactory::name()
- * \brief Retrieve the factory name
- * \return The factory name
- */
-
-/**
- * \brief Add a IPAProxy class to the registry
- * \param[in] factory Factory to use to construct the IPAProxy
- *
- * The caller is responsible to guarantee the uniqueness of the IPAProxy name.
- */
-void IPAProxyFactory::registerType(IPAProxyFactory *factory)
-{
- std::vector<IPAProxyFactory *> &factories = IPAProxyFactory::factories();
-
- factories.push_back(factory);
-
- LOG(IPAProxy, Debug)
- << "Registered proxy \"" << factory->name() << "\"";
-}
-
-/**
- * \brief Retrieve the list of all IPAProxy factories
- *
- * The static factories map is defined inside the function to ensure it gets
- * initialized on first use, without any dependency on link order.
- *
- * \return The list of pipeline handler factories
- */
-std::vector<IPAProxyFactory *> &IPAProxyFactory::factories()
-{
- static std::vector<IPAProxyFactory *> factories;
- return factories;
-}
-
-/**
- * \def REGISTER_IPA_PROXY
- * \brief Register a IPAProxy with the IPAProxy factory
- * \param[in] proxy Class name of IPAProxy derived class to register
+ * The IPAProxy can be Running, Stopped, or Stopping.
*
- * Register a proxy subclass with the factory and make it available to
- * isolate IPA modules.
+ * This state provides a means to ensure that asynchronous methods are only
+ * called while the proxy is running, and to prevent new tasks from being
+ * submitted while still enabling events to complete when the IPAProxy is
+ * stopping.
*/
} /* namespace libcamera */
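
The ProxyState machine documented above exists to guard call forwarding. A minimal sketch of the intended guard, assuming a hypothetical asynchronous method on an IPAProxy subclass (exampleAsyncCall() is not part of this diff):

	void ExampleProxy::exampleAsyncCall()
	{
		/* Asynchronous calls are only permitted in ProxyRunning. */
		if (state_ != ProxyRunning) {
			LOG(IPAProxy, Error)
				<< "Proxy not running, dropping async call";
			return;
		}

		/* Forward the call to the isolated IPA from here. */
	}
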
diff --git a/src/libcamera/ipc_pipe.cpp b/src/libcamera/ipc_pipe.cpp
new file mode 100644
index 00000000..28e20e03
--- /dev/null
+++ b/src/libcamera/ipc_pipe.cpp
@@ -0,0 +1,218 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * ipc_pipe.cpp - Image Processing Algorithm IPC module for IPA proxies
+ */
+
+#include "libcamera/internal/ipc_pipe.h"
+
+#include <libcamera/base/log.h>
+
+/**
+ * \file ipc_pipe.h
+ * \brief IPC mechanism for IPA isolation
+ */
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(IPCPipe)
+
+/**
+ * \struct IPCMessage::Header
+ * \brief Container for an IPCMessage header
+ *
+ * Holds a command code for the IPC message, and a cookie.
+ */
+
+/**
+ * \var IPCMessage::Header::cmd
+ * \brief Type of IPCMessage
+ *
+ * Typically used to carry a command code for an RPC.
+ */
+
+/**
+ * \var IPCMessage::Header::cookie
+ * \brief Cookie to identify the message and a corresponding reply.
+ *
+ * Populated and used by IPCPipe implementations for matching calls with
+ * replies.
+ */
+
+/**
+ * \class IPCMessage
+ * \brief IPC message to be passed through IPC message pipe
+ */
+
+/**
+ * \brief Construct an empty IPCMessage instance
+ */
+IPCMessage::IPCMessage()
+ : header_(Header{ 0, 0 })
+{
+}
+
+/**
+ * \brief Construct an IPCMessage instance with a given command code
+ * \param[in] cmd The command code
+ */
+IPCMessage::IPCMessage(uint32_t cmd)
+ : header_(Header{ cmd, 0 })
+{
+}
+
+/**
+ * \brief Construct an IPCMessage instance with a given header
+ * \param[in] header The header that the constructed IPCMessage will contain
+ */
+IPCMessage::IPCMessage(const Header &header)
+ : header_(header)
+{
+}
+
+/**
+ * \brief Construct an IPCMessage instance from an IPC payload
+ * \param[in] payload The IPCUnixSocket payload to construct from
+ *
+ * This essentially converts an IPCUnixSocket payload into an IPCMessage.
+ * The header is extracted from the payload into the IPCMessage's header field.
+ */
+IPCMessage::IPCMessage(const IPCUnixSocket::Payload &payload)
+{
+ memcpy(&header_, payload.data.data(), sizeof(header_));
+ data_ = std::vector<uint8_t>(payload.data.begin() + sizeof(header_),
+ payload.data.end());
+ fds_ = payload.fds;
+}
+
+/**
+ * \brief Create an IPCUnixSocket payload from the IPCMessage
+ *
+ * This essentially converts the IPCMessage into an IPCUnixSocket payload.
+ *
+ * \todo Resolve the layering violation (add other converters later?)
+ */
+IPCUnixSocket::Payload IPCMessage::payload() const
+{
+ IPCUnixSocket::Payload payload;
+
+ payload.data.resize(sizeof(Header) + data_.size());
+ payload.fds.reserve(fds_.size());
+
+ memcpy(payload.data.data(), &header_, sizeof(Header));
+
+ /* \todo Make this work without copy */
+ memcpy(payload.data.data() + sizeof(Header), data_.data(), data_.size());
+ payload.fds = fds_;
+
+ return payload;
+}
+
+/**
+ * \fn IPCMessage::header()
+ * \brief Returns a reference to the header
+ */
+
+/**
+ * \fn IPCMessage::data()
+ * \brief Returns a reference to the byte vector containing data
+ */
+
+/**
+ * \fn IPCMessage::fds()
+ * \brief Returns a reference to the vector containing file descriptors
+ */
+
+/**
+ * \fn IPCMessage::header() const
+ * \brief Returns a const reference to the header
+ */
+
+/**
+ * \fn IPCMessage::data() const
+ * \brief Returns a const reference to the byte vector containing data
+ */
+
+/**
+ * \fn IPCMessage::fds() const
+ * \brief Returns a const reference to the vector containing file descriptors
+ */
+
+/**
+ * \class IPCPipe
+ * \brief IPC message pipe for IPA isolation
+ *
+ * Virtual class to model an IPC message pipe for use by IPA proxies for IPA
+ * isolation. sendSync() and sendAsync() must be implemented, and the recv
+ * signal must be emitted whenever new data is available.
+ */
+
+/**
+ * \brief Construct an IPCPipe instance
+ */
+IPCPipe::IPCPipe()
+ : connected_(false)
+{
+}
+
+IPCPipe::~IPCPipe()
+{
+}
+
+/**
+ * \fn IPCPipe::isConnected()
+ * \brief Check if the IPCPipe instance is connected
+ *
+ * An IPCPipe instance is connected if IPC is successfully set up.
+ *
+ * \return True if the IPCPipe is connected, false otherwise
+ */
+
+/**
+ * \fn IPCPipe::sendSync()
+ * \brief Send a message over IPC synchronously
+ * \param[in] in Data to send
+ * \param[in] out IPCMessage instance in which to receive data, if applicable
+ *
+ * This function will not return until a response is received. The event loop
+ * will still continue to execute, however.
+ *
+ * \return Zero on success, negative error code otherwise
+ *
+ * \todo Determine if the event loop should limit the types of messages it
+ * processes, to avoid reentrancy in the caller, and carefully document what
+ * the caller needs to implement to make this safe.
+ */
+
+/**
+ * \fn IPCPipe::sendAsync()
+ * \brief Send a message over IPC asynchronously
+ * \param[in] data Data to send
+ *
+ * This function will return immediately after sending the message.
+ *
+ * \return Zero on success, negative error code otherwise
+ */
+
+/**
+ * \var IPCPipe::recv
+ * \brief Signal to be emitted when a message is received over IPC
+ *
+ * When a message is received over IPC, this signal shall be emitted. Users must
+ * connect to this signal to receive messages.
+ */
+
+/**
+ * \var IPCPipe::connected_
+ * \brief Flag to indicate if the IPCPipe instance is connected
+ *
+ * An IPCPipe instance is connected if IPC is successfully set up.
+ *
+ * This flag can be read via IPCPipe::isConnected().
+ *
+ * Implementations of the IPCPipe class should set this flag upon successful
+ * connection.
+ */
+
+} /* namespace libcamera */
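
The constructors and payload() above make IPCMessage and IPCUnixSocket::Payload freely convertible. A minimal round-trip sketch; the command code and bytes are arbitrary illustration values:

	/* Sender: build a message and serialize it for the socket. */
	IPCMessage request(42);
	request.data() = { 0x01, 0x02, 0x03 };
	IPCUnixSocket::Payload payload = request.payload();

	/* Receiver: the header is split back out of the payload. */
	IPCMessage reply(payload);
	/* reply.header().cmd == 42 and reply.data() holds the three bytes. */
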
diff --git a/src/libcamera/ipc_pipe_unixsocket.cpp b/src/libcamera/ipc_pipe_unixsocket.cpp
new file mode 100644
index 00000000..4511775f
--- /dev/null
+++ b/src/libcamera/ipc_pipe_unixsocket.cpp
@@ -0,0 +1,145 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * ipc_pipe_unixsocket.cpp - Image Processing Algorithm IPC module using unix socket
+ */
+
+#include "libcamera/internal/ipc_pipe_unixsocket.h"
+
+#include <vector>
+
+#include <libcamera/base/event_dispatcher.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/thread.h>
+#include <libcamera/base/timer.h>
+
+#include "libcamera/internal/ipc_pipe.h"
+#include "libcamera/internal/ipc_unixsocket.h"
+#include "libcamera/internal/process.h"
+
+namespace libcamera {
+
+LOG_DECLARE_CATEGORY(IPCPipe)
+
+IPCPipeUnixSocket::IPCPipeUnixSocket(const char *ipaModulePath,
+ const char *ipaProxyWorkerPath)
+ : IPCPipe()
+{
+ std::vector<int> fds;
+ std::vector<std::string> args;
+ args.push_back(ipaModulePath);
+
+ socket_ = std::make_unique<IPCUnixSocket>();
+ int fd = socket_->create();
+ if (fd < 0) {
+ LOG(IPCPipe, Error) << "Failed to create socket";
+ return;
+ }
+ socket_->readyRead.connect(this, &IPCPipeUnixSocket::readyRead);
+ args.push_back(std::to_string(fd));
+ fds.push_back(fd);
+
+ proc_ = std::make_unique<Process>();
+ int ret = proc_->start(ipaProxyWorkerPath, args, fds);
+ if (ret) {
+ LOG(IPCPipe, Error)
+ << "Failed to start proxy worker process";
+ return;
+ }
+
+ connected_ = true;
+}
+
+IPCPipeUnixSocket::~IPCPipeUnixSocket()
+{
+}
+
+int IPCPipeUnixSocket::sendSync(const IPCMessage &in, IPCMessage *out)
+{
+ IPCUnixSocket::Payload response;
+
+ int ret = call(in.payload(), &response, in.header().cookie);
+ if (ret) {
+ LOG(IPCPipe, Error) << "Failed to call sync";
+ return ret;
+ }
+
+ if (out)
+ *out = IPCMessage(response);
+
+ return 0;
+}
+
+int IPCPipeUnixSocket::sendAsync(const IPCMessage &data)
+{
+ int ret = socket_->send(data.payload());
+ if (ret) {
+ LOG(IPCPipe, Error) << "Failed to call async";
+ return ret;
+ }
+
+ return 0;
+}
+
+void IPCPipeUnixSocket::readyRead(IPCUnixSocket *socket)
+{
+ IPCUnixSocket::Payload payload;
+ int ret = socket->receive(&payload);
+ if (ret) {
+ LOG(IPCPipe, Error) << "Receive message failed" << ret;
+ return;
+ }
+
+ /* \todo Use span to avoid the double copy when callData is found. */
+ if (payload.data.size() < sizeof(IPCMessage::Header)) {
+ LOG(IPCPipe, Error) << "Not enough data received";
+ return;
+ }
+
+ IPCMessage ipcMessage(payload);
+
+ auto callData = callData_.find(ipcMessage.header().cookie);
+ if (callData != callData_.end()) {
+ *callData->second.response = std::move(payload);
+ callData->second.done = true;
+ return;
+ }
+
+ /* Received unexpected data, this means it's a call from the IPA. */
+ recv.emit(ipcMessage);
+}
+
+int IPCPipeUnixSocket::call(const IPCUnixSocket::Payload &message,
+ IPCUnixSocket::Payload *response, uint32_t cookie)
+{
+ Timer timeout;
+ int ret;
+
+ const auto result = callData_.insert({ cookie, { response, false } });
+ const auto &iter = result.first;
+
+ ret = socket_->send(message);
+ if (ret) {
+ callData_.erase(iter);
+ return ret;
+ }
+
+ /* \todo Make this less dangerous, see IPCPipe::sendSync() */
+ timeout.start(2000);
+ while (!iter->second.done) {
+ if (!timeout.isRunning()) {
+ LOG(IPCPipe, Error) << "Call timeout!";
+ callData_.erase(iter);
+ return -ETIMEDOUT;
+ }
+
+ Thread::current()->eventDispatcher()->processEvents();
+ }
+
+ callData_.erase(iter);
+
+ return 0;
+}
+
+} /* namespace libcamera */
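
call() pairs each request with its reply through the header cookie, processing events until the reply arrives or the 2000 ms timeout expires. From a proxy's point of view the synchronous path reduces to the sketch below; CMD_EXAMPLE and the ipc_ member are assumptions for illustration:

	constexpr uint32_t CMD_EXAMPLE = 1;

	IPCMessage in(CMD_EXAMPLE);
	IPCMessage out;

	int ret = ipc_->sendSync(in, &out);
	if (ret < 0)
		return ret;	/* -ETIMEDOUT if the reply never arrived */

	/* out now holds the reply matched by cookie. */
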
diff --git a/src/libcamera/ipc_unixsocket.cpp b/src/libcamera/ipc_unixsocket.cpp
index 5c8cce16..f23eb783 100644
--- a/src/libcamera/ipc_unixsocket.cpp
+++ b/src/libcamera/ipc_unixsocket.cpp
@@ -12,9 +12,8 @@
#include <sys/socket.h>
#include <unistd.h>
-#include <libcamera/event_notifier.h>
-
-#include "libcamera/internal/log.h"
+#include <libcamera/base/event_notifier.h>
+#include <libcamera/base/log.h>
/**
* \file ipc_unixsocket.h
diff --git a/src/libcamera/media_device.cpp b/src/libcamera/media_device.cpp
index de18d572..fa50264f 100644
--- a/src/libcamera/media_device.cpp
+++ b/src/libcamera/media_device.cpp
@@ -18,7 +18,7 @@
#include <linux/media.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
/**
* \file media_device.h
@@ -44,13 +44,13 @@ LOG_DEFINE_CATEGORY(MediaDevice)
* MediaEntity, MediaPad and MediaLink are created to model the media graph,
* and stored in a map indexed by object id.
*
- * The graph is valid once successfully populated, as reported by the valid()
+ * The graph is valid once successfully populated, as reported by the isValid()
* function. It can be queried to list all entities(), or entities can be
* looked up by name with getEntityByName(). The graph can be traversed from
* entity to entity through pads and links as exposed by the corresponding
* classes.
*
- * Media device can be claimed for exclusive use with acquire(), released with
+ * Media devices can be claimed for exclusive use with acquire(), released with
* release() and tested with busy(). This mechanism is aimed at pipeline
* managers to claim media devices they support during enumeration.
*/
@@ -231,6 +231,7 @@ int MediaDevice::populate()
driver_ = info.driver;
model_ = info.model;
version_ = info.media_version;
+ hwRevision_ = info.hw_revision;
/*
* Keep calling G_TOPOLOGY until the version number stays stable.
@@ -291,7 +292,7 @@ done:
}
/**
- * \fn MediaDevice::valid()
+ * \fn MediaDevice::isValid()
* \brief Query whether the media graph has been populated and is valid
* \return true if the media graph is valid, false otherwise
*/
@@ -324,6 +325,15 @@ done:
*/
/**
+ * \fn MediaDevice::hwRevision()
+ * \brief Retrieve the media device hardware revision
+ *
+ * The hardware revision is in a driver-specific format.
+ *
+ * \return The MediaDevice hardware revision
+ */
+
+/**
* \fn MediaDevice::entities()
* \brief Retrieve the list of entities in the media graph
* \return The list of MediaEntities registered in the MediaDevice
@@ -561,7 +571,7 @@ bool MediaDevice::addObject(MediaObject *object)
*
* The media device graph state is reset to invalid when the graph is cleared.
*
- * \sa valid()
+ * \sa isValid()
*/
void MediaDevice::clear()
{
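
hwRevision() simply exposes the hw_revision field read from the kernel during populate(). A one-line diagnostic sketch, assuming a populated MediaDevice pointer; the revision format is driver-specific, so hexadecimal is a safe rendering:

	LOG(MediaDevice, Debug)
		<< "hw revision: " << utils::hex(media->hwRevision());
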
diff --git a/src/libcamera/media_object.cpp b/src/libcamera/media_object.cpp
index b7bf048c..815edc8e 100644
--- a/src/libcamera/media_object.cpp
+++ b/src/libcamera/media_object.cpp
@@ -15,7 +15,8 @@
#include <linux/media.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
+
#include "libcamera/internal/media_device.h"
/**
@@ -67,6 +68,11 @@ LOG_DECLARE_CATEGORY(MediaDevice)
/**
* \fn MediaObject::device()
+ * \copydoc MediaObject::device() const
+ */
+
+/**
+ * \fn MediaObject::device() const
* \brief Retrieve the media device the media object belongs to
* \return The MediaDevice
*/
@@ -191,15 +197,6 @@ MediaPad::MediaPad(const struct media_v2_pad *pad, MediaEntity *entity)
{
}
-MediaPad::~MediaPad()
-{
- /*
- * Don't delete the links as we only borrow the reference owned by
- * MediaDevice.
- */
- links_.clear();
-}
-
/**
* \fn MediaPad::index()
* \brief Retrieve the pad index
@@ -371,15 +368,6 @@ MediaEntity::MediaEntity(MediaDevice *dev,
{
}
-MediaEntity::~MediaEntity()
-{
- /*
- * Don't delete the pads as we only borrow the reference owned by
- * MediaDevice.
- */
- pads_.clear();
-}
-
/**
* \brief Add \a pad to the entity's list of pads
* \param[in] pad The pad to add to the list
diff --git a/src/libcamera/meson.build b/src/libcamera/meson.build
index 07711b5f..4f085801 100644
--- a/src/libcamera/meson.build
+++ b/src/libcamera/meson.build
@@ -2,52 +2,43 @@
libcamera_sources = files([
'bayer_format.cpp',
- 'bound_method.cpp',
- 'buffer.cpp',
'byte_stream_buffer.cpp',
'camera.cpp',
'camera_controls.cpp',
'camera_manager.cpp',
'camera_sensor.cpp',
+ 'camera_sensor_properties.cpp',
'controls.cpp',
'control_serializer.cpp',
'control_validator.cpp',
+ 'delayed_controls.cpp',
'device_enumerator.cpp',
'device_enumerator_sysfs.cpp',
- 'event_dispatcher.cpp',
- 'event_dispatcher_poll.cpp',
- 'event_notifier.cpp',
- 'file.cpp',
'file_descriptor.cpp',
'formats.cpp',
+ 'framebuffer.cpp',
'framebuffer_allocator.cpp',
'geometry.cpp',
- 'ipa_context_wrapper.cpp',
'ipa_controls.cpp',
+ 'ipa_data_serializer.cpp',
'ipa_interface.cpp',
'ipa_manager.cpp',
'ipa_module.cpp',
'ipa_proxy.cpp',
+ 'ipc_pipe.cpp',
+ 'ipc_pipe_unixsocket.cpp',
'ipc_unixsocket.cpp',
- 'log.cpp',
'media_device.cpp',
'media_object.cpp',
- 'message.cpp',
- 'object.cpp',
'pipeline_handler.cpp',
'pixel_format.cpp',
'process.cpp',
'pub_key.cpp',
'request.cpp',
- 'semaphore.cpp',
- 'signal.cpp',
+ 'source_paths.cpp',
'stream.cpp',
'sysfs.cpp',
- 'thread.cpp',
- 'timer.cpp',
'transform.cpp',
- 'utils.cpp',
- 'v4l2_controls.cpp',
'v4l2_device.cpp',
'v4l2_pixelformat.cpp',
'v4l2_subdevice.cpp',
@@ -55,11 +46,15 @@ libcamera_sources = files([
])
libcamera_sources += libcamera_public_headers
+libcamera_sources += libcamera_generated_ipa_headers
+libcamera_sources += libcamera_tracepoint_header
includes = [
libcamera_includes,
]
+subdir('base')
+subdir('ipa')
subdir('pipeline')
subdir('proxy')
@@ -72,6 +67,11 @@ if libgnutls.found()
config_h.set('HAVE_GNUTLS', 1)
endif
+if liblttng.found()
+ config_h.set('HAVE_TRACING', 1)
+ libcamera_sources += files(['tracepoints.cpp'])
+endif
+
if libudev.found()
config_h.set('HAVE_LIBUDEV', 1)
libcamera_sources += files([
@@ -86,13 +86,12 @@ foreach source : control_source_files
control_sources += custom_target(source + '_cpp',
input : input_files,
output : source + '.cpp',
- depend_files : gen_controls,
command : [gen_controls, '-o', '@OUTPUT@', '@INPUT@'])
endforeach
libcamera_sources += control_sources
-gen_version = join_paths(meson.source_root(), 'utils', 'gen-version.sh')
+gen_version = meson.source_root() / 'utils' / 'gen-version.sh'
version_cpp = vcs_tag(command : [gen_version, meson.build_root()],
input : 'version.cpp.in',
@@ -103,49 +102,58 @@ libcamera_sources += version_cpp
if ipa_sign_module
ipa_pub_key_cpp = custom_target('ipa_pub_key_cpp',
- input : [ ipa_priv_key, 'ipa_pub_key.cpp.in' ],
+ input : [ipa_priv_key, 'ipa_pub_key.cpp.in'],
output : 'ipa_pub_key.cpp',
- command : [ gen_ipa_pub_key, '@INPUT@', '@OUTPUT@' ])
+ command : [gen_ipa_pub_key, '@INPUT@', '@OUTPUT@'])
libcamera_sources += ipa_pub_key_cpp
endif
libcamera_deps = [
libatomic,
+ libcamera_base,
+ libcamera_base_private,
libdl,
libgnutls,
+ liblttng,
libudev,
- dependency('threads'),
]
-libcamera_link_with = []
-
-if android_enabled
- libcamera_sources += android_hal_sources
- includes += android_includes
- libcamera_link_with += android_camera_metadata
-
- libcamera_deps += android_deps
-endif
-
# We add '/' to the build_rpath as a 'safe' path to act as a boolean flag.
# The build_rpath is stripped at install time by meson, so we determine at
# runtime if the library is running from an installed location by checking
# for the presence or absence of the dynamic tag.
-libcamera = shared_library('camera',
+libcamera = shared_library('libcamera',
libcamera_sources,
+ name_prefix : '',
install : true,
- link_with : libcamera_link_with,
include_directories : includes,
build_rpath : '/',
dependencies : libcamera_deps)
-libcamera_dep = declare_dependency(sources : [
- libcamera_ipa_headers,
- libcamera_public_headers,
- ],
- include_directories : libcamera_includes,
- link_with : libcamera)
+libcamera_public = declare_dependency(sources : [
+ libcamera_ipa_headers,
+ libcamera_public_headers,
+ ],
+ include_directories : libcamera_includes,
+ dependencies : libcamera_base,
+ link_with : libcamera)
+
+# Internal dependency for components and plugins which can use private APIs
+libcamera_private = declare_dependency(sources : [
+ libcamera_generated_ipa_headers,
+ ],
+ dependencies : [
+ libcamera_public,
+ libcamera_base_private,
+ ])
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(libcamera,
+ libraries : libcamera_base_lib,
+ version : '1.0',
+ description : 'Complex Camera Support Library',
+ subdirs : 'libcamera')
subdir('proxy/worker')
diff --git a/src/libcamera/pipeline/ipu3/cio2.cpp b/src/libcamera/pipeline/ipu3/cio2.cpp
index e43ec70f..1bcd580e 100644
--- a/src/libcamera/pipeline/ipu3/cio2.cpp
+++ b/src/libcamera/pipeline/ipu3/cio2.cpp
@@ -14,6 +14,7 @@
#include <libcamera/stream.h>
#include "libcamera/internal/camera_sensor.h"
+#include "libcamera/internal/framebuffer.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/v4l2_subdevice.h"
@@ -33,17 +34,9 @@ const std::map<uint32_t, PixelFormat> mbusCodesToPixelFormat = {
} /* namespace */
CIO2Device::CIO2Device()
- : sensor_(nullptr), csi2_(nullptr), output_(nullptr)
{
}
-CIO2Device::~CIO2Device()
-{
- delete output_;
- delete csi2_;
- delete sensor_;
-}
-
/**
* \brief Retrieve the list of supported PixelFormats
*
@@ -118,7 +111,7 @@ int CIO2Device::init(const MediaDevice *media, unsigned int index)
MediaLink *link = links[0];
MediaEntity *sensorEntity = link->source()->entity();
- sensor_ = new CameraSensor(sensorEntity);
+ sensor_ = std::make_unique<CameraSensor>(sensorEntity);
ret = sensor_->init();
if (ret)
return ret;
@@ -149,7 +142,7 @@ int CIO2Device::init(const MediaDevice *media, unsigned int index)
* might impact on power consumption.
*/
- csi2_ = new V4L2Subdevice(csi2Entity);
+ csi2_ = std::make_unique<V4L2Subdevice>(csi2Entity);
ret = csi2_->open();
if (ret)
return ret;
@@ -203,8 +196,7 @@ int CIO2Device::configure(const Size &size, V4L2DeviceFormat *outputFormat)
return 0;
}
-StreamConfiguration
-CIO2Device::generateConfiguration(Size size) const
+StreamConfiguration CIO2Device::generateConfiguration(Size size) const
{
StreamConfiguration cfg;
@@ -247,39 +239,54 @@ int CIO2Device::start()
availableBuffers_.push(buffer.get());
ret = output_->streamOn();
- if (ret)
+ if (ret) {
freeBuffers();
+ return ret;
+ }
- return ret;
+ ret = csi2_->setFrameStartEnabled(true);
+ if (ret) {
+ stop();
+ return ret;
+ }
+
+ return 0;
}
int CIO2Device::stop()
{
- int ret = output_->streamOff();
+ int ret;
+
+ csi2_->setFrameStartEnabled(false);
+
+ ret = output_->streamOff();
freeBuffers();
return ret;
}
-int CIO2Device::queueBuffer(Request *request, FrameBuffer *rawBuffer)
+FrameBuffer *CIO2Device::queueBuffer(Request *request, FrameBuffer *rawBuffer)
{
FrameBuffer *buffer = rawBuffer;
/* If no buffer is provided in the request, use an internal one. */
if (!buffer) {
if (availableBuffers_.empty()) {
- LOG(IPU3, Error) << "CIO2 buffer underrun";
- return -EINVAL;
+ LOG(IPU3, Debug) << "CIO2 buffer underrun";
+ return nullptr;
}
buffer = availableBuffers_.front();
availableBuffers_.pop();
+ buffer->_d()->setRequest(request);
}
- buffer->setRequest(request);
+ int ret = output_->queueBuffer(buffer);
+ if (ret)
+ return nullptr;
- return output_->queueBuffer(buffer);
+ return buffer;
}
void CIO2Device::tryReturnBuffer(FrameBuffer *buffer)
@@ -296,6 +303,8 @@ void CIO2Device::tryReturnBuffer(FrameBuffer *buffer)
break;
}
}
+
+ bufferAvailable.emit();
}
void CIO2Device::freeBuffers()
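
queueBuffer() now reports the buffer actually queued rather than an errno, and tryReturnBuffer() emits bufferAvailable whenever an internal buffer is recycled. Callers are expected to treat a nullptr return as a transient underrun and retry on the signal, as in this sketch:

	FrameBuffer *raw = cio2_.queueBuffer(request, reqRawBuffer);
	if (!raw) {
		/* Underrun: leave the request pending and retry when
		 * bufferAvailable is emitted. */
		return;
	}
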
diff --git a/src/libcamera/pipeline/ipu3/cio2.h b/src/libcamera/pipeline/ipu3/cio2.h
index fa813a98..f28e9f1d 100644
--- a/src/libcamera/pipeline/ipu3/cio2.h
+++ b/src/libcamera/pipeline/ipu3/cio2.h
@@ -11,8 +11,9 @@
#include <queue>
#include <vector>
-#include <libcamera/signal.h>
+#include <libcamera/base/signal.h>
+#include "libcamera/internal/v4l2_subdevice.h"
#include "libcamera/internal/v4l2_videodevice.h"
namespace libcamera {
@@ -24,7 +25,6 @@ class PixelFormat;
class Request;
class Size;
class SizeRange;
-class V4L2Subdevice;
struct StreamConfiguration;
class CIO2Device
@@ -33,7 +33,6 @@ public:
static constexpr unsigned int CIO2_BUFFER_COUNT = 4;
CIO2Device();
- ~CIO2Device();
std::vector<PixelFormat> formats() const;
std::vector<SizeRange> sizes() const;
@@ -49,21 +48,24 @@ public:
int start();
int stop();
- CameraSensor *sensor() { return sensor_; }
- const CameraSensor *sensor() const { return sensor_; }
+ CameraSensor *sensor() { return sensor_.get(); }
+ const CameraSensor *sensor() const { return sensor_.get(); }
- int queueBuffer(Request *request, FrameBuffer *rawBuffer);
+ FrameBuffer *queueBuffer(Request *request, FrameBuffer *rawBuffer);
void tryReturnBuffer(FrameBuffer *buffer);
Signal<FrameBuffer *> &bufferReady() { return output_->bufferReady; }
+ Signal<uint32_t> &frameStart() { return csi2_->frameStart; }
+
+ Signal<> bufferAvailable;
private:
void freeBuffers();
void cio2BufferReady(FrameBuffer *buffer);
- CameraSensor *sensor_;
- V4L2Subdevice *csi2_;
- V4L2VideoDevice *output_;
+ std::unique_ptr<CameraSensor> sensor_;
+ std::unique_ptr<V4L2Subdevice> csi2_;
+ std::unique_ptr<V4L2VideoDevice> output_;
std::vector<std::unique_ptr<FrameBuffer>> buffers_;
std::queue<FrameBuffer *> availableBuffers_;
diff --git a/src/libcamera/pipeline/ipu3/frames.cpp b/src/libcamera/pipeline/ipu3/frames.cpp
new file mode 100644
index 00000000..a4c3477c
--- /dev/null
+++ b/src/libcamera/pipeline/ipu3/frames.cpp
@@ -0,0 +1,143 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * frames.cpp - Intel IPU3 Frames helper
+ */
+
+#include "frames.h"
+
+#include <libcamera/framebuffer.h>
+#include <libcamera/request.h>
+
+#include "libcamera/internal/framebuffer.h"
+#include "libcamera/internal/pipeline_handler.h"
+#include "libcamera/internal/v4l2_videodevice.h"
+
+namespace libcamera {
+
+LOG_DECLARE_CATEGORY(IPU3)
+
+IPU3Frames::IPU3Frames()
+{
+}
+
+void IPU3Frames::init(const std::vector<std::unique_ptr<FrameBuffer>> &paramBuffers,
+ const std::vector<std::unique_ptr<FrameBuffer>> &statBuffers)
+{
+ for (const std::unique_ptr<FrameBuffer> &buffer : paramBuffers)
+ availableParamBuffers_.push(buffer.get());
+
+ for (const std::unique_ptr<FrameBuffer> &buffer : statBuffers)
+ availableStatBuffers_.push(buffer.get());
+
+ frameInfo_.clear();
+}
+
+void IPU3Frames::clear()
+{
+ availableParamBuffers_ = {};
+ availableStatBuffers_ = {};
+}
+
+IPU3Frames::Info *IPU3Frames::create(Request *request)
+{
+ unsigned int id = request->sequence();
+
+ if (availableParamBuffers_.empty()) {
+ LOG(IPU3, Debug) << "Parameters buffer underrun";
+ return nullptr;
+ }
+
+ if (availableStatBuffers_.empty()) {
+ LOG(IPU3, Debug) << "Statistics buffer underrun";
+ return nullptr;
+ }
+
+ FrameBuffer *paramBuffer = availableParamBuffers_.front();
+ FrameBuffer *statBuffer = availableStatBuffers_.front();
+
+ paramBuffer->_d()->setRequest(request);
+ statBuffer->_d()->setRequest(request);
+
+ availableParamBuffers_.pop();
+ availableStatBuffers_.pop();
+
+ /* \todo Remove the dynamic allocation of Info */
+ std::unique_ptr<Info> info = std::make_unique<Info>();
+
+ info->id = id;
+ info->request = request;
+ info->rawBuffer = nullptr;
+ info->paramBuffer = paramBuffer;
+ info->statBuffer = statBuffer;
+ info->paramDequeued = false;
+ info->metadataProcessed = false;
+
+ frameInfo_[id] = std::move(info);
+
+ return frameInfo_[id].get();
+}
+
+void IPU3Frames::remove(IPU3Frames::Info *info)
+{
+ /* Return params and stat buffer for reuse. */
+ availableParamBuffers_.push(info->paramBuffer);
+ availableStatBuffers_.push(info->statBuffer);
+
+ /* Delete the extended frame information. */
+ frameInfo_.erase(info->id);
+}
+
+bool IPU3Frames::tryComplete(IPU3Frames::Info *info)
+{
+ Request *request = info->request;
+
+ if (request->hasPendingBuffers())
+ return false;
+
+ if (!info->metadataProcessed)
+ return false;
+
+ if (!info->paramDequeued)
+ return false;
+
+ remove(info);
+
+ bufferAvailable.emit();
+
+ return true;
+}
+
+IPU3Frames::Info *IPU3Frames::find(unsigned int id)
+{
+ const auto &itInfo = frameInfo_.find(id);
+
+ if (itInfo != frameInfo_.end())
+ return itInfo->second.get();
+
+ LOG(IPU3, Fatal) << "Can't find tracking information for frame " << id;
+
+ return nullptr;
+}
+
+IPU3Frames::Info *IPU3Frames::find(FrameBuffer *buffer)
+{
+ for (auto const &itInfo : frameInfo_) {
+ Info *info = itInfo.second.get();
+
+ for (auto const itBuffers : info->request->buffers())
+ if (itBuffers.second == buffer)
+ return info;
+
+ if (info->rawBuffer == buffer || info->paramBuffer == buffer ||
+ info->statBuffer == buffer)
+ return info;
+ }
+
+ LOG(IPU3, Fatal) << "Can't find tracking information from buffer";
+
+ return nullptr;
+}
+
+} /* namespace libcamera */
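
create(), find() and tryComplete() form the per-request bookkeeping cycle used by the pipeline handler; frames and pipe below stand in for the handler's members. Note that the request pointer must be saved before tryComplete(), because a successful completion erases the Info:

	IPU3Frames::Info *info = frames.create(request);
	if (!info)
		return;		/* param/stat underrun, queue later */

	/* ... raw, param and stat buffers complete asynchronously ... */

	info = frames.find(buffer);
	if (!info)
		return;

	Request *completed = info->request;
	if (frames.tryComplete(info))	/* erases info on success */
		pipe->completeRequest(completed);
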
diff --git a/src/libcamera/pipeline/ipu3/frames.h b/src/libcamera/pipeline/ipu3/frames.h
new file mode 100644
index 00000000..3ef7e445
--- /dev/null
+++ b/src/libcamera/pipeline/ipu3/frames.h
@@ -0,0 +1,65 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * frames.h - Intel IPU3 Frames helper
+ */
+#ifndef __LIBCAMERA_PIPELINE_IPU3_FRAMES_H__
+#define __LIBCAMERA_PIPELINE_IPU3_FRAMES_H__
+
+#include <map>
+#include <memory>
+#include <queue>
+#include <vector>
+
+#include <libcamera/base/signal.h>
+
+namespace libcamera {
+
+class FrameBuffer;
+class IPAProxy;
+class PipelineHandler;
+class Request;
+class V4L2VideoDevice;
+struct IPABuffer;
+
+class IPU3Frames
+{
+public:
+ struct Info {
+ unsigned int id;
+ Request *request;
+
+ FrameBuffer *rawBuffer;
+ FrameBuffer *paramBuffer;
+ FrameBuffer *statBuffer;
+
+ bool paramDequeued;
+ bool metadataProcessed;
+ };
+
+ IPU3Frames();
+
+ void init(const std::vector<std::unique_ptr<FrameBuffer>> &paramBuffers,
+ const std::vector<std::unique_ptr<FrameBuffer>> &statBuffers);
+ void clear();
+
+ Info *create(Request *request);
+ void remove(Info *info);
+ bool tryComplete(Info *info);
+
+ Info *find(unsigned int id);
+ Info *find(FrameBuffer *buffer);
+
+ Signal<> bufferAvailable;
+
+private:
+ std::queue<FrameBuffer *> availableParamBuffers_;
+ std::queue<FrameBuffer *> availableStatBuffers_;
+
+ std::map<unsigned int, std::unique_ptr<Info>> frameInfo_;
+};
+
+} /* namespace libcamera */
+
+#endif /* __LIBCAMERA_PIPELINE_IPU3_FRAMES_H__ */
diff --git a/src/libcamera/pipeline/ipu3/imgu.cpp b/src/libcamera/pipeline/ipu3/imgu.cpp
index a4d74a62..e955bc34 100644
--- a/src/libcamera/pipeline/ipu3/imgu.cpp
+++ b/src/libcamera/pipeline/ipu3/imgu.cpp
@@ -13,12 +13,13 @@
#include <linux/media-bus-format.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include <libcamera/formats.h>
#include <libcamera/stream.h>
-#include "libcamera/internal/log.h"
#include "libcamera/internal/media_device.h"
-#include "libcamera/internal/utils.h"
namespace libcamera {
@@ -30,9 +31,10 @@ namespace {
* The procedure to calculate the ImgU pipe configuration has been ported
* from the pipe_config.py python script, available at:
* https://github.com/intel/intel-ipu3-pipecfg
- * at revision: 61e83f2f7606 ("Add more information into README")
+ * at revision: 243d13446e44 ("Fix some bug for some resolutions")
*/
+static constexpr unsigned int FILTER_W = 4;
static constexpr unsigned int FILTER_H = 4;
static constexpr unsigned int IF_ALIGN_W = 2;
@@ -128,20 +130,22 @@ void calculateBDSHeight(ImgUDevice::Pipe *pipe, const Size &iif, const Size &gdc
float bdsHeight;
if (!isSameRatio(pipe->input, gdc)) {
+ unsigned int foundIfHeight = 0;
float estIFHeight = (iif.width * gdc.height) /
static_cast<float>(gdc.width);
estIFHeight = std::clamp<float>(estIFHeight, minIFHeight, iif.height);
- bool found = false;
ifHeight = utils::alignUp(estIFHeight, IF_ALIGN_H);
- while (ifHeight >= minIFHeight && ifHeight / bdsSF >= minBDSHeight) {
+ while (ifHeight >= minIFHeight && ifHeight <= iif.height &&
+ ifHeight / bdsSF >= minBDSHeight) {
- bdsHeight = ifHeight / bdsSF;
- if (std::fmod(bdsHeight, 1.0) == 0) {
- unsigned int bdsIntHeight = static_cast<unsigned int>(bdsHeight);
+ float height = ifHeight / bdsSF;
+ if (std::fmod(height, 1.0) == 0) {
+ unsigned int bdsIntHeight = static_cast<unsigned int>(height);
if (!(bdsIntHeight % BDS_ALIGN_H)) {
- found = true;
+ foundIfHeight = ifHeight;
+ bdsHeight = height;
break;
}
}
@@ -150,14 +154,16 @@ void calculateBDSHeight(ImgUDevice::Pipe *pipe, const Size &iif, const Size &gdc
}
ifHeight = utils::alignUp(estIFHeight, IF_ALIGN_H);
- while (ifHeight <= iif.height && ifHeight / bdsSF >= minBDSHeight) {
+ while (ifHeight >= minIFHeight && ifHeight <= iif.height &&
+ ifHeight / bdsSF >= minBDSHeight) {
- bdsHeight = ifHeight / bdsSF;
- if (std::fmod(bdsHeight, 1.0) == 0) {
- unsigned int bdsIntHeight = static_cast<unsigned int>(bdsHeight);
+ float height = ifHeight / bdsSF;
+ if (std::fmod(height, 1.0) == 0) {
+ unsigned int bdsIntHeight = static_cast<unsigned int>(height);
if (!(bdsIntHeight % BDS_ALIGN_H)) {
- found = true;
+ foundIfHeight = ifHeight;
+ bdsHeight = height;
break;
}
}
@@ -165,16 +171,16 @@ void calculateBDSHeight(ImgUDevice::Pipe *pipe, const Size &iif, const Size &gdc
ifHeight += IF_ALIGN_H;
}
- if (found) {
+ if (foundIfHeight) {
unsigned int bdsIntHeight = static_cast<unsigned int>(bdsHeight);
- pipeConfigs.push_back({ bdsSF, { iif.width, ifHeight },
+ pipeConfigs.push_back({ bdsSF, { iif.width, foundIfHeight },
{ bdsWidth, bdsIntHeight }, gdc });
return;
}
} else {
ifHeight = utils::alignUp(iif.height, IF_ALIGN_H);
- while (ifHeight > minIFHeight && ifHeight / bdsSF >= minBDSHeight) {
+ while (ifHeight >= minIFHeight && ifHeight / bdsSF >= minBDSHeight) {
bdsHeight = ifHeight / bdsSF;
if (std::fmod(ifHeight, 1.0) == 0 && std::fmod(bdsHeight, 1.0) == 0) {
@@ -194,15 +200,20 @@ void calculateBDSHeight(ImgUDevice::Pipe *pipe, const Size &iif, const Size &gdc
void calculateBDS(ImgUDevice::Pipe *pipe, const Size &iif, const Size &gdc, float bdsSF)
{
- unsigned int minBDSWidth = gdc.width + FILTER_H * 2;
+ unsigned int minBDSWidth = gdc.width + FILTER_W * 2;
+ unsigned int minBDSHeight = gdc.height + FILTER_H * 2;
float sf = bdsSF;
while (sf <= BDS_SF_MAX && sf >= BDS_SF_MIN) {
float bdsWidth = static_cast<float>(iif.width) / sf;
+ float bdsHeight = static_cast<float>(iif.height) / sf;
- if (std::fmod(bdsWidth, 1.0) == 0) {
+ if (std::fmod(bdsWidth, 1.0) == 0 &&
+ std::fmod(bdsHeight, 1.0) == 0) {
unsigned int bdsIntWidth = static_cast<unsigned int>(bdsWidth);
- if (!(bdsIntWidth % BDS_ALIGN_W) && bdsWidth >= minBDSWidth)
+ unsigned int bdsIntHeight = static_cast<unsigned int>(bdsHeight);
+ if (!(bdsIntWidth % BDS_ALIGN_W) && bdsWidth >= minBDSWidth &&
+ !(bdsIntHeight % BDS_ALIGN_H) && bdsHeight >= minBDSHeight)
calculateBDSHeight(pipe, iif, gdc, bdsIntWidth, sf);
}
@@ -212,10 +223,14 @@ void calculateBDS(ImgUDevice::Pipe *pipe, const Size &iif, const Size &gdc, floa
sf = bdsSF;
while (sf <= BDS_SF_MAX && sf >= BDS_SF_MIN) {
float bdsWidth = static_cast<float>(iif.width) / sf;
+ float bdsHeight = static_cast<float>(iif.height) / sf;
- if (std::fmod(bdsWidth, 1.0) == 0) {
+ if (std::fmod(bdsWidth, 1.0) == 0 &&
+ std::fmod(bdsHeight, 1.0) == 0) {
unsigned int bdsIntWidth = static_cast<unsigned int>(bdsWidth);
- if (!(bdsIntWidth % BDS_ALIGN_W) && bdsWidth >= minBDSWidth)
+ unsigned int bdsIntHeight = static_cast<unsigned int>(bdsHeight);
+ if (!(bdsIntWidth % BDS_ALIGN_W) && bdsWidth >= minBDSWidth &&
+ !(bdsIntHeight % BDS_ALIGN_H) && bdsHeight >= minBDSHeight)
calculateBDSHeight(pipe, iif, gdc, bdsIntWidth, sf);
}
@@ -343,29 +358,32 @@ int ImgUDevice::init(MediaDevice *media, unsigned int index)
* by the match() function: no need to check for newly created
* video devices and subdevice validity here.
*/
- imgu_.reset(V4L2Subdevice::fromEntityName(media, name_));
+ imgu_ = V4L2Subdevice::fromEntityName(media, name_);
ret = imgu_->open();
if (ret)
return ret;
- input_.reset(V4L2VideoDevice::fromEntityName(media, name_ + " input"));
+ input_ = V4L2VideoDevice::fromEntityName(media, name_ + " input");
ret = input_->open();
if (ret)
return ret;
- output_.reset(V4L2VideoDevice::fromEntityName(media,
- name_ + " output"));
+ output_ = V4L2VideoDevice::fromEntityName(media, name_ + " output");
ret = output_->open();
if (ret)
return ret;
- viewfinder_.reset(V4L2VideoDevice::fromEntityName(media,
- name_ + " viewfinder"));
+ viewfinder_ = V4L2VideoDevice::fromEntityName(media, name_ + " viewfinder");
ret = viewfinder_->open();
if (ret)
return ret;
- stat_.reset(V4L2VideoDevice::fromEntityName(media, name_ + " 3a stat"));
+ param_ = V4L2VideoDevice::fromEntityName(media, name_ + " parameters");
+ ret = param_->open();
+ if (ret)
+ return ret;
+
+ stat_ = V4L2VideoDevice::fromEntityName(media, name_ + " 3a stat");
ret = stat_->open();
if (ret)
return ret;
@@ -389,21 +407,56 @@ ImgUDevice::PipeConfig ImgUDevice::calculatePipeConfig(Pipe *pipe)
LOG(IPU3, Debug) << "vf: " << pipe->viewfinder.toString();
const Size &in = pipe->input;
+
+ /*
+ * \todo Filter out all resolutions < IF_CROP_MAX.
+ * See https://bugs.libcamera.org/show_bug.cgi?id=32
+ */
+ if (in.width < IF_CROP_MAX_W || in.height < IF_CROP_MAX_H) {
+ LOG(IPU3, Error) << "Input resolution " << in.toString()
+ << " not supported";
+ return {};
+ }
+
Size gdc = calculateGDC(pipe);
- unsigned int ifWidth = utils::alignUp(in.width, IF_ALIGN_W);
- unsigned int ifHeight = in.height;
- unsigned int minIfWidth = in.width - IF_CROP_MAX_W;
float bdsSF = static_cast<float>(in.width) / gdc.width;
float sf = findScaleFactor(bdsSF, bdsScalingFactors, true);
+ /* Populate the configurations vector by scaling width and height. */
+ unsigned int ifWidth = utils::alignUp(in.width, IF_ALIGN_W);
+ unsigned int ifHeight = utils::alignUp(in.height, IF_ALIGN_H);
+ unsigned int minIfWidth = in.width - IF_CROP_MAX_W;
+ unsigned int minIfHeight = in.height - IF_CROP_MAX_H;
while (ifWidth >= minIfWidth) {
- Size iif{ ifWidth, ifHeight };
- calculateBDS(pipe, iif, gdc, sf);
+ while (ifHeight >= minIfHeight) {
+ Size iif{ ifWidth, ifHeight };
+ calculateBDS(pipe, iif, gdc, sf);
+ ifHeight -= IF_ALIGN_H;
+ }
ifWidth -= IF_ALIGN_W;
}
+ /* Repeat search by scaling width first. */
+ ifWidth = utils::alignUp(in.width, IF_ALIGN_W);
+ ifHeight = utils::alignUp(in.height, IF_ALIGN_H);
+ minIfWidth = in.width - IF_CROP_MAX_W;
+ minIfHeight = in.height - IF_CROP_MAX_H;
+ while (ifHeight >= minIfHeight) {
+ /*
+ * \todo This procedure is probably broken:
+ * https://github.com/intel/intel-ipu3-pipecfg/issues/2
+ */
+ while (ifWidth >= minIfWidth) {
+ Size iif{ ifWidth, ifHeight };
+ calculateBDS(pipe, iif, gdc, sf);
+ ifWidth -= IF_ALIGN_W;
+ }
+
+ ifHeight -= IF_ALIGN_H;
+ }
+
if (pipeConfigs.size() == 0) {
LOG(IPU3, Error) << "Failed to calculate pipe configuration";
return {};
@@ -477,6 +530,20 @@ int ImgUDevice::configure(const PipeConfig &pipeConfig, V4L2DeviceFormat *inputF
LOG(IPU3, Debug) << "ImgU GDC format = " << gdcFormat.toString();
+ StreamConfiguration paramCfg = {};
+ paramCfg.size = inputFormat->size;
+ V4L2DeviceFormat paramFormat;
+ ret = configureVideoDevice(param_.get(), PAD_PARAM, paramCfg, &paramFormat);
+ if (ret)
+ return ret;
+
+ StreamConfiguration statCfg = {};
+ statCfg.size = inputFormat->size;
+ V4L2DeviceFormat statFormat;
+ ret = configureVideoDevice(stat_.get(), PAD_STAT, statCfg, &statFormat);
+ if (ret)
+ return ret;
+
return 0;
}
@@ -500,8 +567,11 @@ int ImgUDevice::configureVideoDevice(V4L2VideoDevice *dev, unsigned int pad,
if (ret)
return ret;
- /* No need to apply format to the stat node. */
- if (dev == stat_.get())
+ /*
+ * No need to apply format to the param or stat video devices as the
+ * driver ignores the operation.
+ */
+ if (dev == param_.get() || dev == stat_.get())
return 0;
*outputFormat = {};
@@ -532,14 +602,13 @@ int ImgUDevice::allocateBuffers(unsigned int bufferCount)
return ret;
}
- /*
- * The kernel fails to start if buffers are not either imported or
- * allocated for the statistics video device. As statistics buffers are
- * not yet used by the pipeline import buffers to save memory.
- *
- * \todo To be revised when we'll actually use the stat node.
- */
- ret = stat_->importBuffers(bufferCount);
+ ret = param_->allocateBuffers(bufferCount, &paramBuffers_);
+ if (ret < 0) {
+ LOG(IPU3, Error) << "Failed to allocate ImgU param buffers";
+ goto error;
+ }
+
+ ret = stat_->allocateBuffers(bufferCount, &statBuffers_);
if (ret < 0) {
LOG(IPU3, Error) << "Failed to allocate ImgU stat buffers";
goto error;
@@ -577,10 +646,17 @@ void ImgUDevice::freeBuffers()
{
int ret;
+ paramBuffers_.clear();
+ statBuffers_.clear();
+
ret = output_->releaseBuffers();
if (ret)
LOG(IPU3, Error) << "Failed to release ImgU output buffers";
+ ret = param_->releaseBuffers();
+ if (ret)
+ LOG(IPU3, Error) << "Failed to release ImgU param buffers";
+
ret = stat_->releaseBuffers();
if (ret)
LOG(IPU3, Error) << "Failed to release ImgU stat buffers";
@@ -611,6 +687,12 @@ int ImgUDevice::start()
return ret;
}
+ ret = param_->streamOn();
+ if (ret) {
+ LOG(IPU3, Error) << "Failed to start ImgU param";
+ return ret;
+ }
+
ret = stat_->streamOn();
if (ret) {
LOG(IPU3, Error) << "Failed to start ImgU stat";
@@ -632,6 +714,7 @@ int ImgUDevice::stop()
ret = output_->streamOff();
ret |= viewfinder_->streamOff();
+ ret |= param_->streamOff();
ret |= stat_->streamOff();
ret |= input_->streamOff();
@@ -673,6 +756,7 @@ int ImgUDevice::linkSetup(const std::string &source, unsigned int sourcePad,
int ImgUDevice::enableLinks(bool enable)
{
std::string viewfinderName = name_ + " viewfinder";
+ std::string paramName = name_ + " parameters";
std::string outputName = name_ + " output";
std::string statName = name_ + " 3a stat";
std::string inputName = name_ + " input";
@@ -690,6 +774,10 @@ int ImgUDevice::enableLinks(bool enable)
if (ret)
return ret;
+ ret = linkSetup(paramName, 0, name_, PAD_PARAM, enable);
+ if (ret)
+ return ret;
+
return linkSetup(name_, PAD_STAT, statName, 0, enable);
}
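
The reworked BDS search accepts a candidate scale factor only when both scaled dimensions are integral, aligned, and leave room for the filter margins; with FILTER_W = FILTER_H = 4, a 1280x720 GDC size requires a BDS rectangle of at least 1288x728. The acceptance test boils down to this sketch, using the constants defined in this file:

	float bdsWidth = static_cast<float>(iif.width) / sf;
	float bdsHeight = static_cast<float>(iif.height) / sf;

	bool integral = std::fmod(bdsWidth, 1.0) == 0 &&
			std::fmod(bdsHeight, 1.0) == 0;
	bool aligned = !(static_cast<unsigned int>(bdsWidth) % BDS_ALIGN_W) &&
		       !(static_cast<unsigned int>(bdsHeight) % BDS_ALIGN_H);
	bool fits = bdsWidth >= minBDSWidth && bdsHeight >= minBDSHeight;

	if (integral && aligned && fits)
		calculateBDSHeight(pipe, iif, gdc,
				   static_cast<unsigned int>(bdsWidth), sf);
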
diff --git a/src/libcamera/pipeline/ipu3/imgu.h b/src/libcamera/pipeline/ipu3/imgu.h
index c73ac5a5..9d491511 100644
--- a/src/libcamera/pipeline/ipu3/imgu.h
+++ b/src/libcamera/pipeline/ipu3/imgu.h
@@ -61,13 +61,6 @@ public:
outputFormat);
}
- int configureStat(const StreamConfiguration &cfg,
- V4L2DeviceFormat *outputFormat)
- {
- return configureVideoDevice(stat_.get(), PAD_STAT, cfg,
- outputFormat);
- }
-
int allocateBuffers(unsigned int bufferCount);
void freeBuffers();
@@ -78,13 +71,17 @@ public:
std::unique_ptr<V4L2Subdevice> imgu_;
std::unique_ptr<V4L2VideoDevice> input_;
+ std::unique_ptr<V4L2VideoDevice> param_;
std::unique_ptr<V4L2VideoDevice> output_;
std::unique_ptr<V4L2VideoDevice> viewfinder_;
std::unique_ptr<V4L2VideoDevice> stat_;
- /* \todo Add param video device for 3A tuning */
+
+ std::vector<std::unique_ptr<FrameBuffer>> paramBuffers_;
+ std::vector<std::unique_ptr<FrameBuffer>> statBuffers_;
private:
static constexpr unsigned int PAD_INPUT = 0;
+ static constexpr unsigned int PAD_PARAM = 1;
static constexpr unsigned int PAD_OUTPUT = 2;
static constexpr unsigned int PAD_VF = 3;
static constexpr unsigned int PAD_STAT = 4;
diff --git a/src/libcamera/pipeline/ipu3/ipu3.cpp b/src/libcamera/pipeline/ipu3/ipu3.cpp
index f5a20d30..76c3bb3d 100644
--- a/src/libcamera/pipeline/ipu3/ipu3.cpp
+++ b/src/libcamera/pipeline/ipu3/ipu3.cpp
@@ -11,20 +11,27 @@
#include <queue>
#include <vector>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include <libcamera/camera.h>
+#include <libcamera/control_ids.h>
#include <libcamera/formats.h>
+#include <libcamera/ipa/ipu3_ipa_interface.h>
+#include <libcamera/ipa/ipu3_ipa_proxy.h>
+#include <libcamera/property_ids.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>
#include "libcamera/internal/camera_sensor.h"
+#include "libcamera/internal/delayed_controls.h"
#include "libcamera/internal/device_enumerator.h"
-#include "libcamera/internal/log.h"
+#include "libcamera/internal/ipa_manager.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/pipeline_handler.h"
-#include "libcamera/internal/utils.h"
-#include "libcamera/internal/v4l2_controls.h"
#include "cio2.h"
+#include "frames.h"
#include "imgu.h"
namespace libcamera {
@@ -39,17 +46,28 @@ static constexpr unsigned int IMGU_OUTPUT_WIDTH_ALIGN = 64;
static constexpr unsigned int IMGU_OUTPUT_HEIGHT_ALIGN = 4;
static constexpr unsigned int IMGU_OUTPUT_WIDTH_MARGIN = 64;
static constexpr unsigned int IMGU_OUTPUT_HEIGHT_MARGIN = 32;
+static constexpr Size IPU3ViewfinderSize(1280, 720);
+
+static const ControlInfoMap::Map IPU3Controls = {
+ { &controls::draft::PipelineDepth, ControlInfo(2, 3) },
+};
class IPU3CameraData : public CameraData
{
public:
IPU3CameraData(PipelineHandler *pipe)
- : CameraData(pipe)
+ : CameraData(pipe), exposureTime_(0), supportsFlips_(false)
{
}
+ int loadIPA();
+
void imguOutputBufferReady(FrameBuffer *buffer);
void cio2BufferReady(FrameBuffer *buffer);
+ void paramBufferReady(FrameBuffer *buffer);
+ void statBufferReady(FrameBuffer *buffer);
+ void queuePendingRequests();
+ void cancelPendingRequests();
CIO2Device cio2_;
ImgUDevice *imgu_;
@@ -57,6 +75,22 @@ public:
Stream outStream_;
Stream vfStream_;
Stream rawStream_;
+
+ uint32_t exposureTime_;
+ Rectangle cropRegion_;
+ bool supportsFlips_;
+ Transform rotationTransform_;
+
+ std::unique_ptr<DelayedControls> delayedCtrls_;
+ IPU3Frames frameInfos_;
+
+ std::unique_ptr<ipa::ipu3::IPAProxyIPU3> ipa_;
+
+ std::queue<Request *> pendingRequests_;
+
+private:
+ void queueFrameAction(unsigned int id,
+ const ipa::ipu3::IPU3Action &action);
};
class IPU3CameraConfiguration : public CameraConfiguration
@@ -66,9 +100,12 @@ public:
Status validate() override;
- const StreamConfiguration &cio2Format() const { return cio2Configuration_; };
+ const StreamConfiguration &cio2Format() const { return cio2Configuration_; }
const ImgUDevice::PipeConfig imguConfig() const { return pipeConfig_; }
+ /* Cache the combinedTransform_ that will be applied to the sensor */
+ Transform combinedTransform_;
+
private:
/*
* The IPU3CameraData instance is guaranteed to be valid as long as the
@@ -100,7 +137,7 @@ public:
int exportFrameBuffers(Camera *camera, Stream *stream,
std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;
- int start(Camera *camera) override;
+ int start(Camera *camera, const ControlList *controls) override;
void stop(Camera *camera) override;
int queueRequestDevice(Camera *camera, Request *request) override;
@@ -114,6 +151,7 @@ private:
PipelineHandler::cameraData(camera));
}
+ int initControls(IPU3CameraData *data);
int registerCameras();
int allocateBuffers(Camera *camera);
@@ -123,6 +161,8 @@ private:
ImgUDevice imgu1_;
MediaDevice *cio2MediaDev_;
MediaDevice *imguMediaDev_;
+
+ std::vector<IPABuffer> ipaBuffers_;
};
IPU3CameraConfiguration::IPU3CameraConfiguration(IPU3CameraData *data)
@@ -138,11 +178,49 @@ CameraConfiguration::Status IPU3CameraConfiguration::validate()
if (config_.empty())
return Invalid;
- if (transform != Transform::Identity) {
- transform = Transform::Identity;
+ Transform combined = transform * data_->rotationTransform_;
+
+ /*
+ * We combine the platform and user transform, but must "adjust away"
+ * any combined result that includes a transposition, as we can't do
+ * those. In this case, flipping only the transpose bit is helpful to
+ * applications - they either get the transform they requested, or have
+ * to do a simple transpose themselves (they don't have to worry about
+ * the other possible cases).
+ */
+ if (!!(combined & Transform::Transpose)) {
+ /*
+ * Flipping the transpose bit in "transform" flips it in the
+ * combined result too (as it's the last thing that happens),
+ * which is of course clearing it.
+ */
+ transform ^= Transform::Transpose;
+ combined &= ~Transform::Transpose;
+ status = Adjusted;
+ }
+
+ /*
+ * We also check if the sensor doesn't do h/vflips at all, in which
+ * case we clear them, and the application will have to do everything.
+ */
+ if (!data_->supportsFlips_ && !!combined) {
+ /*
+ * If the sensor can do no transforms, then combined must be
+ * changed to the identity. The only user transform that gives
+ * rise to this is the inverse of the rotation. (Recall that
+ * combined = transform * rotationTransform.)
+ */
+ transform = -data_->rotationTransform_;
+ combined = Transform::Identity;
status = Adjusted;
}
+ /*
+ * Store the final combined transform that configure() will need to
+ * apply to the sensor to save us working it out again.
+ */
+ combinedTransform_ = combined;
+
/* Cap the number of entries to the available streams. */
if (config_.size() > IPU3_MAX_STREAMS) {
config_.resize(IPU3_MAX_STREAMS);
@@ -153,12 +231,14 @@ CameraConfiguration::Status IPU3CameraConfiguration::validate()
unsigned int rawCount = 0;
unsigned int yuvCount = 0;
Size maxYuvSize;
+ Size rawSize;
for (const StreamConfiguration &cfg : config_) {
const PixelFormatInfo &info = PixelFormatInfo::info(cfg.pixelFormat);
if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW) {
rawCount++;
+ rawSize = cfg.size;
} else {
yuvCount++;
maxYuvSize.expandTo(cfg.size);
@@ -181,11 +261,16 @@ CameraConfiguration::Status IPU3CameraConfiguration::validate()
* commit message of the patch that introduced this comment for more
* failure examples).
*
- * Until the sensor frame size calculation criteria are clarified,
- * always use the largest possible one which guarantees better results
- * at the expense of the frame rate and CSI-2 bus bandwidth.
+ * Until the sensor frame size calculation criteria are clarified, when
+ * capturing from ImgU always use the largest possible size which
+ * guarantees better results at the expense of the frame rate and CSI-2
+ * bus bandwidth. When only a raw stream is requested use the requested
+ * size instead, as the ImgU is not involved.
*/
- cio2Configuration_ = data_->cio2_.generateConfiguration({});
+ if (!yuvCount)
+ cio2Configuration_ = data_->cio2_.generateConfiguration(rawSize);
+ else
+ cio2Configuration_ = data_->cio2_.generateConfiguration({});
if (!cio2Configuration_.pixelFormat.isValid())
return Invalid;
@@ -368,7 +453,7 @@ CameraConfiguration *PipelineHandlerIPU3::generateConfiguration(Camera *camera,
* capped to the maximum sensor resolution and aligned
* to the ImgU output constraints.
*/
- size = sensorResolution.boundedTo({ 1280, 720 })
+ size = sensorResolution.boundedTo(IPU3ViewfinderSize)
.alignedDownTo(IMGU_OUTPUT_WIDTH_ALIGN,
IMGU_OUTPUT_HEIGHT_ALIGN);
pixelFormat = formats::NV12;
@@ -458,11 +543,33 @@ int PipelineHandlerIPU3::configure(Camera *camera, CameraConfiguration *c)
* adjusted format to be propagated to the ImgU output devices.
*/
const Size &sensorSize = config->cio2Format().size;
- V4L2DeviceFormat cio2Format = {};
+ V4L2DeviceFormat cio2Format;
ret = cio2->configure(sensorSize, &cio2Format);
if (ret)
return ret;
+ IPACameraSensorInfo sensorInfo;
+ cio2->sensor()->sensorInfo(&sensorInfo);
+ data->cropRegion_ = sensorInfo.analogCrop;
+
+ /*
+ * Configure the H/V flip controls based on the combination of
+ * the sensor and user transform.
+ */
+ if (data->supportsFlips_) {
+ ControlList sensorCtrls(cio2->sensor()->controls());
+ sensorCtrls.set(V4L2_CID_HFLIP,
+ static_cast<int32_t>(!!(config->combinedTransform_
+ & Transform::HFlip)));
+ sensorCtrls.set(V4L2_CID_VFLIP,
+ static_cast<int32_t>(!!(config->combinedTransform_
+ & Transform::VFlip)));
+
+ ret = cio2->sensor()->setControls(&sensorCtrls);
+ if (ret)
+ return ret;
+ }
+
/*
* If the ImgU gets configured, its driver seems to expect that
* buffers will be queued to its outputs, as otherwise the next
@@ -513,28 +620,36 @@ int PipelineHandlerIPU3::configure(Camera *camera, CameraConfiguration *c)
return ret;
}
- /*
- * Apply the largest available format to the stat node.
- * \todo Revise this when we'll actually use the stat node.
- */
- StreamConfiguration statCfg = {};
- statCfg.size = cio2Format.size;
-
- ret = imgu->configureStat(statCfg, &outputFormat);
- if (ret)
- return ret;
-
/* Apply the "pipe_mode" control to the ImgU subdevice. */
ControlList ctrls(imgu->imgu_->controls());
+ /*
+ * Set the ImgU pipe mode to 'Video' unconditionally to have statistics
+ * generated.
+ *
+ * \todo Figure out what the 'Still Capture' mode is meant for, and use
+ * it accordingly.
+ */
ctrls.set(V4L2_CID_IPU3_PIPE_MODE,
- static_cast<int32_t>(vfCfg ? IPU3PipeModeVideo :
- IPU3PipeModeStillCapture));
+ static_cast<int32_t>(IPU3PipeModeVideo));
ret = imgu->imgu_->setControls(&ctrls);
if (ret) {
LOG(IPU3, Error) << "Unable to set pipe_mode control";
return ret;
}
+ ipa::ipu3::IPAConfigInfo configInfo;
+ configInfo.entityControls.emplace(0, data->cio2_.sensor()->controls());
+ configInfo.sensorInfo = sensorInfo;
+ configInfo.bdsOutputSize = config->imguConfig().bds;
+ configInfo.iif = config->imguConfig().iif;
+
+ ret = data->ipa_->configure(configInfo);
+ if (ret) {
+ LOG(IPU3, Error) << "Failed to configure IPA: "
+ << strerror(-ret);
+ return ret;
+ }
+
return 0;
}
@@ -579,6 +694,25 @@ int PipelineHandlerIPU3::allocateBuffers(Camera *camera)
if (ret < 0)
return ret;
+ /* Map buffers to the IPA. */
+ unsigned int ipaBufferId = 1;
+
+ for (const std::unique_ptr<FrameBuffer> &buffer : imgu->paramBuffers_) {
+ buffer->setCookie(ipaBufferId++);
+ ipaBuffers_.emplace_back(buffer->cookie(), buffer->planes());
+ }
+
+ for (const std::unique_ptr<FrameBuffer> &buffer : imgu->statBuffers_) {
+ buffer->setCookie(ipaBufferId++);
+ ipaBuffers_.emplace_back(buffer->cookie(), buffer->planes());
+ }
+
+ data->ipa_->mapBuffers(ipaBuffers_);
+
+ data->frameInfos_.init(imgu->paramBuffers_, imgu->statBuffers_);
+ data->frameInfos_.bufferAvailable.connect(
+ data, &IPU3CameraData::queuePendingRequests);
+
return 0;
}
@@ -586,12 +720,21 @@ int PipelineHandlerIPU3::freeBuffers(Camera *camera)
{
IPU3CameraData *data = cameraData(camera);
+ data->frameInfos_.clear();
+
+ std::vector<unsigned int> ids;
+ for (IPABuffer &ipabuf : ipaBuffers_)
+ ids.push_back(ipabuf.id);
+
+ data->ipa_->unmapBuffers(ids);
+ ipaBuffers_.clear();
+
data->imgu_->freeBuffers();
return 0;
}
-int PipelineHandlerIPU3::start(Camera *camera)
+int PipelineHandlerIPU3::start(Camera *camera, [[maybe_unused]] const ControlList *controls)
{
IPU3CameraData *data = cameraData(camera);
CIO2Device *cio2 = &data->cio2_;
@@ -603,6 +746,10 @@ int PipelineHandlerIPU3::start(Camera *camera)
if (ret)
return ret;
+ ret = data->ipa_->start();
+ if (ret)
+ goto error;
+
/*
 * Start the ImgU video devices; buffers will be queued to the
 * ImgU output and viewfinder when requests are queued.
@@ -612,15 +759,15 @@ int PipelineHandlerIPU3::start(Camera *camera)
goto error;
ret = imgu->start();
- if (ret) {
- imgu->stop();
- cio2->stop();
+ if (ret)
goto error;
- }
return 0;
error:
+ imgu->stop();
+ cio2->stop();
+ data->ipa_->stop();
freeBuffers(camera);
LOG(IPU3, Error) << "Failed to start camera " << camera->id();
@@ -632,6 +779,10 @@ void PipelineHandlerIPU3::stop(Camera *camera)
IPU3CameraData *data = cameraData(camera);
int ret = 0;
+ data->cancelPendingRequests();
+
+ data->ipa_->stop();
+
ret |= data->imgu_->stop();
ret |= data->cio2_.stop();
if (ret)
@@ -640,38 +791,68 @@ void PipelineHandlerIPU3::stop(Camera *camera)
freeBuffers(camera);
}
-int PipelineHandlerIPU3::queueRequestDevice(Camera *camera, Request *request)
+void IPU3CameraData::cancelPendingRequests()
{
- IPU3CameraData *data = cameraData(camera);
- int error = 0;
+ while (!pendingRequests_.empty()) {
+ Request *request = pendingRequests_.front();
- /*
- * Queue a buffer on the CIO2, using the raw stream buffer provided in
- * the request, if any, or a CIO2 internal buffer otherwise.
- */
- FrameBuffer *rawBuffer = request->findBuffer(&data->rawStream_);
- error = data->cio2_.queueBuffer(request, rawBuffer);
- if (error)
- return error;
-
- /* Queue all buffers from the request aimed for the ImgU. */
- for (auto it : request->buffers()) {
- const Stream *stream = it.first;
- FrameBuffer *buffer = it.second;
- int ret;
-
- if (stream == &data->outStream_)
- ret = data->imgu_->output_->queueBuffer(buffer);
- else if (stream == &data->vfStream_)
- ret = data->imgu_->viewfinder_->queueBuffer(buffer);
- else
- continue;
+ for (auto it : request->buffers()) {
+ FrameBuffer *buffer = it.second;
+ buffer->cancel();
+ pipe_->completeBuffer(request, buffer);
+ }
+
+ pipe_->completeRequest(request);
+ pendingRequests_.pop();
+ }
+}
+
+void IPU3CameraData::queuePendingRequests()
+{
+ while (!pendingRequests_.empty()) {
+ Request *request = pendingRequests_.front();
+
+ IPU3Frames::Info *info = frameInfos_.create(request);
+ if (!info)
+ break;
+
+ /*
+ * Queue a buffer on the CIO2, using the raw stream buffer
+ * provided in the request, if any, or a CIO2 internal buffer
+ * otherwise.
+ */
+ FrameBuffer *reqRawBuffer = request->findBuffer(&rawStream_);
+ FrameBuffer *rawBuffer = cio2_.queueBuffer(request, reqRawBuffer);
+ /*
+		 * \todo If queueBuffer() fails to queue a buffer to the device,
+		 * report the request as failed by cancelling the request and
+		 * calling PipelineHandler::completeRequest().
+ */
+ if (!rawBuffer) {
+ frameInfos_.remove(info);
+ break;
+ }
+
+ info->rawBuffer = rawBuffer;
+
+ ipa::ipu3::IPU3Event ev;
+ ev.op = ipa::ipu3::EventProcessControls;
+ ev.frame = info->id;
+ ev.controls = request->controls();
+ ipa_->processEvent(ev);
- if (ret < 0)
- error = ret;
+ pendingRequests_.pop();
}
+}
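
queuePendingRequests() implements a bounded-queue pattern: drain the pending requests until a per-frame resource cannot be allocated, then resume from the bufferAvailable signal. A minimal sketch of that control flow, with an integer slot count standing in for the IPU3Frames bookkeeping:

#include <iostream>
#include <queue>

int main()
{
	std::queue<int> pending({ 1, 2, 3, 4 });
	int freeSlots = 2;

	auto queuePendingRequests = [&]() {
		while (!pending.empty()) {
			if (freeSlots == 0)
				break; /* retry on the next bufferAvailable */
			freeSlots--;
			std::cout << "queued request " << pending.front() << "\n";
			pending.pop();
		}
	};

	queuePendingRequests(); /* queues 1 and 2, then stalls */
	freeSlots++;            /* a frame completed, a slot was released */
	queuePendingRequests(); /* queues 3 */
}
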
+
+int PipelineHandlerIPU3::queueRequestDevice(Camera *camera, Request *request)
+{
+ IPU3CameraData *data = cameraData(camera);
- return error;
+ data->pendingRequests_.push(request);
+ data->queuePendingRequests();
+
+ return 0;
}
bool PipelineHandlerIPU3::match(DeviceEnumerator *enumerator)
@@ -727,6 +908,161 @@ bool PipelineHandlerIPU3::match(DeviceEnumerator *enumerator)
}
/**
+ * \brief Initialize the camera controls
+ * \param[in] data The camera data
+ *
+ * Initialize the camera controls as the union of the static pipeline handler
+ * controls (IPU3Controls) and controls created dynamically from the sensor
+ * capabilities.
+ *
+ * \return 0 on success or a negative error code otherwise
+ */
+int PipelineHandlerIPU3::initControls(IPU3CameraData *data)
+{
+ /*
+ * \todo The controls initialized here depend on sensor configuration
+ * and their limits should be updated once the configuration gets
+ * changed.
+ *
+ * Initialize the sensor using its resolution and compute the control
+ * limits.
+ */
+ CameraSensor *sensor = data->cio2_.sensor();
+ V4L2SubdeviceFormat sensorFormat = {};
+ sensorFormat.size = sensor->resolution();
+ int ret = sensor->setFormat(&sensorFormat);
+ if (ret)
+ return ret;
+
+ IPACameraSensorInfo sensorInfo{};
+ ret = sensor->sensorInfo(&sensorInfo);
+ if (ret)
+ return ret;
+
+ ControlInfoMap::Map controls = IPU3Controls;
+ const ControlInfoMap &sensorControls = sensor->controls();
+ const std::vector<int32_t> &testPatternModes = sensor->testPatternModes();
+ if (!testPatternModes.empty()) {
+ std::vector<ControlValue> values;
+ values.reserve(testPatternModes.size());
+
+ for (int32_t pattern : testPatternModes)
+ values.emplace_back(pattern);
+
+ controls[&controls::draft::TestPatternMode] = ControlInfo(values);
+ }
+
+ /*
+ * Compute exposure time limits.
+ *
+ * Initialize the control using the line length and pixel rate of the
+ * current configuration converted to microseconds. Use the
+ * V4L2_CID_EXPOSURE control to get exposure min, max and default and
+ * convert it from lines to microseconds.
+ */
+ double lineDuration = sensorInfo.lineLength
+ / (sensorInfo.pixelRate / 1e6);
+ const ControlInfo &v4l2Exposure = sensorControls.find(V4L2_CID_EXPOSURE)->second;
+ int32_t minExposure = v4l2Exposure.min().get<int32_t>() * lineDuration;
+ int32_t maxExposure = v4l2Exposure.max().get<int32_t>() * lineDuration;
+ int32_t defExposure = v4l2Exposure.def().get<int32_t>() * lineDuration;
+
+ /*
+ * \todo Report the actual exposure time, use the default for the
+ * moment.
+ */
+ data->exposureTime_ = defExposure;
+
+ controls[&controls::ExposureTime] = ControlInfo(minExposure, maxExposure,
+ defExposure);
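
A worked example of the lines-to-microseconds conversion above, with made-up sensor numbers: lineDuration = lineLength / (pixelRate / 1e6) gives microseconds per line, and the V4L2 exposure limits, expressed in lines, scale by it:

#include <cstdint>
#include <iostream>

int main()
{
	/* Illustrative sensor numbers, not a real mode. */
	double lineLength = 2448;  /* pixels per line, including blanking */
	double pixelRate = 240e6;  /* pixels per second */
	double lineDuration = lineLength / (pixelRate / 1e6); /* us per line */

	/* V4L2_CID_EXPOSURE limits, expressed in lines. */
	int32_t minLines = 2, maxLines = 1500, defLines = 800;

	int32_t minExposure = minLines * lineDuration; /* ~20 us */
	int32_t maxExposure = maxLines * lineDuration; /* 15300 us */
	int32_t defExposure = defLines * lineDuration; /* 8160 us */

	std::cout << minExposure << " " << maxExposure << " "
		  << defExposure << " us\n";
}
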
+
+ /*
+ * Compute the frame duration limits.
+ *
+ * The frame length is computed assuming a fixed line length combined
+ * with the vertical frame sizes.
+ */
+ const ControlInfo &v4l2HBlank = sensorControls.find(V4L2_CID_HBLANK)->second;
+ uint32_t hblank = v4l2HBlank.def().get<int32_t>();
+ uint32_t lineLength = sensorInfo.outputSize.width + hblank;
+
+ const ControlInfo &v4l2VBlank = sensorControls.find(V4L2_CID_VBLANK)->second;
+ std::array<uint32_t, 3> frameHeights{
+ v4l2VBlank.min().get<int32_t>() + sensorInfo.outputSize.height,
+ v4l2VBlank.max().get<int32_t>() + sensorInfo.outputSize.height,
+ v4l2VBlank.def().get<int32_t>() + sensorInfo.outputSize.height,
+ };
+
+ std::array<int64_t, 3> frameDurations;
+ for (unsigned int i = 0; i < frameHeights.size(); ++i) {
+ uint64_t frameSize = lineLength * frameHeights[i];
+ frameDurations[i] = frameSize / (sensorInfo.pixelRate / 1000000U);
+ }
+
+ controls[&controls::FrameDurationLimits] =
+ ControlInfo(frameDurations[0],
+ frameDurations[1],
+ frameDurations[2]);
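
The frame duration follows from frameSize = lineLength * (outputHeight + vblank) pixels, divided by the pixel rate in pixels per microsecond. A standalone check with illustrative numbers:

#include <array>
#include <cstdint>
#include <iostream>

int main()
{
	/* Illustrative sensor numbers. */
	uint32_t width = 2048, height = 1536, hblank = 400;
	uint32_t lineLength = width + hblank; /* pixels per line */
	double pixelRate = 240e6;             /* pixels per second */

	/* VBLANK min/max/def in lines, as a driver might report them. */
	std::array<int32_t, 3> vblank{ 32, 25000, 504 };

	for (int32_t vb : vblank) {
		uint64_t frameSize = uint64_t(lineLength) * (height + vb);
		int64_t duration = frameSize / uint64_t(pixelRate / 1000000U);
		std::cout << "VBLANK=" << vb << " -> " << duration << " us\n";
	}
}
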
+
+ /*
+ * Compute the scaler crop limits.
+ *
+	 * Initialize the control using the 'Viewfinder' configuration (1280x720)
+ * as the pipeline output resolution and the full sensor size as input
+ * frame (see the todo note in the validate() function about the usage
+ * of the sensor's full frame as ImgU input).
+ */
+
+ /*
+ * The maximum scaler crop rectangle is the analogue crop used to
+ * produce the maximum frame size.
+ */
+ const Rectangle &analogueCrop = sensorInfo.analogCrop;
+ Rectangle maxCrop = analogueCrop;
+
+ /*
+ * As the ImgU cannot up-scale, the minimum selection rectangle has to
+ * be as large as the pipeline output size. Use the default viewfinder
+ * configuration as the desired output size and calculate the minimum
+ * rectangle required to satisfy the ImgU processing margins, unless the
+ * sensor resolution is smaller.
+ *
+ * \todo This implementation is based on the same assumptions about the
+	 * ImgU pipeline configuration described in the viewfinder and main
+ * output sizes calculation in the validate() function.
+ */
+
+	/* The largest size strictly smaller than the sensor resolution, aligned to the margins. */
+ Size minSize = Size(sensor->resolution().width - 1,
+ sensor->resolution().height - 1)
+ .alignedDownTo(IMGU_OUTPUT_WIDTH_MARGIN,
+ IMGU_OUTPUT_HEIGHT_MARGIN);
+
+ /*
+ * Either the smallest margin-aligned size larger than the viewfinder
+ * size or the adjusted sensor resolution.
+ */
+ minSize = Size(IPU3ViewfinderSize.width + 1,
+ IPU3ViewfinderSize.height + 1)
+ .alignedUpTo(IMGU_OUTPUT_WIDTH_MARGIN,
+ IMGU_OUTPUT_HEIGHT_MARGIN)
+ .boundedTo(minSize);
+
+ /*
+	 * Re-scale to the sensor's native coordinates. Report (0,0) as the
+	 * top-left corner, as we allow applications to freely pan the crop area.
+ */
+ Rectangle minCrop = Rectangle(minSize).scaledBy(analogueCrop.size(),
+ sensorInfo.outputSize);
+
+ controls[&controls::ScalerCrop] = ControlInfo(minCrop, maxCrop, maxCrop);
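
The scaling step above maps a rectangle expressed in sensor output pixels into analogue-crop (native) pixels by multiplying by the ratio of the two sizes. A sketch of the arithmetic with illustrative sizes; Rect and scaledBy() are simplified stand-ins for the libcamera Rectangle API:

#include <iostream>

struct Rect { int x, y, w, h; };

/* Simplified Rectangle::scaledBy(): scale by numerator/denominator sizes. */
Rect scaledBy(const Rect &r, int numW, int denW, int numH, int denH)
{
	return { r.x * numW / denW, r.y * numH / denH,
		 r.w * numW / denW, r.h * numH / denH };
}

int main()
{
	/* Illustrative: sensor output size vs. analogue crop size. */
	int outW = 2048, outH = 1536;   /* sensorInfo.outputSize */
	int cropW = 2080, cropH = 1560; /* analogueCrop.size() */

	/* Minimum pipeline size, already aligned to the ImgU margins. */
	Rect minSize{ 0, 0, 1288, 728 };

	Rect minCrop = scaledBy(minSize, cropW, outW, cropH, outH);
	std::cout << minCrop.w << "x" << minCrop.h << "\n"; /* 1308x739 */
}
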
+
+ data->controlInfo_ = std::move(controls);
+
+ return 0;
+}
+
+/**
* \brief Initialise ImgU and CIO2 devices associated with cameras
*
* Initialise the two ImgU instances and create cameras with an associated
@@ -768,9 +1104,53 @@ int PipelineHandlerIPU3::registerCameras()
if (ret)
continue;
+ ret = data->loadIPA();
+ if (ret)
+ continue;
+
/* Initialize the camera properties. */
data->properties_ = cio2->sensor()->properties();
+ ret = initControls(data.get());
+ if (ret)
+ continue;
+
+ /*
+		 * \todo Read delay values from the sensor itself or from a
+		 * sensor database. For now, use generic values taken from
+		 * the Raspberry Pi.
+ */
+ std::unordered_map<uint32_t, DelayedControls::ControlParams> params = {
+ { V4L2_CID_ANALOGUE_GAIN, { 1, false } },
+ { V4L2_CID_EXPOSURE, { 2, false } },
+ };
+
+ data->delayedCtrls_ =
+ std::make_unique<DelayedControls>(cio2->sensor()->device(),
+ params);
+ data->cio2_.frameStart().connect(data->delayedCtrls_.get(),
+ &DelayedControls::applyControls);
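
The delay parameters above express how many frames late a control written to the sensor takes effect (1 frame for analogue gain, 2 for exposure). A toy model of the replay idea behind DelayedControls, not its real implementation:

#include <cstdint>
#include <iostream>
#include <map>

int main()
{
	const unsigned int exposureDelay = 2;
	std::map<uint32_t, int32_t> exposureByFrame;

	uint32_t queuedFrame = 0;
	auto push = [&](int32_t exposure) {
		/* A value pushed now takes effect exposureDelay frames later. */
		exposureByFrame[queuedFrame + exposureDelay] = exposure;
		queuedFrame++;
	};

	push(100); /* takes effect at frame 2 */
	push(200); /* takes effect at frame 3 */

	for (uint32_t frame = 0; frame <= 3; frame++) {
		auto it = exposureByFrame.find(frame);
		std::cout << "frame " << frame << ": exposure "
			  << (it != exposureByFrame.end() ? it->second : -1)
			  << "\n";
	}
}
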
+
+ /* Convert the sensor rotation to a transformation */
+ int32_t rotation = 0;
+ if (data->properties_.contains(properties::Rotation))
+ rotation = data->properties_.get(properties::Rotation);
+ else
+ LOG(IPU3, Warning) << "Rotation control not exposed by "
+ << cio2->sensor()->id()
+ << ". Assume rotation 0";
+
+ bool success;
+ data->rotationTransform_ = transformFromRotation(rotation, &success);
+ if (!success)
+ LOG(IPU3, Warning) << "Invalid rotation of " << rotation
+ << " degrees: ignoring";
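
transformFromRotation() maps the Rotation property onto a 2D plane transform and flags unsupported angles. A sketch of a plausible mapping for the four right angles (the enum and its values are illustrative, not libcamera's):

#include <iostream>

enum class Transform { Identity, Rot90, Rot180, Rot270 };

Transform transformFromRotation(int angle, bool *success)
{
	*success = true;
	switch (((angle % 360) + 360) % 360) {
	case 0:   return Transform::Identity;
	case 90:  return Transform::Rot90;
	case 180: return Transform::Rot180;
	case 270: return Transform::Rot270;
	}
	*success = false; /* anything else cannot be represented */
	return Transform::Identity;
}

int main()
{
	bool ok;
	Transform t = transformFromRotation(180, &ok);
	std::cout << "ok=" << ok << " transform=" << static_cast<int>(t)
		  << "\n"; /* ok=1 transform=2 */
}
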
+
+ ControlList ctrls = cio2->sensor()->getControls({ V4L2_CID_HFLIP });
+ if (!ctrls.empty())
+ /* We assume the sensor supports VFLIP too. */
+ data->supportsFlips_ = true;
+
/**
* \todo Dynamically assign ImgU and output devices to each
* stream and camera; as of now, limit support to two cameras
@@ -789,12 +1169,18 @@ int PipelineHandlerIPU3::registerCameras()
*/
data->cio2_.bufferReady().connect(data.get(),
&IPU3CameraData::cio2BufferReady);
+ data->cio2_.bufferAvailable.connect(
+ data.get(), &IPU3CameraData::queuePendingRequests);
data->imgu_->input_->bufferReady.connect(&data->cio2_,
&CIO2Device::tryReturnBuffer);
data->imgu_->output_->bufferReady.connect(data.get(),
&IPU3CameraData::imguOutputBufferReady);
data->imgu_->viewfinder_->bufferReady.connect(data.get(),
&IPU3CameraData::imguOutputBufferReady);
+ data->imgu_->param_->bufferReady.connect(data.get(),
+ &IPU3CameraData::paramBufferReady);
+ data->imgu_->stat_->bufferReady.connect(data.get(),
+ &IPU3CameraData::statBufferReady);
/* Create and register the Camera instance. */
std::string cameraId = cio2->sensor()->id();
@@ -814,6 +1200,75 @@ int PipelineHandlerIPU3::registerCameras()
return numCameras ? 0 : -ENODEV;
}
+int IPU3CameraData::loadIPA()
+{
+ ipa_ = IPAManager::createIPA<ipa::ipu3::IPAProxyIPU3>(pipe_, 1, 1);
+ if (!ipa_)
+ return -ENOENT;
+
+ ipa_->queueFrameAction.connect(this, &IPU3CameraData::queueFrameAction);
+
+ CameraSensor *sensor = cio2_.sensor();
+ int ret = ipa_->init(IPASettings{ "", sensor->model() });
+ if (ret) {
+ LOG(IPU3, Error) << "Failed to initialise the IPU3 IPA";
+ return ret;
+ }
+
+ return 0;
+}
+
+void IPU3CameraData::queueFrameAction(unsigned int id,
+ const ipa::ipu3::IPU3Action &action)
+{
+ switch (action.op) {
+ case ipa::ipu3::ActionSetSensorControls: {
+ const ControlList &controls = action.controls;
+ delayedCtrls_->push(controls);
+ break;
+ }
+ case ipa::ipu3::ActionParamFilled: {
+ IPU3Frames::Info *info = frameInfos_.find(id);
+ if (!info)
+ break;
+
+ /* Queue all buffers from the request aimed for the ImgU. */
+ for (auto it : info->request->buffers()) {
+ const Stream *stream = it.first;
+ FrameBuffer *outbuffer = it.second;
+
+ if (stream == &outStream_)
+ imgu_->output_->queueBuffer(outbuffer);
+ else if (stream == &vfStream_)
+ imgu_->viewfinder_->queueBuffer(outbuffer);
+ }
+
+ imgu_->param_->queueBuffer(info->paramBuffer);
+ imgu_->stat_->queueBuffer(info->statBuffer);
+ imgu_->input_->queueBuffer(info->rawBuffer);
+
+ break;
+ }
+ case ipa::ipu3::ActionMetadataReady: {
+ IPU3Frames::Info *info = frameInfos_.find(id);
+ if (!info)
+ break;
+
+ Request *request = info->request;
+ request->metadata().merge(action.controls);
+
+ info->metadataProcessed = true;
+ if (frameInfos_.tryComplete(info))
+ pipe_->completeRequest(request);
+
+ break;
+ }
+ default:
+ LOG(IPU3, Error) << "Unknown action " << action.op;
+ break;
+ }
+}
+
/* -----------------------------------------------------------------------------
* Buffer Ready slots
*/
@@ -826,14 +1281,24 @@ int PipelineHandlerIPU3::registerCameras()
*/
void IPU3CameraData::imguOutputBufferReady(FrameBuffer *buffer)
{
- Request *request = buffer->request();
-
- if (!pipe_->completeBuffer(camera_, request, buffer))
- /* Request not completed yet, return here. */
+ IPU3Frames::Info *info = frameInfos_.find(buffer);
+ if (!info)
return;
- /* Mark the request as complete. */
- pipe_->completeRequest(camera_, request);
+ Request *request = info->request;
+
+ pipe_->completeBuffer(request, buffer);
+
+ request->metadata().set(controls::draft::PipelineDepth, 3);
+ /* \todo Move the ExposureTime control to the IPA. */
+ request->metadata().set(controls::ExposureTime, exposureTime_);
+ /* \todo Actually apply the scaler crop region to the ImgU. */
+ if (request->controls().contains(controls::ScalerCrop))
+ cropRegion_ = request->controls().get(controls::ScalerCrop);
+ request->metadata().set(controls::ScalerCrop, cropRegion_);
+
+ if (frameInfos_.tryComplete(info))
+ pipe_->completeRequest(request);
}
/**
@@ -845,27 +1310,93 @@ void IPU3CameraData::imguOutputBufferReady(FrameBuffer *buffer)
*/
void IPU3CameraData::cio2BufferReady(FrameBuffer *buffer)
{
- /* \todo Handle buffer failures when state is set to BufferError. */
- if (buffer->metadata().status == FrameMetadata::FrameCancelled)
+ IPU3Frames::Info *info = frameInfos_.find(buffer);
+ if (!info)
return;
- Request *request = buffer->request();
+ Request *request = info->request;
/*
- * If the request contains a buffer for the RAW stream only, complete it
- * now as there's no need for ImgU processing.
+ * Record the sensor's timestamp in the request metadata.
+ *
+ * \todo The sensor timestamp should be better estimated by connecting
+ * to the V4L2Device::frameStart signal.
*/
- if (request->findBuffer(&rawStream_)) {
- bool isComplete = pipe_->completeBuffer(camera_, request, buffer);
- if (isComplete) {
- pipe_->completeRequest(camera_, request);
- return;
+ request->metadata().set(controls::SensorTimestamp,
+ buffer->metadata().timestamp);
+
+ /* If the buffer is cancelled force a complete of the whole request. */
+ if (buffer->metadata().status == FrameMetadata::FrameCancelled) {
+ for (auto it : request->buffers()) {
+ FrameBuffer *b = it.second;
+ b->cancel();
+ pipe_->completeBuffer(request, b);
}
+
+ frameInfos_.remove(info);
+ pipe_->completeRequest(request);
+ return;
+ }
+
+ if (request->findBuffer(&rawStream_))
+ pipe_->completeBuffer(request, buffer);
+
+ ipa::ipu3::IPU3Event ev;
+ ev.op = ipa::ipu3::EventFillParams;
+ ev.frame = info->id;
+ ev.bufferId = info->paramBuffer->cookie();
+ ipa_->processEvent(ev);
+}
+
+void IPU3CameraData::paramBufferReady(FrameBuffer *buffer)
+{
+ IPU3Frames::Info *info = frameInfos_.find(buffer);
+ if (!info)
+ return;
+
+ info->paramDequeued = true;
+
+ /*
+ * tryComplete() will delete info if it completes the IPU3Frame.
+	 * In that event, we must have obtained the Request beforehand.
+ *
+ * \todo Improve the FrameInfo API to avoid this type of issue
+ */
+ Request *request = info->request;
+
+ if (frameInfos_.tryComplete(info))
+ pipe_->completeRequest(request);
+}
+
+void IPU3CameraData::statBufferReady(FrameBuffer *buffer)
+{
+ IPU3Frames::Info *info = frameInfos_.find(buffer);
+ if (!info)
+ return;
+
+ Request *request = info->request;
+
+ if (buffer->metadata().status == FrameMetadata::FrameCancelled) {
+ info->metadataProcessed = true;
+
+ /*
+ * tryComplete() will delete info if it completes the IPU3Frame.
+	 * In that event, we must have obtained the Request beforehand.
+ */
+ if (frameInfos_.tryComplete(info))
+ pipe_->completeRequest(request);
+
+ return;
}
- imgu_->input_->queueBuffer(buffer);
+ ipa::ipu3::IPU3Event ev;
+ ev.op = ipa::ipu3::EventStatReady;
+ ev.frame = info->id;
+ ev.bufferId = info->statBuffer->cookie();
+ ev.frameTimestamp = request->metadata().get(controls::SensorTimestamp);
+ ipa_->processEvent(ev);
}
-REGISTER_PIPELINE_HANDLER(PipelineHandlerIPU3);
+REGISTER_PIPELINE_HANDLER(PipelineHandlerIPU3)
} /* namespace libcamera */
diff --git a/src/libcamera/pipeline/ipu3/meson.build b/src/libcamera/pipeline/ipu3/meson.build
index d60e07ae..a1b0b31a 100644
--- a/src/libcamera/pipeline/ipu3/meson.build
+++ b/src/libcamera/pipeline/ipu3/meson.build
@@ -2,6 +2,7 @@
libcamera_sources += files([
'cio2.cpp',
+ 'frames.cpp',
'imgu.cpp',
'ipu3.cpp',
])
diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
index 46424493..30dc5b97 100644
--- a/src/libcamera/pipeline/meson.build
+++ b/src/libcamera/pipeline/meson.build
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: CC0-1.0
-foreach pipeline : get_option('pipelines')
+foreach pipeline : pipelines
subdir(pipeline)
endforeach
diff --git a/src/libcamera/pipeline/raspberrypi/dma_heaps.cpp b/src/libcamera/pipeline/raspberrypi/dma_heaps.cpp
index 4d5dd6cb..573ea11d 100644
--- a/src/libcamera/pipeline/raspberrypi/dma_heaps.cpp
+++ b/src/libcamera/pipeline/raspberrypi/dma_heaps.cpp
@@ -14,7 +14,7 @@
#include <sys/ioctl.h>
#include <unistd.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
/*
* /dev/dma-heap/linux,cma is the dma-heap allocator, which allows dmaheap-cma
diff --git a/src/libcamera/pipeline/raspberrypi/meson.build b/src/libcamera/pipeline/raspberrypi/meson.build
index 7c5b6ff7..f1a2f5ee 100644
--- a/src/libcamera/pipeline/raspberrypi/meson.build
+++ b/src/libcamera/pipeline/raspberrypi/meson.build
@@ -4,5 +4,4 @@ libcamera_sources += files([
'dma_heaps.cpp',
'raspberrypi.cpp',
'rpi_stream.cpp',
- 'staggered_ctrl.cpp',
])
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index d4d04c0d..f821d8fe 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -1,12 +1,13 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Ltd.
*
* raspberrypi.cpp - Pipeline handler for Raspberry Pi devices
*/
#include <algorithm>
#include <assert.h>
#include <fcntl.h>
+#include <memory>
#include <mutex>
#include <queue>
#include <sys/mman.h>
@@ -17,25 +18,29 @@
#include <libcamera/file_descriptor.h>
#include <libcamera/formats.h>
#include <libcamera/ipa/raspberrypi.h>
+#include <libcamera/ipa/raspberrypi_ipa_interface.h>
+#include <libcamera/ipa/raspberrypi_ipa_proxy.h>
#include <libcamera/logging.h>
#include <libcamera/property_ids.h>
#include <libcamera/request.h>
+#include <libcamera/base/utils.h>
+
+#include <linux/bcm2835-isp.h>
#include <linux/videodev2.h>
#include "libcamera/internal/bayer_format.h"
#include "libcamera/internal/camera_sensor.h"
+#include "libcamera/internal/delayed_controls.h"
#include "libcamera/internal/device_enumerator.h"
+#include "libcamera/internal/framebuffer.h"
#include "libcamera/internal/ipa_manager.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/pipeline_handler.h"
-#include "libcamera/internal/utils.h"
-#include "libcamera/internal/v4l2_controls.h"
#include "libcamera/internal/v4l2_videodevice.h"
#include "dma_heaps.h"
#include "rpi_stream.h"
-#include "staggered_ctrl.h"
namespace libcamera {
@@ -73,7 +78,7 @@ V4L2DeviceFormat findBestMode(V4L2VideoDevice::Formats &formatsMap,
const Size &req)
{
double bestScore = std::numeric_limits<double>::max(), score;
- V4L2DeviceFormat bestMode = {};
+ V4L2DeviceFormat bestMode;
#define PENALTY_AR 1500.0
#define PENALTY_8BIT 2000.0
@@ -133,7 +138,7 @@ class RPiCameraData : public CameraData
{
public:
RPiCameraData(PipelineHandler *pipe)
- : CameraData(pipe), sensor_(nullptr), state_(State::Stopped),
+ : CameraData(pipe), state_(State::Stopped),
supportsFlips_(false), flipsAlterBayerOrder_(false),
dropFrameCount_(0), ispOutputCount_(0)
{
@@ -141,10 +146,15 @@ public:
void frameStarted(uint32_t sequence);
- int loadIPA();
+ int loadIPA(ipa::RPi::SensorConfig *sensorConfig);
int configureIPA(const CameraConfiguration *config);
- void queueFrameAction(unsigned int frame, const IPAOperationData &action);
+ void statsMetadataComplete(uint32_t bufferId, const ControlList &controls);
+ void runIsp(uint32_t bufferId);
+ void embeddedComplete(uint32_t bufferId);
+ void setIspControls(const ControlList &controls);
+ void setDelayedControls(const ControlList &controls);
+ void setSensorControls(ControlList &controls);
/* bufferComplete signal handlers. */
void unicamBufferDequeue(FrameBuffer *buffer);
@@ -155,8 +165,11 @@ public:
void handleStreamBuffer(FrameBuffer *buffer, RPi::Stream *stream);
void handleExternalBuffer(FrameBuffer *buffer, RPi::Stream *stream);
void handleState();
+ void applyScalerCrop(const ControlList &controls);
+
+ std::unique_ptr<ipa::RPi::IPAProxyRPi> ipa_;
- CameraSensor *sensor_;
+ std::unique_ptr<CameraSensor> sensor_;
/* Array of Unicam and ISP device streams and associated buffers/streams. */
RPi::Device<Unicam, 2> unicam_;
RPi::Device<Isp, 4> isp_;
@@ -169,8 +182,7 @@ public:
RPi::DmaHeap dmaHeap_;
FileDescriptor lsTable_;
- RPi::StaggeredCtrl staggeredCtrl_;
- uint32_t expectedSequence_;
+ std::unique_ptr<DelayedControls> delayedCtrls_;
bool sensorMetadata_;
/*
@@ -180,7 +192,13 @@ public:
*/
enum class State { Stopped, Idle, Busy, IpaComplete };
State state_;
- std::queue<FrameBuffer *> bayerQueue_;
+
+ struct BayerFrame {
+ FrameBuffer *buffer;
+ ControlList controls;
+ };
+
+ std::queue<BayerFrame> bayerQueue_;
std::queue<FrameBuffer *> embeddedQueue_;
std::deque<Request *> requestQueue_;
@@ -193,14 +211,20 @@ public:
bool flipsAlterBayerOrder_;
BayerFormat::Order nativeBayerOrder_;
+ /* For handling digital zoom. */
+ IPACameraSensorInfo sensorInfo_;
+ Rectangle ispCrop_; /* crop in ISP (camera mode) pixels */
+ Rectangle scalerCrop_; /* crop in sensor native pixels */
+ Size ispMinCropSize_;
+
unsigned int dropFrameCount_;
private:
void checkRequestCompleted();
+ void fillRequestMetadata(const ControlList &bufferControls,
+ Request *request);
void tryRunPipeline();
- void tryFlushQueues();
- FrameBuffer *updateQueue(std::queue<FrameBuffer *> &q, uint64_t timestamp,
- RPi::Stream *stream);
+ bool findMatchingBuffers(BayerFrame &bayerFrame, FrameBuffer *&embeddedBuffer);
unsigned int ispOutputCount_;
};
@@ -230,7 +254,7 @@ public:
int exportFrameBuffers(Camera *camera, Stream *stream,
std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;
- int start(Camera *camera) override;
+ int start(Camera *camera, const ControlList *controls) override;
void stop(Camera *camera) override;
int queueRequestDevice(Camera *camera, Request *request) override;
@@ -344,7 +368,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
*/
V4L2PixelFormat fourcc = sensorFormat.fourcc;
if (data_->flipsAlterBayerOrder_) {
- BayerFormat bayer(fourcc);
+ BayerFormat bayer = BayerFormat::fromV4L2PixelFormat(fourcc);
bayer.order = data_->nativeBayerOrder_;
bayer = bayer.transform(combined);
fourcc = bayer.toV4L2PixelFormat();
@@ -422,7 +446,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
status = Adjusted;
}
- V4L2DeviceFormat format = {};
+ V4L2DeviceFormat format;
format.fourcc = dev->toV4L2PixelFormat(cfg.pixelFormat);
format.size = cfg.size;
@@ -481,8 +505,16 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
break;
case StreamRole::VideoRecording:
+ /*
+ * The colour denoise algorithm requires the analysis
+ * image, produced by the second ISP output, to be in
+ * YUV420 format. Select this format as the default, to
+ * maximize chances that it will be picked by
+ * applications and enable usage of the colour denoise
+ * algorithm.
+ */
fmts = data->isp_[Isp::Output0].dev()->formats();
- pixelFormat = formats::NV12;
+ pixelFormat = formats::YUV420;
size = { 1920, 1080 };
bufferCount = 4;
outCount++;
@@ -512,9 +544,9 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
/* Translate the V4L2PixelFormat to PixelFormat. */
std::map<PixelFormat, std::vector<SizeRange>> deviceFormats;
for (const auto &format : fmts) {
- PixelFormat pixelFormat = format.first.toPixelFormat();
- if (pixelFormat.isValid())
- deviceFormats[pixelFormat] = format.second;
+ PixelFormat pf = format.first.toPixelFormat();
+ if (pf.isValid())
+ deviceFormats[pf] = format.second;
}
/* Add the stream format based on the device node used for the use case. */
@@ -578,6 +610,13 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
if (ret)
return ret;
+ /*
+ * The control ranges associated with the sensor may need updating
+ * after a format change.
+ * \todo Use the CameraSensor::setFormat API instead.
+ */
+ data->sensor_->updateControlInfo();
+
LOG(RPI, Info) << "Sensor: " << camera->id()
<< " - Selected mode: " << sensorFormat.toString();
@@ -586,73 +625,108 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
* because of flips in the sensor.
*/
ret = data->isp_[Isp::Input].dev()->setFormat(&sensorFormat);
+ if (ret)
+ return ret;
/*
* See which streams are requested, and route the user
* StreamConfiguration appropriately.
*/
- V4L2DeviceFormat format = {};
+ V4L2DeviceFormat format;
+ bool output0Set = false, output1Set = false;
for (unsigned i = 0; i < config->size(); i++) {
StreamConfiguration &cfg = config->at(i);
if (isRaw(cfg.pixelFormat)) {
cfg.setStream(&data->unicam_[Unicam::Image]);
- /*
- * We must set both Unicam streams as external, even
- * though the application may only request RAW frames.
- * This is because we match timestamps on both streams
- * to synchronise buffers.
- */
data->unicam_[Unicam::Image].setExternal(true);
- data->unicam_[Unicam::Embedded].setExternal(true);
continue;
}
- if (i == maxIndex) {
- /* ISP main output format. */
- V4L2VideoDevice *dev = data->isp_[Isp::Output0].dev();
- V4L2PixelFormat fourcc = dev->toV4L2PixelFormat(cfg.pixelFormat);
- format.size = cfg.size;
- format.fourcc = fourcc;
+ /* The largest resolution gets routed to the ISP Output 0 node. */
+ RPi::Stream *stream = i == maxIndex ? &data->isp_[Isp::Output0]
+ : &data->isp_[Isp::Output1];
- ret = dev->setFormat(&format);
- if (ret)
- return -EINVAL;
+ V4L2PixelFormat fourcc = stream->dev()->toV4L2PixelFormat(cfg.pixelFormat);
+ format.size = cfg.size;
+ format.fourcc = fourcc;
- if (format.size != cfg.size || format.fourcc != fourcc) {
- LOG(RPI, Error)
- << "Failed to set format on ISP capture0 device: "
- << format.toString();
- return -EINVAL;
- }
+ LOG(RPI, Debug) << "Setting " << stream->name() << " to "
+ << format.toString();
- cfg.setStream(&data->isp_[Isp::Output0]);
- data->isp_[Isp::Output0].setExternal(true);
+ ret = stream->dev()->setFormat(&format);
+ if (ret)
+ return -EINVAL;
+
+ if (format.size != cfg.size || format.fourcc != fourcc) {
+ LOG(RPI, Error)
+ << "Failed to set requested format on " << stream->name()
+ << ", returned " << format.toString();
+ return -EINVAL;
}
- /*
- * ISP second output format. This fallthrough means that if a
- * second output stream has not been configured, we simply use
- * the Output0 configuration.
- */
- V4L2VideoDevice *dev = data->isp_[Isp::Output1].dev();
- format.fourcc = dev->toV4L2PixelFormat(cfg.pixelFormat);
- format.size = cfg.size;
+ cfg.setStream(stream);
+ stream->setExternal(true);
+
+ if (i != maxIndex)
+ output1Set = true;
+ else
+ output0Set = true;
+ }
- ret = dev->setFormat(&format);
+ /*
+ * If ISP::Output0 stream has not been configured by the application,
+ * we must allow the hardware to generate an output so that the data
+ * flow in the pipeline handler remains consistent, and we still generate
+ * statistics for the IPA to use. So enable the output at a very low
+ * resolution for internal use.
+ *
+ * \todo Allow the pipeline to work correctly without Output0 and only
+ * statistics coming from the hardware.
+ */
+ if (!output0Set) {
+ maxSize = Size(320, 240);
+ format = {};
+ format.size = maxSize;
+ format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420, false);
+ ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
if (ret) {
LOG(RPI, Error)
- << "Failed to set format on ISP capture1 device: "
- << format.toString();
- return ret;
+ << "Failed to set default format on ISP Output0: "
+ << ret;
+ return -EINVAL;
}
- /*
- * If we have not yet provided a stream for this config, it
- * means this is to be routed from Output1.
- */
- if (!cfg.stream()) {
- cfg.setStream(&data->isp_[Isp::Output1]);
- data->isp_[Isp::Output1].setExternal(true);
+
+ LOG(RPI, Debug) << "Defaulting ISP Output0 format to "
+ << format.toString();
+ }
+
+ /*
+ * If ISP::Output1 stream has not been requested by the application, we
+ * set it up for internal use now. This second stream will be used for
+	 * fast colour denoise, and must be a quarter of the resolution of the
+	 * ISP::Output0 stream. However, also limit the maximum size to 1200
+	 * pixels in the larger dimension, to avoid being wasteful with buffer
+	 * allocations and memory bandwidth.
+ *
+ * \todo If Output 1 format is not YUV420, Output 1 ought to be disabled as
+ * colour denoise will not run.
+ */
+ if (!output1Set) {
+ V4L2DeviceFormat output1Format = format;
+ constexpr Size maxDimensions(1200, 1200);
+ const Size limit = maxDimensions.boundedToAspectRatio(format.size);
+
+ output1Format.size = (format.size / 2).boundedTo(limit).alignedDownTo(2, 2);
+
+ LOG(RPI, Debug) << "Setting ISP Output1 (internal) to "
+ << output1Format.toString();
+
+ ret = data->isp_[Isp::Output1].dev()->setFormat(&output1Format);
+ if (ret) {
+ LOG(RPI, Error) << "Failed to set format on ISP Output1: "
+ << ret;
+ return -EINVAL;
}
}
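
A worked example of the internal Output1 sizing above, assuming a 1920x1080 Output0: halve each dimension, cap to 1200 pixels in the larger dimension at the same aspect ratio, and align down to even values. The helpers are simplified stand-ins for the libcamera Size API:

#include <algorithm>
#include <cstdint>
#include <iostream>

struct Sz { unsigned int w, h; };

/* Shrink 'limit' to the aspect ratio of 'ratio' (boundedToAspectRatio). */
Sz boundedToAspectRatio(Sz limit, Sz ratio)
{
	uint64_t horiz = uint64_t(ratio.w) * limit.h;
	uint64_t vert = uint64_t(ratio.h) * limit.w;
	if (vert <= horiz)
		return { limit.w, unsigned(vert / ratio.w) };
	return { unsigned(horiz / ratio.h), limit.h };
}

int main()
{
	Sz output0{ 1920, 1080 };
	Sz maxDimensions{ 1200, 1200 };

	Sz limit = boundedToAspectRatio(maxDimensions, output0); /* 1200x675 */

	/* Half of Output0, capped to the limit, aligned down to 2x2. */
	Sz out1{ std::min(output0.w / 2, limit.w) & ~1u,
		 std::min(output0.h / 2, limit.h) & ~1u };

	std::cout << out1.w << "x" << out1.h << "\n"; /* 960x540 */
}
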
@@ -666,37 +740,57 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
return ret;
}
- /* Unicam embedded data output format. */
- format = {};
- format.fourcc = V4L2PixelFormat(V4L2_META_FMT_SENSOR_DATA);
- LOG(RPI, Debug) << "Setting embedded data format.";
- ret = data->unicam_[Unicam::Embedded].dev()->setFormat(&format);
- if (ret) {
- LOG(RPI, Error) << "Failed to set format on Unicam embedded: "
- << format.toString();
- return ret;
- }
+ /* Figure out the smallest selection the ISP will allow. */
+ Rectangle testCrop(0, 0, 1, 1);
+ data->isp_[Isp::Input].dev()->setSelection(V4L2_SEL_TGT_CROP, &testCrop);
+ data->ispMinCropSize_ = testCrop.size();
/* Adjust aspect ratio by providing crops on the input image. */
- Rectangle crop{ 0, 0, sensorFormat.size };
-
- int ar = maxSize.height * sensorFormat.size.width - maxSize.width * sensorFormat.size.height;
- if (ar > 0)
- crop.width = maxSize.width * sensorFormat.size.height / maxSize.height;
- else if (ar < 0)
- crop.height = maxSize.height * sensorFormat.size.width / maxSize.width;
-
- crop.width &= ~1;
- crop.height &= ~1;
+ Size size = sensorFormat.size.boundedToAspectRatio(maxSize);
+ Rectangle crop = size.centeredTo(Rectangle(sensorFormat.size).center());
+ data->ispCrop_ = crop;
- crop.x = (sensorFormat.size.width - crop.width) >> 1;
- crop.y = (sensorFormat.size.height - crop.height) >> 1;
data->isp_[Isp::Input].dev()->setSelection(V4L2_SEL_TGT_CROP, &crop);
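
The crop computed above is the largest centred rectangle with the output aspect ratio that fits in the sensor frame. A standalone sketch of the boundedToAspectRatio() and centeredTo() arithmetic, with illustrative sizes:

#include <cstdint>
#include <iostream>

struct Sz { unsigned int w, h; };
struct Pt { int x, y; };

int main()
{
	/* Illustrative: 4:3 sensor mode, 16:9 requested output. */
	Sz sensor{ 2592, 1944 };
	Sz maxSize{ 1920, 1080 };

	/* boundedToAspectRatio(): shrink the sensor size to 16:9. */
	uint64_t horiz = uint64_t(maxSize.w) * sensor.h;
	uint64_t vert = uint64_t(maxSize.h) * sensor.w;
	Sz crop = vert <= horiz
			  ? Sz{ sensor.w, unsigned(vert / maxSize.w) }
			  : Sz{ unsigned(horiz / maxSize.h), sensor.h };

	/* centeredTo(): centre the crop in the full sensor frame. */
	Pt topLeft{ int((sensor.w - crop.w) / 2), int((sensor.h - crop.h) / 2) };

	std::cout << crop.w << "x" << crop.h << "+" << topLeft.x << "+"
		  << topLeft.y << "\n"; /* 2592x1458+0+243 */
}
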
ret = data->configureIPA(config);
if (ret)
LOG(RPI, Error) << "Failed to configure the IPA: " << ret;
+ /*
+ * Configure the Unicam embedded data output format only if the sensor
+ * supports it.
+ */
+ if (data->sensorMetadata_) {
+ format = {};
+ format.fourcc = V4L2PixelFormat(V4L2_META_FMT_SENSOR_DATA);
+
+ LOG(RPI, Debug) << "Setting embedded data format.";
+ ret = data->unicam_[Unicam::Embedded].dev()->setFormat(&format);
+ if (ret) {
+ LOG(RPI, Error) << "Failed to set format on Unicam embedded: "
+ << format.toString();
+ return ret;
+ }
+
+ /*
+ * If a RAW/Bayer stream has been requested by the application,
+ * we must set both Unicam streams as external, even though the
+ * application may only request RAW frames. This is because we
+ * match timestamps on both streams to synchronise buffers.
+ */
+ if (rawStream)
+ data->unicam_[Unicam::Embedded].setExternal(true);
+ }
+
+ /*
+ * Update the ScalerCropMaximum to the correct value for this camera mode.
+ * For us, it's the same as the "analogue crop".
+ *
+ * \todo Make this property the ScalerCrop maximum value when dynamic
+ * controls are available and set it at validate() time
+ */
+ data->properties_.set(properties::ScalerCropMaximum, data->sensorInfo_.analogCrop);
+
return ret;
}
@@ -712,7 +806,7 @@ int PipelineHandlerRPi::exportFrameBuffers([[maybe_unused]] Camera *camera, Stre
return ret;
}
-int PipelineHandlerRPi::start(Camera *camera)
+int PipelineHandlerRPi::start(Camera *camera, const ControlList *controls)
{
RPiCameraData *data = cameraData(camera);
int ret;
@@ -725,18 +819,25 @@ int PipelineHandlerRPi::start(Camera *camera)
return ret;
}
- ret = queueAllBuffers(camera);
- if (ret) {
- LOG(RPI, Error) << "Failed to queue buffers";
- stop(camera);
- return ret;
- }
+ /* Check if a ScalerCrop control was specified. */
+ if (controls)
+ data->applyScalerCrop(*controls);
/* Start the IPA. */
- ret = data->ipa_->start();
+ ipa::RPi::StartConfig startConfig;
+ data->ipa_->start(controls ? *controls : ControlList{}, &startConfig);
+
+ /* Apply any gain/exposure settings that the IPA may have passed back. */
+ if (!startConfig.controls.empty())
+ data->setSensorControls(startConfig.controls);
+
+ /* Configure the number of dropped frames required on startup. */
+ data->dropFrameCount_ = startConfig.dropFrameCount;
+
+ /* We need to set the dropFrameCount_ before queueing buffers. */
+ ret = queueAllBuffers(camera);
if (ret) {
- LOG(RPI, Error)
- << "Failed to start IPA for " << camera->id();
+ LOG(RPI, Error) << "Failed to queue buffers";
stop(camera);
return ret;
}
@@ -757,14 +858,10 @@ int PipelineHandlerRPi::start(Camera *camera)
data->unicam_[Unicam::Image].dev()->setFrameStartEnabled(true);
/*
- * Write the last set of gain and exposure values to the camera before
- * starting. First check that the staggered ctrl has been initialised
- * by configure().
+ * Reset the delayed controls with the gain and exposure values set by
+ * the IPA.
*/
- ASSERT(data->staggeredCtrl_);
- data->staggeredCtrl_.reset();
- data->staggeredCtrl_.write();
- data->expectedSequence_ = 0;
+ data->delayedCtrls_->reset();
data->state_ = RPiCameraData::State::Idle;
@@ -789,7 +886,9 @@ void PipelineHandlerRPi::stop(Camera *camera)
/* Disable SOF event generation. */
data->unicam_[Unicam::Image].dev()->setFrameStartEnabled(false);
- /* This also stops the streams. */
+ for (auto const stream : data->streams_)
+ stream->dev()->streamOff();
+
data->clearIncompleteRequests();
data->bayerQueue_ = {};
data->embeddedQueue_ = {};
@@ -881,18 +980,6 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
data->isp_[Isp::Output1] = RPi::Stream("ISP Output1", isp_->getEntityByName("bcm2835-isp0-capture2"));
data->isp_[Isp::Stats] = RPi::Stream("ISP Stats", isp_->getEntityByName("bcm2835-isp0-capture3"));
- /* This is just for convenience so that we can easily iterate over all streams. */
- for (auto &stream : data->unicam_)
- data->streams_.push_back(&stream);
- for (auto &stream : data->isp_)
- data->streams_.push_back(&stream);
-
- /* Open all Unicam and ISP streams. */
- for (auto const stream : data->streams_) {
- if (stream->dev()->open())
- return false;
- }
-
/* Wire up all the buffer connections. */
data->unicam_[Unicam::Image].dev()->frameStart.connect(data.get(), &RPiCameraData::frameStarted);
data->unicam_[Unicam::Image].dev()->bufferReady.connect(data.get(), &RPiCameraData::unicamBufferDequeue);
@@ -905,7 +992,7 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
/* Identify the sensor. */
for (MediaEntity *entity : unicam_->entities()) {
if (entity->function() == MEDIA_ENT_F_CAM_SENSOR) {
- data->sensor_ = new CameraSensor(entity);
+ data->sensor_ = std::make_unique<CameraSensor>(entity);
break;
}
}
@@ -916,17 +1003,57 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
if (data->sensor_->init())
return false;
- if (data->loadIPA()) {
+ ipa::RPi::SensorConfig sensorConfig;
+ if (data->loadIPA(&sensorConfig)) {
LOG(RPI, Error) << "Failed to load a suitable IPA library";
return false;
}
+ /*
+ * Open all Unicam and ISP streams. The exception is the embedded data
+ * stream, which only gets opened below if the IPA reports that the sensor
+ * supports embedded data.
+ *
+ * The below grouping is just for convenience so that we can easily
+ * iterate over all streams in one go.
+ */
+ data->streams_.push_back(&data->unicam_[Unicam::Image]);
+ if (sensorConfig.sensorMetadata)
+ data->streams_.push_back(&data->unicam_[Unicam::Embedded]);
+
+ for (auto &stream : data->isp_)
+ data->streams_.push_back(&stream);
+
+ for (auto stream : data->streams_) {
+ if (stream->dev()->open())
+ return false;
+ }
+
+ /*
+ * Setup our delayed control writer with the sensor default
+ * gain and exposure delays. Mark VBLANK for priority write.
+ */
+ std::unordered_map<uint32_t, DelayedControls::ControlParams> params = {
+ { V4L2_CID_ANALOGUE_GAIN, { sensorConfig.gainDelay, false } },
+ { V4L2_CID_EXPOSURE, { sensorConfig.exposureDelay, false } },
+ { V4L2_CID_VBLANK, { sensorConfig.vblankDelay, true } }
+ };
+ data->delayedCtrls_ = std::make_unique<DelayedControls>(data->unicam_[Unicam::Image].dev(), params);
+ data->sensorMetadata_ = sensorConfig.sensorMetadata;
+
/* Register the controls that the Raspberry Pi IPA can handle. */
data->controlInfo_ = RPi::Controls;
/* Initialize the camera properties. */
data->properties_ = data->sensor_->properties();
/*
+ * Set a default value for the ScalerCropMaximum property to show
+ * that we support its use, however, initialise it to zero because
+ * it's not meaningful until a camera mode has been chosen.
+ */
+ data->properties_.set(properties::ScalerCropMaximum, Rectangle{});
+
+ /*
* We cache three things about the sensor in relation to transforms
* (meaning horizontal and vertical flips).
*
@@ -949,14 +1076,14 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
ControlList ctrls(dev->controls());
ctrls.set(V4L2_CID_HFLIP, 0);
ctrls.set(V4L2_CID_VFLIP, 0);
- dev->setControls(&ctrls);
+ data->setSensorControls(ctrls);
}
/* Look for a valid Bayer format. */
BayerFormat bayerFormat;
for (const auto &iter : dev->formats()) {
V4L2PixelFormat v4l2Format = iter.first;
- bayerFormat = BayerFormat(v4l2Format);
+ bayerFormat = BayerFormat::fromV4L2PixelFormat(v4l2Format);
if (bayerFormat.isValid())
break;
}
@@ -1008,7 +1135,7 @@ int PipelineHandlerRPi::queueAllBuffers(Camera *camera)
*/
unsigned int i;
for (i = 0; i < data->dropFrameCount_; i++) {
- int ret = stream->queueBuffer(nullptr);
+ ret = stream->queueBuffer(nullptr);
if (ret)
return ret;
}
@@ -1025,7 +1152,7 @@ int PipelineHandlerRPi::prepareBuffers(Camera *camera)
/*
* Decide how many internal buffers to allocate. For now, simply look
- * at how many external buffers will be provided. Will need to improve
+ * at how many external buffers will be provided. We'll need to improve
* this logic. However, we really must have all streams allocate the same
* number of buffers to simplify error handling in queueRequestDevice().
*/
@@ -1044,8 +1171,10 @@ int PipelineHandlerRPi::prepareBuffers(Camera *camera)
* Pass the stats and embedded data buffers to the IPA. No other
* buffers need to be passed.
*/
- mapBuffers(camera, data->isp_[Isp::Stats].getBuffers(), RPi::BufferMask::STATS);
- mapBuffers(camera, data->unicam_[Unicam::Embedded].getBuffers(), RPi::BufferMask::EMBEDDED_DATA);
+ mapBuffers(camera, data->isp_[Isp::Stats].getBuffers(), ipa::RPi::MaskStats);
+ if (data->sensorMetadata_)
+ mapBuffers(camera, data->unicam_[Unicam::Embedded].getBuffers(),
+ ipa::RPi::MaskEmbeddedData);
return 0;
}
@@ -1062,8 +1191,8 @@ void PipelineHandlerRPi::mapBuffers(Camera *camera, const RPi::BufferMap &buffer
* handler and the IPA.
*/
for (auto const &it : buffers) {
- ipaBuffers.push_back({ .id = mask | it.first,
- .planes = it.second->planes() });
+ ipaBuffers.push_back(IPABuffer(mask | it.first,
+ it.second->planes()));
data->ipaBuffers_.insert(mask | it.first);
}
@@ -1088,22 +1217,36 @@ void RPiCameraData::frameStarted(uint32_t sequence)
LOG(RPI, Debug) << "frame start " << sequence;
/* Write any controls for the next frame as soon as we can. */
- staggeredCtrl_.write();
+ delayedCtrls_->applyControls(sequence);
}
-int RPiCameraData::loadIPA()
+int RPiCameraData::loadIPA(ipa::RPi::SensorConfig *sensorConfig)
{
- ipa_ = IPAManager::createIPA(pipe_, 1, 1);
+ ipa_ = IPAManager::createIPA<ipa::RPi::IPAProxyRPi>(pipe_, 1, 1);
+
if (!ipa_)
return -ENOENT;
- ipa_->queueFrameAction.connect(this, &RPiCameraData::queueFrameAction);
+ ipa_->statsMetadataComplete.connect(this, &RPiCameraData::statsMetadataComplete);
+ ipa_->runIsp.connect(this, &RPiCameraData::runIsp);
+ ipa_->embeddedComplete.connect(this, &RPiCameraData::embeddedComplete);
+ ipa_->setIspControls.connect(this, &RPiCameraData::setIspControls);
+ ipa_->setDelayedControls.connect(this, &RPiCameraData::setDelayedControls);
- IPASettings settings{
- .configurationFile = ipa_->configurationFile(sensor_->model() + ".json")
- };
+ /*
+ * The configuration (tuning file) is made from the sensor name unless
+ * the environment variable overrides it.
+ */
+ std::string configurationFile;
+ char const *configFromEnv = utils::secure_getenv("LIBCAMERA_RPI_TUNING_FILE");
+ if (!configFromEnv || *configFromEnv == '\0')
+ configurationFile = ipa_->configurationFile(sensor_->model() + ".json");
+ else
+ configurationFile = std::string(configFromEnv);
- return ipa_->init(settings);
+ IPASettings settings(configurationFile, sensor_->model());
+
+ return ipa_->init(settings, sensorConfig);
}
int RPiCameraData::configureIPA(const CameraConfiguration *config)
@@ -1113,20 +1256,16 @@ int RPiCameraData::configureIPA(const CameraConfiguration *config)
static_cast<const RPiCameraConfiguration *>(config);
std::map<unsigned int, IPAStream> streamConfig;
- std::map<unsigned int, const ControlInfoMap &> entityControls;
- IPAOperationData ipaConfig = {};
+ std::map<unsigned int, ControlInfoMap> entityControls;
+ ipa::RPi::IPAConfig ipaConfig;
- /* Get the device format to pass to the IPA. */
- V4L2DeviceFormat sensorFormat;
- unicam_[Unicam::Image].dev()->getFormat(&sensorFormat);
/* Inform IPA of stream configuration and sensor controls. */
unsigned int i = 0;
for (auto const &stream : isp_) {
if (stream.isExternal()) {
- streamConfig[i++] = {
- .pixelFormat = stream.configuration().pixelFormat,
- .size = stream.configuration().size
- };
+ streamConfig[i++] = IPAStream(
+ stream.configuration().pixelFormat,
+ stream.configuration().size);
}
}
@@ -1134,142 +1273,143 @@ int RPiCameraData::configureIPA(const CameraConfiguration *config)
entityControls.emplace(1, isp_[Isp::Input].dev()->controls());
/* Always send the user transform to the IPA. */
- ipaConfig.data = { static_cast<unsigned int>(config->transform) };
+ ipaConfig.transform = static_cast<unsigned int>(config->transform);
/* Allocate the lens shading table via dmaHeap and pass to the IPA. */
if (!lsTable_.isValid()) {
- lsTable_ = dmaHeap_.alloc("ls_grid", RPi::MaxLsGridSize);
+ lsTable_ = dmaHeap_.alloc("ls_grid", ipa::RPi::MaxLsGridSize);
if (!lsTable_.isValid())
return -ENOMEM;
/* Allow the IPA to mmap the LS table via the file descriptor. */
- ipaConfig.operation = RPi::IPA_CONFIG_LS_TABLE;
- ipaConfig.data.push_back(static_cast<unsigned int>(lsTable_.fd()));
+ /*
+ * \todo Investigate if mapping the lens shading table buffer
+ * could be handled with mapBuffers().
+ */
+ ipaConfig.lsTableHandle = lsTable_;
}
- CameraSensorInfo sensorInfo = {};
- int ret = sensor_->sensorInfo(&sensorInfo);
+ /* We store the IPACameraSensorInfo for digital zoom calculations. */
+ int ret = sensor_->sensorInfo(&sensorInfo_);
if (ret) {
LOG(RPI, Error) << "Failed to retrieve camera sensor info";
return ret;
}
/* Ready the IPA - it must know about the sensor resolution. */
- IPAOperationData result;
-
- ipa_->configure(sensorInfo, streamConfig, entityControls, ipaConfig,
- &result);
-
- unsigned int resultIdx = 0;
- if (result.operation & RPi::IPA_CONFIG_STAGGERED_WRITE) {
- /*
- * Setup our staggered control writer with the sensor default
- * gain and exposure delays.
- */
- if (!staggeredCtrl_) {
- staggeredCtrl_.init(unicam_[Unicam::Image].dev(),
- { { V4L2_CID_ANALOGUE_GAIN, result.data[resultIdx++] },
- { V4L2_CID_EXPOSURE, result.data[resultIdx++] } });
- sensorMetadata_ = result.data[resultIdx++];
- }
-
- /*
- * Configure the H/V flip controls based on the combination of
- * the sensor and user transform.
- */
- if (supportsFlips_) {
- ControlList ctrls(unicam_[Unicam::Image].dev()->controls());
- ctrls.set(V4L2_CID_HFLIP,
- static_cast<int32_t>(!!(rpiConfig->combinedTransform_ & Transform::HFlip)));
- ctrls.set(V4L2_CID_VFLIP,
- static_cast<int32_t>(!!(rpiConfig->combinedTransform_ & Transform::VFlip)));
- unicam_[Unicam::Image].dev()->setControls(&ctrls);
- }
+ ControlList controls;
+ ret = ipa_->configure(sensorInfo_, streamConfig, entityControls, ipaConfig,
+ &controls);
+ if (ret < 0) {
+ LOG(RPI, Error) << "IPA configuration failed!";
+ return -EPIPE;
}
- if (result.operation & RPi::IPA_CONFIG_SENSOR) {
- const ControlList &ctrls = result.controls[0];
- if (!staggeredCtrl_.set(ctrls))
- LOG(RPI, Error) << "V4L2 staggered set failed";
+ /*
+ * Configure the H/V flip controls based on the combination of
+ * the sensor and user transform.
+ */
+ if (supportsFlips_) {
+ controls.set(V4L2_CID_HFLIP,
+ static_cast<int32_t>(!!(rpiConfig->combinedTransform_ & Transform::HFlip)));
+ controls.set(V4L2_CID_VFLIP,
+ static_cast<int32_t>(!!(rpiConfig->combinedTransform_ & Transform::VFlip)));
}
- if (result.operation & RPi::IPA_CONFIG_DROP_FRAMES) {
- /* Configure the number of dropped frames required on startup. */
- dropFrameCount_ = result.data[resultIdx++];
- }
+ if (!controls.empty())
+ setSensorControls(controls);
return 0;
}
-void RPiCameraData::queueFrameAction([[maybe_unused]] unsigned int frame,
- const IPAOperationData &action)
+void RPiCameraData::statsMetadataComplete(uint32_t bufferId, const ControlList &controls)
{
- /*
- * The following actions can be handled when the pipeline handler is in
- * a stopped state.
- */
- switch (action.operation) {
- case RPi::IPA_ACTION_V4L2_SET_STAGGERED: {
- const ControlList &controls = action.controls[0];
- if (!staggeredCtrl_.set(controls))
- LOG(RPI, Error) << "V4L2 staggered set failed";
- goto done;
- }
+ if (state_ == State::Stopped)
+ return;
- case RPi::IPA_ACTION_V4L2_SET_ISP: {
- ControlList controls = action.controls[0];
- isp_[Isp::Input].dev()->setControls(&controls);
- goto done;
- }
- }
+ FrameBuffer *buffer = isp_[Isp::Stats].getBuffers().at(bufferId);
+
+ handleStreamBuffer(buffer, &isp_[Isp::Stats]);
+
+ /* Add to the Request metadata buffer what the IPA has provided. */
+ Request *request = requestQueue_.front();
+ request->metadata().merge(controls);
+ state_ = State::IpaComplete;
+ handleState();
+}
+
+void RPiCameraData::runIsp(uint32_t bufferId)
+{
if (state_ == State::Stopped)
- goto done;
+ return;
- /*
- * The following actions must not be handled when the pipeline handler
- * is in a stopped state.
- */
- switch (action.operation) {
- case RPi::IPA_ACTION_STATS_METADATA_COMPLETE: {
- unsigned int bufferId = action.data[0];
- FrameBuffer *buffer = isp_[Isp::Stats].getBuffers().at(bufferId);
-
- handleStreamBuffer(buffer, &isp_[Isp::Stats]);
- /* Fill the Request metadata buffer with what the IPA has provided */
- requestQueue_.front()->metadata() = std::move(action.controls[0]);
- state_ = State::IpaComplete;
- break;
- }
+ FrameBuffer *buffer = unicam_[Unicam::Image].getBuffers().at(bufferId);
- case RPi::IPA_ACTION_EMBEDDED_COMPLETE: {
- unsigned int bufferId = action.data[0];
- FrameBuffer *buffer = unicam_[Unicam::Embedded].getBuffers().at(bufferId);
- handleStreamBuffer(buffer, &unicam_[Unicam::Embedded]);
- break;
- }
+ LOG(RPI, Debug) << "Input re-queue to ISP, buffer id " << bufferId
+ << ", timestamp: " << buffer->metadata().timestamp;
- case RPi::IPA_ACTION_RUN_ISP: {
- unsigned int bufferId = action.data[0];
- FrameBuffer *buffer = unicam_[Unicam::Image].getBuffers().at(bufferId);
+ isp_[Isp::Input].queueBuffer(buffer);
+ ispOutputCount_ = 0;
+ handleState();
+}
- LOG(RPI, Debug) << "Input re-queue to ISP, buffer id " << bufferId
- << ", timestamp: " << buffer->metadata().timestamp;
+void RPiCameraData::embeddedComplete(uint32_t bufferId)
+{
+ if (state_ == State::Stopped)
+ return;
- isp_[Isp::Input].queueBuffer(buffer);
- ispOutputCount_ = 0;
- break;
- }
+ FrameBuffer *buffer = unicam_[Unicam::Embedded].getBuffers().at(bufferId);
+ handleStreamBuffer(buffer, &unicam_[Unicam::Embedded]);
+ handleState();
+}
- default:
- LOG(RPI, Error) << "Unknown action " << action.operation;
- break;
+void RPiCameraData::setIspControls(const ControlList &controls)
+{
+ ControlList ctrls = controls;
+
+ if (ctrls.contains(V4L2_CID_USER_BCM2835_ISP_LENS_SHADING)) {
+ ControlValue &value =
+ const_cast<ControlValue &>(ctrls.get(V4L2_CID_USER_BCM2835_ISP_LENS_SHADING));
+ Span<uint8_t> s = value.data();
+ bcm2835_isp_lens_shading *ls =
+ reinterpret_cast<bcm2835_isp_lens_shading *>(s.data());
+ ls->dmabuf = lsTable_.fd();
}
-done:
+ isp_[Isp::Input].dev()->setControls(&ctrls);
+ handleState();
+}
+
+void RPiCameraData::setDelayedControls(const ControlList &controls)
+{
+ if (!delayedCtrls_->push(controls))
+ LOG(RPI, Error) << "V4L2 DelayedControl set failed";
handleState();
}
+void RPiCameraData::setSensorControls(ControlList &controls)
+{
+ /*
+	 * If both VBLANK and EXPOSURE are present, VBLANK must be written
+	 * ahead of, and separately from, EXPOSURE to avoid V4L2 rejecting
+	 * the latter. This is identical to what DelayedControls does with
+	 * the priority write flag.
+	 *
+	 * As a consequence of the logic below, VBLANK gets set twice, and we
+	 * rely on the V4L2 framework to not pass the second control set to the
+	 * driver, as the actual control value has not changed.
+ */
+ if (controls.contains(V4L2_CID_EXPOSURE) && controls.contains(V4L2_CID_VBLANK)) {
+ ControlList vblank_ctrl;
+
+ vblank_ctrl.set(V4L2_CID_VBLANK, controls.get(V4L2_CID_VBLANK));
+ unicam_[Unicam::Image].dev()->setControls(&vblank_ctrl);
+ }
+
+ unicam_[Unicam::Image].dev()->setControls(&controls);
+}
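
A toy sketch of the write ordering this function enforces, with a plain map standing in for ControlList and arbitrary stand-in control IDs; it emits the VBLANK-only set first, then the full set:

#include <cstdint>
#include <iostream>
#include <map>

int main()
{
	/* Arbitrary stand-in control IDs. */
	const unsigned int VBLANK = 1, EXPOSURE = 2;
	std::map<unsigned int, int32_t> controls{
		{ EXPOSURE, 1200 }, { VBLANK, 800 },
	};

	auto write = [](const std::map<unsigned int, int32_t> &set) {
		for (const auto &[id, value] : set)
			std::cout << "set ctrl " << id << " = " << value << "\n";
	};

	if (controls.count(EXPOSURE) && controls.count(VBLANK)) {
		/* Widen the frame first so the new exposure fits into it. */
		write({ { VBLANK, controls[VBLANK] } });
	}
	write(controls);
}
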
+
void RPiCameraData::unicamBufferDequeue(FrameBuffer *buffer)
{
RPi::Stream *stream = nullptr;
@@ -1294,31 +1434,19 @@ void RPiCameraData::unicamBufferDequeue(FrameBuffer *buffer)
<< ", timestamp: " << buffer->metadata().timestamp;
if (stream == &unicam_[Unicam::Image]) {
- bayerQueue_.push(buffer);
- } else {
- embeddedQueue_.push(buffer);
-
- std::unordered_map<uint32_t, int32_t> ctrl;
- int offset = buffer->metadata().sequence - expectedSequence_;
- staggeredCtrl_.get(ctrl, offset);
-
- expectedSequence_ = buffer->metadata().sequence + 1;
-
/*
- * Sensor metadata is unavailable, so put the expected ctrl
- * values (accounting for the staggered delays) into the empty
- * metadata buffer.
+		 * Look up the sensor controls used for this frame sequence from
+		 * DelayedControls and queue them along with the frame buffer.
*/
- if (!sensorMetadata_) {
- const FrameBuffer &fb = buffer->planes();
- uint32_t *mem = static_cast<uint32_t *>(::mmap(nullptr, fb.planes()[0].length,
- PROT_READ | PROT_WRITE,
- MAP_SHARED,
- fb.planes()[0].fd.fd(), 0));
- mem[0] = ctrl[V4L2_CID_EXPOSURE];
- mem[1] = ctrl[V4L2_CID_ANALOGUE_GAIN];
- munmap(mem, fb.planes()[0].length);
- }
+ ControlList ctrl = delayedCtrls_->get(buffer->metadata().sequence);
+ /*
+ * Add the frame timestamp to the ControlList for the IPA to use
+ * as it does not receive the FrameBuffer object.
+ */
+ ctrl.set(controls::SensorTimestamp, buffer->metadata().timestamp);
+ bayerQueue_.push({ buffer, std::move(ctrl) });
+ } else {
+ embeddedQueue_.push(buffer);
}
handleState();
@@ -1366,10 +1494,7 @@ void RPiCameraData::ispOutputDequeue(FrameBuffer *buffer)
* application until after the IPA signals so.
*/
if (stream == &isp_[Isp::Stats]) {
- IPAOperationData op;
- op.operation = RPi::IPA_EVENT_SIGNAL_STAT_READY;
- op.data = { RPi::BufferMask::STATS | static_cast<unsigned int>(index) };
- ipa_->processEvent(op);
+ ipa_->signalStatReady(ipa::RPi::MaskStats | static_cast<unsigned int>(index));
} else {
/* Any other ISP output can be handed back to the application now. */
handleStreamBuffer(buffer, stream);
@@ -1387,51 +1512,19 @@ void RPiCameraData::ispOutputDequeue(FrameBuffer *buffer)
void RPiCameraData::clearIncompleteRequests()
{
/*
- * Queue up any buffers passed in the request.
- * This is needed because streamOff() will then mark the buffers as
- * cancelled.
- */
- for (auto const request : requestQueue_) {
- for (auto const stream : streams_) {
- if (!stream->isExternal())
- continue;
-
- FrameBuffer *buffer = request->findBuffer(stream);
- if (buffer)
- stream->queueBuffer(buffer);
- }
- }
-
- /* Stop all streams. */
- for (auto const stream : streams_)
- stream->dev()->streamOff();
-
- /*
* All outstanding requests (and associated buffers) must be returned
- * back to the pipeline. The buffers would have been marked as
- * cancelled by the call to streamOff() earlier.
+ * back to the application.
*/
while (!requestQueue_.empty()) {
Request *request = requestQueue_.front();
- /*
- * A request could be partially complete,
- * i.e. we have returned some buffers, but still waiting
- * for others or waiting for metadata.
- */
- for (auto const stream : streams_) {
- if (!stream->isExternal())
- continue;
- FrameBuffer *buffer = request->findBuffer(stream);
- /*
- * Has the buffer already been handed back to the
- * request? If not, do so now.
- */
- if (buffer && buffer->request())
- pipe_->completeBuffer(camera_, request, buffer);
+ for (auto &b : request->buffers()) {
+ FrameBuffer *buffer = b.second;
+ buffer->cancel();
+ pipe_->completeBuffer(request, buffer);
}
- pipe_->completeRequest(camera_, request);
+ pipe_->completeRequest(request);
requestQueue_.pop_front();
}
}
@@ -1455,7 +1548,7 @@ void RPiCameraData::handleStreamBuffer(FrameBuffer *buffer, RPi::Stream *stream)
* Tag the buffer as completed, returning it to the
* application.
*/
- pipe_->completeBuffer(camera_, request, buffer);
+ pipe_->completeBuffer(request, buffer);
} else {
/*
* This buffer was not part of the Request, or there is no
@@ -1473,7 +1566,7 @@ void RPiCameraData::handleExternalBuffer(FrameBuffer *buffer, RPi::Stream *strea
{
unsigned int id = stream->getBufferId(buffer);
- if (!(id & RPi::BufferMask::EXTERNAL_BUFFER))
+ if (!(id & ipa::RPi::MaskExternalBuffer))
return;
/* Stop the Stream object from tracking the buffer. */
@@ -1494,11 +1587,10 @@ void RPiCameraData::handleState()
* No break here, we want to try running the pipeline again.
* The fallthrough clause below suppresses compiler warnings.
*/
- /* Fall through */
+ [[fallthrough]];
case State::Idle:
tryRunPipeline();
- tryFlushQueues();
break;
}
}
@@ -1519,7 +1611,7 @@ void RPiCameraData::checkRequestCompleted()
if (state_ != State::IpaComplete)
return;
- pipe_->completeRequest(camera_, request);
+ pipe_->completeRequest(request);
requestQueue_.pop_front();
requestCompleted = true;
}
@@ -1539,142 +1631,233 @@ void RPiCameraData::checkRequestCompleted()
}
}
-void RPiCameraData::tryRunPipeline()
+void RPiCameraData::applyScalerCrop(const ControlList &controls)
{
- FrameBuffer *bayerBuffer, *embeddedBuffer;
- IPAOperationData op;
-
- /* If any of our request or buffer queues are empty, we cannot proceed. */
- if (state_ != State::Idle || requestQueue_.empty() ||
- bayerQueue_.empty() || embeddedQueue_.empty())
- return;
+ if (controls.contains(controls::ScalerCrop)) {
+ Rectangle nativeCrop = controls.get<Rectangle>(controls::ScalerCrop);
- /* Start with the front of the bayer buffer queue. */
- bayerBuffer = bayerQueue_.front();
+ if (!nativeCrop.width || !nativeCrop.height)
+ nativeCrop = { 0, 0, 1, 1 };
- /*
- * Find the embedded data buffer with a matching timestamp to pass to
- * the IPA. Any embedded buffers with a timestamp lower than the
- * current bayer buffer will be removed and re-queued to the driver.
- */
- embeddedBuffer = updateQueue(embeddedQueue_, bayerBuffer->metadata().timestamp,
- &unicam_[Unicam::Embedded]);
-
- if (!embeddedBuffer) {
- LOG(RPI, Debug) << "Could not find matching embedded buffer";
+ /* Create a version of the crop scaled to ISP (camera mode) pixels. */
+ Rectangle ispCrop = nativeCrop.translatedBy(-sensorInfo_.analogCrop.topLeft());
+ ispCrop.scaleBy(sensorInfo_.outputSize, sensorInfo_.analogCrop.size());
/*
- * Look the other way, try to match a bayer buffer with the
- * first embedded buffer in the queue. This will also do some
- * housekeeping on the bayer image queue - clear out any
- * buffers that are older than the first buffer in the embedded
- * queue.
- *
- * But first check if the embedded queue has emptied out.
+ * The crop that we set must be:
+ * 1. At least as big as ispMinCropSize_, once that's been
+ * enlarged to the same aspect ratio.
+ * 2. With the same mid-point, if possible.
+ * 3. But it can't go outside the sensor area.
*/
- if (embeddedQueue_.empty())
- return;
+ Size minSize = ispMinCropSize_.expandedToAspectRatio(nativeCrop.size());
+ Size size = ispCrop.size().expandedTo(minSize);
+ ispCrop = size.centeredTo(ispCrop.center()).enclosedIn(Rectangle(sensorInfo_.outputSize));
- embeddedBuffer = embeddedQueue_.front();
- bayerBuffer = updateQueue(bayerQueue_, embeddedBuffer->metadata().timestamp,
- &unicam_[Unicam::Image]);
+ if (ispCrop != ispCrop_) {
+ isp_[Isp::Input].dev()->setSelection(V4L2_SEL_TGT_CROP, &ispCrop);
+ ispCrop_ = ispCrop;
- if (!bayerBuffer) {
- LOG(RPI, Debug) << "Could not find matching bayer buffer - ending.";
- return;
+ /*
+ * Also update the ScalerCrop in the metadata with what we actually
+ * used. But we must first rescale that from ISP (camera mode) pixels
+ * back into sensor native pixels.
+ */
+ scalerCrop_ = ispCrop_.scaledBy(sensorInfo_.analogCrop.size(),
+ sensorInfo_.outputSize);
+ scalerCrop_.translateBy(sensorInfo_.analogCrop.topLeft());
}
}
+}
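
[Annotation] applyScalerCrop() converts between two coordinate systems: the ScalerCrop control is expressed in sensor-native pixels, while the ISP crop rectangle is expressed in camera-mode (scaled output) pixels. A sketch of the forward transform in isolation, reusing the geometry helpers invoked above (Rectangle, Size and IPACameraSensorInfo come from libcamera headers; the function name is invented for illustration):

    #include <libcamera/geometry.h>
    #include <libcamera/ipa/core_ipa_interface.h>

    using namespace libcamera;

    /* Sensor-native crop -> ISP (camera mode) crop, clamped to limits. */
    Rectangle nativeToIspCrop(Rectangle nativeCrop,
                              const IPACameraSensorInfo &info,
                              const Size &ispMinCropSize)
    {
        /* Shift into the analogue crop's coordinate system. */
        Rectangle ispCrop = nativeCrop.translatedBy(-info.analogCrop.topLeft());
        /* Scale from analogue crop resolution to the mode's output size. */
        ispCrop.scaleBy(info.outputSize, info.analogCrop.size());
        /* Honour the minimum size, keep the centre, stay on the sensor. */
        Size minSize = ispMinCropSize.expandedToAspectRatio(nativeCrop.size());
        Size size = ispCrop.size().expandedTo(minSize);
        return size.centeredTo(ispCrop.center())
                .enclosedIn(Rectangle(info.outputSize));
    }

The inverse transform, used above to report ScalerCrop metadata, simply applies scaledBy() and translateBy() in the opposite order.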
+
+void RPiCameraData::fillRequestMetadata(const ControlList &bufferControls,
+ Request *request)
+{
+ request->metadata().set(controls::SensorTimestamp,
+ bufferControls.get(controls::SensorTimestamp));
+
+ request->metadata().set(controls::ScalerCrop, scalerCrop_);
+}
+
+void RPiCameraData::tryRunPipeline()
+{
+ FrameBuffer *embeddedBuffer;
+ BayerFrame bayerFrame;
+
+ /* If any of our request or buffer queues are empty, we cannot proceed. */
+ if (state_ != State::Idle || requestQueue_.empty() ||
+ bayerQueue_.empty() || (embeddedQueue_.empty() && sensorMetadata_))
+ return;
+
+ if (!findMatchingBuffers(bayerFrame, embeddedBuffer))
+ return;
/* Take the first request from the queue and action the IPA. */
Request *request = requestQueue_.front();
+ /* See if a new ScalerCrop value needs to be applied. */
+ applyScalerCrop(request->controls());
+
+ /*
+ * Clear the request metadata and fill it with some initial non-IPA
+ * related controls. We clear it first because the request metadata
+ * may have been populated if we have dropped the previous frame.
+ */
+ request->metadata().clear();
+ fillRequestMetadata(bayerFrame.controls, request);
+
/*
* Process all the user controls by the IPA. Once this is complete, we
* queue the ISP output buffer listed in the request to start the HW
* pipeline.
*/
- op.operation = RPi::IPA_EVENT_QUEUE_REQUEST;
- op.controls = { request->controls() };
- ipa_->processEvent(op);
-
- /* Ready to use the buffers, pop them off the queue. */
- bayerQueue_.pop();
- embeddedQueue_.pop();
+ ipa_->signalQueueRequest(request->controls());
/* Set our state to say the pipeline is active. */
state_ = State::Busy;
- unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerBuffer);
- unsigned int embeddedId = unicam_[Unicam::Embedded].getBufferId(embeddedBuffer);
+ unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerFrame.buffer);
+
+ LOG(RPI, Debug) << "Signalling signalIspPrepare:"
+ << " Bayer buffer id: " << bayerId;
+
+ ipa::RPi::ISPConfig ispPrepare;
+ ispPrepare.bayerBufferId = ipa::RPi::MaskBayerData | bayerId;
+ ispPrepare.controls = std::move(bayerFrame.controls);
+
+ if (embeddedBuffer) {
+ unsigned int embeddedId = unicam_[Unicam::Embedded].getBufferId(embeddedBuffer);
+
+ ispPrepare.embeddedBufferId = ipa::RPi::MaskEmbeddedData | embeddedId;
+ ispPrepare.embeddedBufferPresent = true;
- LOG(RPI, Debug) << "Signalling RPi::IPA_EVENT_SIGNAL_ISP_PREPARE:"
- << " Bayer buffer id: " << bayerId
- << " Embedded buffer id: " << embeddedId;
+ LOG(RPI, Debug) << "Signalling signalIspPrepare:"
+ << " Bayer buffer id: " << embeddedId;
+ }
- op.operation = RPi::IPA_EVENT_SIGNAL_ISP_PREPARE;
- op.data = { RPi::BufferMask::EMBEDDED_DATA | embeddedId,
- RPi::BufferMask::BAYER_DATA | bayerId };
- ipa_->processEvent(op);
+ ipa_->signalIspPrepare(ispPrepare);
}
-void RPiCameraData::tryFlushQueues()
+bool RPiCameraData::findMatchingBuffers(BayerFrame &bayerFrame, FrameBuffer *&embeddedBuffer)
{
- /*
- * It is possible for us to end up in a situation where all available
- * Unicam buffers have been dequeued but do not match. This can happen
- * when the system is heavily loaded and we get out of lock-step with
- * the two channels.
- *
- * In such cases, the best thing to do is to re-queue all the buffers
- * and give a chance for the hardware to return to lock-step. We do have
- * to drop all interim frames.
- */
- if (unicam_[Unicam::Image].getBuffers().size() == bayerQueue_.size() &&
- unicam_[Unicam::Embedded].getBuffers().size() == embeddedQueue_.size()) {
- /* This cannot happen when Unicam streams are external. */
- assert(!unicam_[Unicam::Image].isExternal());
+ unsigned int embeddedRequeueCount = 0, bayerRequeueCount = 0;
- LOG(RPI, Warning) << "Flushing all buffer queues!";
-
- while (!bayerQueue_.empty()) {
- unicam_[Unicam::Image].queueBuffer(bayerQueue_.front());
- bayerQueue_.pop();
- }
+ /* Loop until we find a matching bayer and embedded data buffer. */
+ while (!bayerQueue_.empty()) {
+ /* Start with the front of the bayer queue. */
+ FrameBuffer *bayerBuffer = bayerQueue_.front().buffer;
+ /*
+ * Find the embedded data buffer with a matching timestamp to pass to
+ * the IPA. Any embedded buffers with a timestamp lower than the
+ * current bayer buffer will be removed and re-queued to the driver.
+ */
+ uint64_t ts = bayerBuffer->metadata().timestamp;
+ embeddedBuffer = nullptr;
while (!embeddedQueue_.empty()) {
- unicam_[Unicam::Embedded].queueBuffer(embeddedQueue_.front());
- embeddedQueue_.pop();
+ FrameBuffer *b = embeddedQueue_.front();
+ if (!unicam_[Unicam::Embedded].isExternal() && b->metadata().timestamp < ts) {
+ embeddedQueue_.pop();
+ unicam_[Unicam::Embedded].queueBuffer(b);
+ embeddedRequeueCount++;
+ LOG(RPI, Warning) << "Dropping unmatched input frame in stream "
+ << unicam_[Unicam::Embedded].name();
+ } else if (unicam_[Unicam::Embedded].isExternal() || b->metadata().timestamp == ts) {
+ /* We pop the item from the queue lower down. */
+ embeddedBuffer = b;
+ break;
+ } else {
+ break; /* Only higher timestamps from here. */
+ }
}
- }
-}
-FrameBuffer *RPiCameraData::updateQueue(std::queue<FrameBuffer *> &q, uint64_t timestamp,
- RPi::Stream *stream)
-{
- /*
- * If the unicam streams are external (both have to be the same), then we
- * can only return the top buffer in the queue, and assume they have
- * been synced by queuing at the same time. We cannot drop these frames,
- * as they may have been provided externally.
- */
- while (!q.empty()) {
- FrameBuffer *b = q.front();
- if (!stream->isExternal() && b->metadata().timestamp < timestamp) {
- q.pop();
- stream->queueBuffer(b);
- LOG(RPI, Warning) << "Dropping unmatched input frame in stream "
- << stream->name();
- } else if (stream->isExternal() || b->metadata().timestamp == timestamp) {
- /* The calling function will pop the item from the queue. */
- return b;
+ if (!embeddedBuffer) {
+ bool flushedBuffers = false;
+
+ LOG(RPI, Debug) << "Could not find matching embedded buffer";
+
+ if (!sensorMetadata_) {
+ /*
+ * If there is no sensor metadata, simply return the
+ * first bayer frame in the queue.
+ */
+ LOG(RPI, Debug) << "Returning bayer frame without a match";
+ bayerFrame = std::move(bayerQueue_.front());
+ bayerQueue_.pop();
+ embeddedBuffer = nullptr;
+ return true;
+ }
+
+ if (!embeddedQueue_.empty()) {
+ /*
+ * Not found a matching embedded buffer for the bayer buffer in
+ * the front of the queue. This buffer is now orphaned, so requeue
+ * it back to the device.
+ */
+ unicam_[Unicam::Image].queueBuffer(bayerQueue_.front().buffer);
+ bayerQueue_.pop();
+ bayerRequeueCount++;
+ LOG(RPI, Warning) << "Dropping unmatched input frame in stream "
+ << unicam_[Unicam::Image].name();
+ }
+
+ /*
+ * If we have requeued all available embedded data buffers in this loop,
+ * then we are fully out of sync, so we might as well requeue all the pending
+ * bayer buffers.
+ */
+ if (embeddedRequeueCount == unicam_[Unicam::Embedded].getBuffers().size()) {
+ /* The embedded queue must be empty at this point! */
+ ASSERT(embeddedQueue_.empty());
+
+ LOG(RPI, Warning) << "Flushing bayer stream!";
+ while (!bayerQueue_.empty()) {
+ unicam_[Unicam::Image].queueBuffer(bayerQueue_.front().buffer);
+ bayerQueue_.pop();
+ }
+ flushedBuffers = true;
+ }
+
+ /*
+ * Similar to the above, if we have requeued all available bayer buffers in
+ * the loop, then we are fully out of sync, so we might as well requeue all the
+ * pending embedded data buffers.
+ */
+ if (bayerRequeueCount == unicam_[Unicam::Image].getBuffers().size()) {
+ /* The bayer queue must be empty at this point! */
+ ASSERT(bayerQueue_.empty());
+
+ LOG(RPI, Warning) << "Flushing embedded data stream!";
+ while (!embeddedQueue_.empty()) {
+ unicam_[Unicam::Embedded].queueBuffer(embeddedQueue_.front());
+ embeddedQueue_.pop();
+ }
+ flushedBuffers = true;
+ }
+
+ /*
+ * If the embedded queue has become empty, we cannot do any more.
+ * Similarly, if we have flushed any one of our queues, we cannot do
+ * any more. Return from here without a buffer pair.
+ */
+ if (embeddedQueue_.empty() || flushedBuffers)
+ return false;
} else {
- break; /* Only higher timestamps from here. */
+ /*
+ * We have found a matching bayer and embedded data buffer, so
+ * nothing more to do apart from assigning the bayer frame and
+ * popping the buffers from the queue.
+ */
+ bayerFrame = std::move(bayerQueue_.front());
+ bayerQueue_.pop();
+ embeddedQueue_.pop();
+ return true;
}
}
- return nullptr;
+ return false;
}
-REGISTER_PIPELINE_HANDLER(PipelineHandlerRPi);
+REGISTER_PIPELINE_HANDLER(PipelineHandlerRPi)
} /* namespace libcamera */
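
[Annotation] findMatchingBuffers() is the heart of the Unicam lock-step recovery: it pairs bayer and embedded-data buffers by timestamp and recycles whichever side has fallen behind. A reduced, self-contained sketch of the pairing loop, with the driver requeueing and the flush heuristics elided:

    #include <cstdint>
    #include <queue>

    struct Buf { uint64_t ts; };

    /* Returns true when an exact-timestamp pair is found. */
    bool matchByTimestamp(std::queue<Buf> &bayer, std::queue<Buf> &embedded,
                          Buf &outBayer, Buf &outEmbedded)
    {
        while (!bayer.empty()) {
            uint64_t ts = bayer.front().ts;
            /* Drop embedded buffers older than the current bayer frame. */
            while (!embedded.empty() && embedded.front().ts < ts)
                embedded.pop(); /* would be requeued to the driver */
            if (!embedded.empty() && embedded.front().ts == ts) {
                outBayer = bayer.front();
                outEmbedded = embedded.front();
                bayer.pop();
                embedded.pop();
                return true;
            }
            /* No match possible for this bayer frame; drop it. */
            bayer.pop(); /* would be requeued to the driver */
        }
        return false;
    }

The full version above additionally counts requeues per side and, once every buffer of one stream has been recycled in a single pass, flushes the other queue entirely: that is the "fully out of sync" recovery path.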
diff --git a/src/libcamera/pipeline/raspberrypi/rpi_stream.cpp b/src/libcamera/pipeline/raspberrypi/rpi_stream.cpp
index 1a42cc17..b3265d0e 100644
--- a/src/libcamera/pipeline/raspberrypi/rpi_stream.cpp
+++ b/src/libcamera/pipeline/raspberrypi/rpi_stream.cpp
@@ -6,7 +6,9 @@
*/
#include "rpi_stream.h"
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
+
+#include <libcamera/ipa/raspberrypi_ipa_interface.h>
namespace libcamera {
@@ -70,7 +72,7 @@ int Stream::getBufferId(FrameBuffer *buffer) const
void Stream::setExternalBuffer(FrameBuffer *buffer)
{
- bufferMap_.emplace(RPi::BufferMask::EXTERNAL_BUFFER | id_.get(), buffer);
+ bufferMap_.emplace(ipa::RPi::MaskExternalBuffer | id_.get(), buffer);
}
void Stream::removeExternalBuffer(FrameBuffer *buffer)
@@ -78,7 +80,7 @@ void Stream::removeExternalBuffer(FrameBuffer *buffer)
int id = getBufferId(buffer);
/* Ensure we have this buffer in the stream, and it is marked external. */
- ASSERT(id != -1 && (id & RPi::BufferMask::EXTERNAL_BUFFER));
+ ASSERT(id != -1 && (id & ipa::RPi::MaskExternalBuffer));
bufferMap_.erase(id);
}
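
[Annotation] The MaskExternalBuffer rename keeps the existing cookie scheme: the low bits of a buffer ID carry an index while high bits flag the buffer's origin, so a single unsigned int round-trips through the IPA interface. An illustration of the bit tests, with hypothetical mask values (the real constants are defined in the generated ipa::RPi namespace):

    /* Hypothetical values, for illustration only. */
    constexpr unsigned int MaskID = 0x00ffff;             /* index bits */
    constexpr unsigned int MaskExternalBuffer = 0x100000; /* origin flag */

    bool isExternal(unsigned int id) { return id & MaskExternalBuffer; }
    unsigned int indexOf(unsigned int id) { return id & MaskID; }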
@@ -163,9 +165,9 @@ void Stream::returnBuffer(FrameBuffer *buffer)
* If so, do it now as availableBuffers_ will not be empty.
*/
while (!requestBuffers_.empty()) {
- FrameBuffer *buffer = requestBuffers_.front();
+ FrameBuffer *requestBuffer = requestBuffers_.front();
- if (!buffer) {
+ if (!requestBuffer) {
/*
* We want to queue an internal buffer, but none
* are available. Can't do anything, quit the loop.
@@ -177,12 +179,12 @@ void Stream::returnBuffer(FrameBuffer *buffer)
* We want to queue an internal buffer, and at least one
* is available.
*/
- buffer = availableBuffers_.front();
+ requestBuffer = availableBuffers_.front();
availableBuffers_.pop();
}
requestBuffers_.pop();
- queueToDevice(buffer);
+ queueToDevice(requestBuffer);
}
}
diff --git a/src/libcamera/pipeline/raspberrypi/rpi_stream.h b/src/libcamera/pipeline/raspberrypi/rpi_stream.h
index 0b502f64..f1ac715f 100644
--- a/src/libcamera/pipeline/raspberrypi/rpi_stream.h
+++ b/src/libcamera/pipeline/raspberrypi/rpi_stream.h
@@ -13,6 +13,7 @@
#include <vector>
#include <libcamera/ipa/raspberrypi.h>
+#include <libcamera/ipa/raspberrypi_ipa_interface.h>
#include <libcamera/stream.h>
#include "libcamera/internal/v4l2_videodevice.h"
@@ -31,13 +32,13 @@ class Stream : public libcamera::Stream
{
public:
Stream()
- : id_(RPi::BufferMask::ID)
+ : id_(ipa::RPi::MaskID)
{
}
Stream(const char *name, MediaEntity *dev, bool importOnly = false)
: external_(false), importOnly_(importOnly), name_(name),
- dev_(std::make_unique<V4L2VideoDevice>(dev)), id_(RPi::BufferMask::ID)
+ dev_(std::make_unique<V4L2VideoDevice>(dev)), id_(ipa::RPi::MaskID)
{
}
diff --git a/src/libcamera/pipeline/raspberrypi/staggered_ctrl.cpp b/src/libcamera/pipeline/raspberrypi/staggered_ctrl.cpp
deleted file mode 100644
index 0572acc9..00000000
--- a/src/libcamera/pipeline/raspberrypi/staggered_ctrl.cpp
+++ /dev/null
@@ -1,174 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2020, Raspberry Pi (Trading) Ltd.
- *
- * staggered_ctrl.cpp - Helper for writing staggered ctrls to a V4L2 device.
- */
-
-#include "staggered_ctrl.h"
-
-#include <algorithm>
-
-#include <libcamera/controls.h>
-
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/utils.h"
-#include "libcamera/internal/v4l2_videodevice.h"
-
-namespace libcamera {
-
-LOG_DEFINE_CATEGORY(RPI_S_W);
-
-namespace RPi {
-
-void StaggeredCtrl::init(V4L2VideoDevice *dev,
- std::initializer_list<std::pair<const uint32_t, uint8_t>> delayList)
-{
- std::lock_guard<std::mutex> lock(lock_);
-
- dev_ = dev;
- delay_ = delayList;
- ctrl_.clear();
-
- /* Find the largest delay across all controls. */
- maxDelay_ = 0;
- for (auto const &p : delay_) {
- LOG(RPI_S_W, Info) << "Init ctrl "
- << utils::hex(p.first) << " with delay "
- << static_cast<int>(p.second);
- maxDelay_ = std::max(maxDelay_, p.second);
- }
-
- init_ = true;
-}
-
-void StaggeredCtrl::reset()
-{
- std::lock_guard<std::mutex> lock(lock_);
-
- int lastSetCount = setCount_;
- std::unordered_map<uint32_t, int32_t> lastVal;
-
- /* Reset the counters. */
- setCount_ = getCount_ = 0;
-
- /* Look for the last set values. */
- for (auto const &c : ctrl_)
- lastVal[c.first] = c.second[lastSetCount].value;
-
- /* Apply the last set values as the next to be applied. */
- ctrl_.clear();
- for (auto &c : lastVal)
- ctrl_[c.first][setCount_] = CtrlInfo(c.second);
-}
-
-bool StaggeredCtrl::set(uint32_t ctrl, int32_t value)
-{
- std::lock_guard<std::mutex> lock(lock_);
-
- /* Can we find this ctrl as one that is registered? */
- if (delay_.find(ctrl) == delay_.end())
- return false;
-
- ctrl_[ctrl][setCount_].value = value;
- ctrl_[ctrl][setCount_].updated = true;
-
- return true;
-}
-
-bool StaggeredCtrl::set(std::initializer_list<std::pair<const uint32_t, int32_t>> ctrlList)
-{
- std::lock_guard<std::mutex> lock(lock_);
-
- for (auto const &p : ctrlList) {
- /* Can we find this ctrl? */
- if (delay_.find(p.first) == delay_.end())
- return false;
-
- ctrl_[p.first][setCount_] = CtrlInfo(p.second);
- }
-
- return true;
-}
-
-bool StaggeredCtrl::set(const ControlList &controls)
-{
- std::lock_guard<std::mutex> lock(lock_);
-
- for (auto const &p : controls) {
- /* Can we find this ctrl? */
- if (delay_.find(p.first) == delay_.end())
- return false;
-
- ctrl_[p.first][setCount_] = CtrlInfo(p.second.get<int32_t>());
- LOG(RPI_S_W, Debug) << "Setting ctrl "
- << utils::hex(p.first) << " to "
- << ctrl_[p.first][setCount_].value
- << " at index "
- << setCount_;
- }
-
- return true;
-}
-
-int StaggeredCtrl::write()
-{
- std::lock_guard<std::mutex> lock(lock_);
- ControlList controls(dev_->controls());
-
- for (auto &p : ctrl_) {
- int delayDiff = maxDelay_ - delay_[p.first];
- int index = std::max<int>(0, setCount_ - delayDiff);
-
- if (p.second[index].updated) {
- /* We need to write this value out. */
- controls.set(p.first, p.second[index].value);
- p.second[index].updated = false;
- LOG(RPI_S_W, Debug) << "Writing ctrl "
- << utils::hex(p.first) << " to "
- << p.second[index].value
- << " at index "
- << index;
- }
- }
-
- nextFrame();
- return dev_->setControls(&controls);
-}
-
-void StaggeredCtrl::get(std::unordered_map<uint32_t, int32_t> &ctrl, uint8_t offset)
-{
- std::lock_guard<std::mutex> lock(lock_);
-
- /* Account for the offset to reset the getCounter. */
- getCount_ += offset + 1;
-
- ctrl.clear();
- for (auto &p : ctrl_) {
- int index = std::max<int>(0, getCount_ - maxDelay_);
- ctrl[p.first] = p.second[index].value;
- LOG(RPI_S_W, Debug) << "Getting ctrl "
- << utils::hex(p.first) << " to "
- << p.second[index].value
- << " at index "
- << index;
- }
-}
-
-void StaggeredCtrl::nextFrame()
-{
- /* Advance the control history to the next frame */
- int prevCount = setCount_;
- setCount_++;
-
- LOG(RPI_S_W, Debug) << "Next frame, set index is " << setCount_;
-
- for (auto &p : ctrl_) {
- p.second[setCount_].value = p.second[prevCount].value;
- p.second[setCount_].updated = false;
- }
-}
-
-} /* namespace RPi */
-
-} /* namespace libcamera */
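
[Annotation] The core of the deleted helper is the delay compensation in write(): a control that takes effect faster than the slowest registered control must be written later, i.e. its value is read from an older slot of the per-control history. That index arithmetic, reduced to a standalone sketch:

    #include <algorithm>

    /*
     * With the slowest control taking maxDelay frames and this control
     * taking delay frames, the value for history position setCount must
     * be written (maxDelay - delay) slots early.
     */
    int writeIndex(int setCount, int maxDelay, int delay)
    {
        int delayDiff = maxDelay - delay;
        return std::max(0, setCount - delayDiff);
    }

This is the same role that the DelayedControls helper now fills for the other pipeline handlers.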
diff --git a/src/libcamera/pipeline/raspberrypi/staggered_ctrl.h b/src/libcamera/pipeline/raspberrypi/staggered_ctrl.h
deleted file mode 100644
index 382fa31a..00000000
--- a/src/libcamera/pipeline/raspberrypi/staggered_ctrl.h
+++ /dev/null
@@ -1,96 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2020, Raspberry Pi (Trading) Ltd.
- *
- * staggered_ctrl.h - Helper for writing staggered ctrls to a V4L2 device.
- */
-#ifndef __LIBCAMERA_PIPELINE_RASPBERRYPI_STAGGERED_CTRL_H__
-#define __LIBCAMERA_PIPELINE_RASPBERRYPI_STAGGERED_CTRL_H__
-
-#include <array>
-#include <initializer_list>
-#include <mutex>
-#include <unordered_map>
-#include <utility>
-
-namespace libcamera {
-
-class ControlList;
-class V4L2VideoDevice;
-
-namespace RPi {
-
-class StaggeredCtrl
-{
-public:
- StaggeredCtrl()
- : init_(false), setCount_(0), getCount_(0), maxDelay_(0)
- {
- }
-
- operator bool() const
- {
- return init_;
- }
-
- void init(V4L2VideoDevice *dev,
- std::initializer_list<std::pair<const uint32_t, uint8_t>> delayList);
- void reset();
-
- void get(std::unordered_map<uint32_t, int32_t> &ctrl, uint8_t offset = 0);
-
- bool set(uint32_t ctrl, int32_t value);
- bool set(std::initializer_list<std::pair<const uint32_t, int32_t>> ctrlList);
- bool set(const ControlList &controls);
-
- int write();
-
-private:
- void nextFrame();
-
- /* listSize must be a power of 2. */
- static constexpr int listSize = (1 << 4);
- struct CtrlInfo {
- CtrlInfo()
- : value(0), updated(false)
- {
- }
-
- CtrlInfo(int32_t value_)
- : value(value_), updated(true)
- {
- }
-
- int32_t value;
- bool updated;
- };
-
- class CircularArray : public std::array<CtrlInfo, listSize>
- {
- public:
- CtrlInfo &operator[](int index)
- {
- return std::array<CtrlInfo, listSize>::operator[](index & (listSize - 1));
- }
-
- const CtrlInfo &operator[](int index) const
- {
- return std::array<CtrlInfo, listSize>::operator[](index & (listSize - 1));
- }
- };
-
- bool init_;
- uint32_t setCount_;
- uint32_t getCount_;
- uint8_t maxDelay_;
- V4L2VideoDevice *dev_;
- std::unordered_map<uint32_t, uint8_t> delay_;
- std::unordered_map<uint32_t, CircularArray> ctrl_;
- std::mutex lock_;
-};
-
-} /* namespace RPi */
-
-} /* namespace libcamera */
-
-#endif /* __LIBCAMERA_PIPELINE_RASPBERRYPI_STAGGERED_CTRL_H__ */
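
[Annotation] The deleted CircularArray relies on listSize being a power of two so that index & (listSize - 1) equals index % listSize for non-negative indices, avoiding a division in the hot path. The same trick as a standalone sketch:

    #include <array>
    #include <cstddef>

    template<typename T, std::size_t N>
    class Ring : public std::array<T, N>
    {
        static_assert(N && (N & (N - 1)) == 0, "N must be a power of two");

    public:
        T &operator[](std::size_t index)
        {
            /* Wraps automatically; no modulo needed. */
            return std::array<T, N>::operator[](index & (N - 1));
        }
    };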
diff --git a/src/libcamera/pipeline/rkisp1/meson.build b/src/libcamera/pipeline/rkisp1/meson.build
index 5cd40d94..cad66535 100644
--- a/src/libcamera/pipeline/rkisp1/meson.build
+++ b/src/libcamera/pipeline/rkisp1/meson.build
@@ -3,5 +3,4 @@
libcamera_sources += files([
'rkisp1.cpp',
'rkisp1_path.cpp',
- 'timeline.cpp',
])
diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
index aec590ff..42911a8f 100644
--- a/src/libcamera/pipeline/rkisp1/rkisp1.cpp
+++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
@@ -14,41 +14,37 @@
#include <linux/media-bus-format.h>
-#include <libcamera/buffer.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include <libcamera/camera.h>
#include <libcamera/control_ids.h>
#include <libcamera/formats.h>
-#include <libcamera/ipa/rkisp1.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/ipa/core_ipa_interface.h>
+#include <libcamera/ipa/rkisp1_ipa_interface.h>
+#include <libcamera/ipa/rkisp1_ipa_proxy.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>
#include "libcamera/internal/camera_sensor.h"
+#include "libcamera/internal/delayed_controls.h"
#include "libcamera/internal/device_enumerator.h"
#include "libcamera/internal/ipa_manager.h"
-#include "libcamera/internal/log.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/pipeline_handler.h"
-#include "libcamera/internal/utils.h"
#include "libcamera/internal/v4l2_subdevice.h"
#include "libcamera/internal/v4l2_videodevice.h"
#include "rkisp1_path.h"
-#include "timeline.h"
namespace libcamera {
LOG_DEFINE_CATEGORY(RkISP1)
class PipelineHandlerRkISP1;
-class RkISP1ActionQueueBuffers;
class RkISP1CameraData;
-enum RkISP1ActionType {
- SetSensor,
- SOE,
- QueueBuffers,
-};
-
struct RkISP1FrameInfo {
unsigned int frame;
Request *request;
@@ -58,7 +54,6 @@ struct RkISP1FrameInfo {
FrameBuffer *mainPathBuffer;
FrameBuffer *selfPathBuffer;
- bool paramFilled;
bool paramDequeued;
bool metadataProcessed;
};
@@ -81,72 +76,34 @@ private:
std::map<unsigned int, RkISP1FrameInfo *> frameInfo_;
};
-class RkISP1Timeline : public Timeline
-{
-public:
- RkISP1Timeline()
- : Timeline()
- {
- setDelay(SetSensor, -1, 5);
- setDelay(SOE, 0, -1);
- setDelay(QueueBuffers, -1, 10);
- }
-
- void bufferReady(FrameBuffer *buffer)
- {
- /*
- * Calculate SOE by taking the end of DMA set by the kernel and applying
- * the time offsets provided by the IPA to find the best estimate
- * of SOE.
- */
-
- ASSERT(frameOffset(SOE) == 0);
-
- utils::time_point soe = std::chrono::time_point<utils::clock>()
- + std::chrono::nanoseconds(buffer->metadata().timestamp)
- + timeOffset(SOE);
-
- notifyStartOfExposure(buffer->metadata().sequence, soe);
- }
-
- void setDelay(unsigned int type, int frame, int msdelay)
- {
- utils::duration delay = std::chrono::milliseconds(msdelay);
- setRawDelay(type, frame, delay);
- }
-};
-
class RkISP1CameraData : public CameraData
{
public:
RkISP1CameraData(PipelineHandler *pipe, RkISP1MainPath *mainPath,
RkISP1SelfPath *selfPath)
- : CameraData(pipe), sensor_(nullptr), frame_(0),
- frameInfo_(pipe), mainPath_(mainPath), selfPath_(selfPath)
+ : CameraData(pipe), frame_(0), frameInfo_(pipe),
+ mainPath_(mainPath), selfPath_(selfPath)
{
}
- ~RkISP1CameraData()
- {
- delete sensor_;
- }
-
- int loadIPA();
+ int loadIPA(unsigned int hwRevision);
Stream mainPathStream_;
Stream selfPathStream_;
- CameraSensor *sensor_;
+ std::unique_ptr<CameraSensor> sensor_;
+ std::unique_ptr<DelayedControls> delayedCtrls_;
unsigned int frame_;
std::vector<IPABuffer> ipaBuffers_;
RkISP1Frames frameInfo_;
- RkISP1Timeline timeline_;
RkISP1MainPath *mainPath_;
RkISP1SelfPath *selfPath_;
+ std::unique_ptr<ipa::rkisp1::IPAProxyRkISP1> ipa_;
+
private:
void queueFrameAction(unsigned int frame,
- const IPAOperationData &action);
+ const ipa::rkisp1::RkISP1Action &action);
void metadataReady(unsigned int frame, const ControlList &metadata);
};
@@ -178,7 +135,6 @@ class PipelineHandlerRkISP1 : public PipelineHandler
{
public:
PipelineHandlerRkISP1(CameraManager *manager);
- ~PipelineHandlerRkISP1();
CameraConfiguration *generateConfiguration(Camera *camera,
const StreamRoles &roles) override;
@@ -187,7 +143,7 @@ public:
int exportFrameBuffers(Camera *camera, Stream *stream,
std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;
- int start(Camera *camera) override;
+ int start(Camera *camera, const ControlList *controls) override;
void stop(Camera *camera) override;
int queueRequestDevice(Camera *camera, Request *request) override;
@@ -201,7 +157,6 @@ private:
PipelineHandler::cameraData(camera));
}
- friend RkISP1ActionQueueBuffers;
friend RkISP1CameraData;
friend RkISP1Frames;
@@ -212,14 +167,15 @@ private:
void bufferReady(FrameBuffer *buffer);
void paramReady(FrameBuffer *buffer);
void statReady(FrameBuffer *buffer);
+ void frameStart(uint32_t sequence);
int allocateBuffers(Camera *camera);
int freeBuffers(Camera *camera);
MediaDevice *media_;
- V4L2Subdevice *isp_;
- V4L2VideoDevice *param_;
- V4L2VideoDevice *stat_;
+ std::unique_ptr<V4L2Subdevice> isp_;
+ std::unique_ptr<V4L2VideoDevice> param_;
+ std::unique_ptr<V4L2VideoDevice> stat_;
RkISP1MainPath mainPath_;
RkISP1SelfPath selfPath_;
@@ -267,7 +223,6 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
info->mainPathBuffer = mainPathBuffer;
info->selfPathBuffer = selfPathBuffer;
info->statBuffer = statBuffer;
- info->paramFilled = false;
info->paramDequeued = false;
info->metadataProcessed = false;
@@ -313,7 +268,8 @@ RkISP1FrameInfo *RkISP1Frames::find(unsigned int frame)
if (itInfo != frameInfo_.end())
return itInfo->second;
- LOG(RkISP1, Error) << "Can't locate info from frame";
+ LOG(RkISP1, Fatal) << "Can't locate info from frame";
+
return nullptr;
}
@@ -329,7 +285,8 @@ RkISP1FrameInfo *RkISP1Frames::find(FrameBuffer *buffer)
return info;
}
- LOG(RkISP1, Error) << "Can't locate info from buffer";
+ LOG(RkISP1, Fatal) << "Can't locate info from buffer";
+
return nullptr;
}
@@ -342,110 +299,60 @@ RkISP1FrameInfo *RkISP1Frames::find(Request *request)
return info;
}
- LOG(RkISP1, Error) << "Can't locate info from request";
+ LOG(RkISP1, Fatal) << "Can't locate info from request";
+
return nullptr;
}
-class RkISP1ActionSetSensor : public FrameAction
-{
-public:
- RkISP1ActionSetSensor(unsigned int frame, CameraSensor *sensor, const ControlList &controls)
- : FrameAction(frame, SetSensor), sensor_(sensor), controls_(controls) {}
-
-protected:
- void run() override
- {
- sensor_->setControls(&controls_);
- }
-
-private:
- CameraSensor *sensor_;
- ControlList controls_;
-};
-
-class RkISP1ActionQueueBuffers : public FrameAction
+int RkISP1CameraData::loadIPA(unsigned int hwRevision)
{
-public:
- RkISP1ActionQueueBuffers(unsigned int frame, RkISP1CameraData *data,
- PipelineHandlerRkISP1 *pipe)
- : FrameAction(frame, QueueBuffers), data_(data), pipe_(pipe)
- {
- }
-
-protected:
- void run() override
- {
- RkISP1FrameInfo *info = data_->frameInfo_.find(frame());
- if (!info)
- LOG(RkISP1, Fatal) << "Frame not known";
-
- /*
- * \todo: If parameters are not filled a better method to handle
- * the situation than queuing a buffer with unknown content
- * should be used.
- *
- * It seems excessive to keep an internal zeroed scratch
- * parameters buffer around as this should not happen unless the
- * devices is under too much load. Perhaps failing the request
- * and returning it to the application with an error code is
- * better than queue it to hardware?
- */
- if (!info->paramFilled)
- LOG(RkISP1, Error)
- << "Parameters not ready on time for frame "
- << frame();
-
- pipe_->param_->queueBuffer(info->paramBuffer);
- pipe_->stat_->queueBuffer(info->statBuffer);
-
- if (info->mainPathBuffer)
- pipe_->mainPath_.queueBuffer(info->mainPathBuffer);
-
- if (info->selfPathBuffer)
- pipe_->selfPath_.queueBuffer(info->selfPathBuffer);
- }
-
-private:
- RkISP1CameraData *data_;
- PipelineHandlerRkISP1 *pipe_;
-};
-
-int RkISP1CameraData::loadIPA()
-{
- ipa_ = IPAManager::createIPA(pipe_, 1, 1);
+ ipa_ = IPAManager::createIPA<ipa::rkisp1::IPAProxyRkISP1>(pipe_, 1, 1);
if (!ipa_)
return -ENOENT;
ipa_->queueFrameAction.connect(this,
&RkISP1CameraData::queueFrameAction);
- ipa_->init(IPASettings{});
+ int ret = ipa_->init(hwRevision);
+ if (ret < 0) {
+ LOG(RkISP1, Error) << "IPA initialization failure";
+ return ret;
+ }
return 0;
}
void RkISP1CameraData::queueFrameAction(unsigned int frame,
- const IPAOperationData &action)
+ const ipa::rkisp1::RkISP1Action &action)
{
- switch (action.operation) {
- case RKISP1_IPA_ACTION_V4L2_SET: {
- const ControlList &controls = action.controls[0];
- timeline_.scheduleAction(std::make_unique<RkISP1ActionSetSensor>(frame,
- sensor_,
- controls));
+ switch (action.op) {
+ case ipa::rkisp1::ActionV4L2Set: {
+ const ControlList &controls = action.controls;
+ delayedCtrls_->push(controls);
break;
}
- case RKISP1_IPA_ACTION_PARAM_FILLED: {
+ case ipa::rkisp1::ActionParamFilled: {
+ PipelineHandlerRkISP1 *pipe = static_cast<PipelineHandlerRkISP1 *>(pipe_);
RkISP1FrameInfo *info = frameInfo_.find(frame);
- if (info)
- info->paramFilled = true;
+ if (!info)
+ break;
+
+ pipe->param_->queueBuffer(info->paramBuffer);
+ pipe->stat_->queueBuffer(info->statBuffer);
+
+ if (info->mainPathBuffer)
+ mainPath_->queueBuffer(info->mainPathBuffer);
+
+ if (info->selfPathBuffer)
+ selfPath_->queueBuffer(info->selfPathBuffer);
+
break;
}
- case RKISP1_IPA_ACTION_METADATA:
- metadataReady(frame, action.controls[0]);
+ case ipa::rkisp1::ActionMetadata:
+ metadataReady(frame, action.controls);
break;
default:
- LOG(RkISP1, Error) << "Unknown action " << action.operation;
+ LOG(RkISP1, Error) << "Unknown action " << action.op;
break;
}
}
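
[Annotation] With the switch to the typed IPA interface, a single tagged struct replaces the generic IPAOperationData: an op enumerator selects the action and the payload fields are strongly typed. A minimal sketch of the dispatch shape, using hypothetical stand-ins for the generated ipa::rkisp1 types:

    /* Hypothetical stand-ins for the generated IPA types. */
    enum class Op { V4L2Set, ParamFilled, Metadata };

    struct Action {
        Op op;
        /* typed payload fields elided */
    };

    void dispatch(const Action &action)
    {
        switch (action.op) {
        case Op::V4L2Set:
            /* queue sensor controls through DelayedControls */
            break;
        case Op::ParamFilled:
            /* queue param/stat and image buffers to the driver */
            break;
        case Op::Metadata:
            /* merge the IPA metadata into the request */
            break;
        }
    }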
@@ -459,7 +366,7 @@ void RkISP1CameraData::metadataReady(unsigned int frame, const ControlList &meta
if (!info)
return;
- info->request->metadata() = metadata;
+ info->request->metadata().merge(metadata);
info->metadataProcessed = true;
pipe->tryCompleteRequest(info->request);
@@ -490,7 +397,7 @@ bool RkISP1CameraConfiguration::fitsAllPaths(const StreamConfiguration &cfg)
CameraConfiguration::Status RkISP1CameraConfiguration::validate()
{
- const CameraSensor *sensor = data_->sensor_;
+ const CameraSensor *sensor = data_->sensor_.get();
Status status = Valid;
if (config_.empty())
@@ -599,16 +506,8 @@ CameraConfiguration::Status RkISP1CameraConfiguration::validate()
}
PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
- : PipelineHandler(manager), isp_(nullptr), param_(nullptr),
- stat_(nullptr)
-{
-}
-
-PipelineHandlerRkISP1::~PipelineHandlerRkISP1()
+ : PipelineHandler(manager)
{
- delete param_;
- delete stat_;
- delete isp_;
}
/* -----------------------------------------------------------------------------
@@ -669,7 +568,7 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
RkISP1CameraConfiguration *config =
static_cast<RkISP1CameraConfiguration *>(c);
RkISP1CameraData *data = cameraData(camera);
- CameraSensor *sensor = data->sensor_;
+ CameraSensor *sensor = data->sensor_.get();
int ret;
ret = initLinks(camera, sensor, *config);
@@ -698,40 +597,75 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
if (ret < 0)
return ret;
- LOG(RkISP1, Debug) << "ISP input pad configured with " << format.toString();
+ LOG(RkISP1, Debug)
+ << "ISP input pad configured with " << format.toString()
+ << " crop " << rect.toString();
/* YUYV8_2X8 is required on the ISP source path pad for YUV output. */
format.mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;
- LOG(RkISP1, Debug) << "Configuring ISP output pad with " << format.toString();
+ LOG(RkISP1, Debug)
+ << "Configuring ISP output pad with " << format.toString()
+ << " crop " << rect.toString();
+
+ ret = isp_->setSelection(2, V4L2_SEL_TGT_CROP, &rect);
+ if (ret < 0)
+ return ret;
ret = isp_->setFormat(2, &format);
if (ret < 0)
return ret;
- LOG(RkISP1, Debug) << "ISP output pad configured with " << format.toString();
+ LOG(RkISP1, Debug)
+ << "ISP output pad configured with " << format.toString()
+ << " crop " << rect.toString();
+
+ std::map<unsigned int, IPAStream> streamConfig;
for (const StreamConfiguration &cfg : *config) {
- if (cfg.stream() == &data->mainPathStream_)
+ if (cfg.stream() == &data->mainPathStream_) {
ret = mainPath_.configure(cfg, format);
- else
+ streamConfig[0] = IPAStream(cfg.pixelFormat,
+ cfg.size);
+ } else {
ret = selfPath_.configure(cfg, format);
+ streamConfig[1] = IPAStream(cfg.pixelFormat,
+ cfg.size);
+ }
if (ret)
return ret;
}
- V4L2DeviceFormat paramFormat = {};
+ V4L2DeviceFormat paramFormat;
paramFormat.fourcc = V4L2PixelFormat(V4L2_META_FMT_RK_ISP1_PARAMS);
ret = param_->setFormat(&paramFormat);
if (ret)
return ret;
- V4L2DeviceFormat statFormat = {};
+ V4L2DeviceFormat statFormat;
statFormat.fourcc = V4L2PixelFormat(V4L2_META_FMT_RK_ISP1_STAT_3A);
ret = stat_->setFormat(&statFormat);
if (ret)
return ret;
+ /* Inform IPA of stream configuration and sensor controls. */
+ IPACameraSensorInfo sensorInfo = {};
+ ret = data->sensor_->sensorInfo(&sensorInfo);
+ if (ret) {
+ /* \todo Turn this into a hard failure. */
+ LOG(RkISP1, Warning) << "Camera sensor information not available";
+ sensorInfo = {};
+ ret = 0;
+ }
+
+ std::map<uint32_t, ControlInfoMap> entityControls;
+ entityControls.emplace(0, data->sensor_->controls());
+
+ ret = data->ipa_->configure(sensorInfo, streamConfig, entityControls);
+ if (ret) {
+ LOG(RkISP1, Error) << "failed configuring IPA (" << ret << ")";
+ return ret;
+ }
return 0;
}
@@ -770,15 +704,15 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
for (std::unique_ptr<FrameBuffer> &buffer : paramBuffers_) {
buffer->setCookie(ipaBufferId++);
- data->ipaBuffers_.push_back({ .id = buffer->cookie(),
- .planes = buffer->planes() });
+ data->ipaBuffers_.emplace_back(buffer->cookie(),
+ buffer->planes());
availableParamBuffers_.push(buffer.get());
}
for (std::unique_ptr<FrameBuffer> &buffer : statBuffers_) {
buffer->setCookie(ipaBufferId++);
- data->ipaBuffers_.push_back({ .id = buffer->cookie(),
- .planes = buffer->planes() });
+ data->ipaBuffers_.emplace_back(buffer->cookie(),
+ buffer->planes());
availableStatBuffers_.push(buffer.get());
}
@@ -822,7 +756,7 @@ int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
return 0;
}
-int PipelineHandlerRkISP1::start(Camera *camera)
+int PipelineHandlerRkISP1::start(Camera *camera, [[maybe_unused]] const ControlList *controls)
{
RkISP1CameraData *data = cameraData(camera);
int ret;
@@ -861,8 +795,6 @@ int PipelineHandlerRkISP1::start(Camera *camera)
return ret;
}
- std::map<unsigned int, IPAStream> streamConfig;
-
if (data->mainPath_->isEnabled()) {
ret = mainPath_.start();
if (ret) {
@@ -872,11 +804,6 @@ int PipelineHandlerRkISP1::start(Camera *camera)
freeBuffers(camera);
return ret;
}
-
- streamConfig[0] = {
- .pixelFormat = data->mainPathStream_.configuration().pixelFormat,
- .size = data->mainPathStream_.configuration().size,
- };
}
if (data->selfPath_->isEnabled()) {
@@ -889,32 +816,11 @@ int PipelineHandlerRkISP1::start(Camera *camera)
freeBuffers(camera);
return ret;
}
-
- streamConfig[1] = {
- .pixelFormat = data->selfPathStream_.configuration().pixelFormat,
- .size = data->selfPathStream_.configuration().size,
- };
}
- activeCamera_ = camera;
-
- /* Inform IPA of stream configuration and sensor controls. */
- CameraSensorInfo sensorInfo = {};
- ret = data->sensor_->sensorInfo(&sensorInfo);
- if (ret) {
- * \todo Turn this into a hard failure.
- LOG(RkISP1, Warning) << "Camera sensor information not available";
- sensorInfo = {};
- ret = 0;
- }
-
- std::map<unsigned int, const ControlInfoMap &> entityControls;
- entityControls.emplace(0, data->sensor_->controls());
-
- IPAOperationData ipaConfig;
- data->ipa_->configure(sensorInfo, streamConfig, entityControls,
- ipaConfig, nullptr);
+ isp_->setFrameStartEnabled(true);
+ activeCamera_ = camera;
return ret;
}
@@ -923,6 +829,10 @@ void PipelineHandlerRkISP1::stop(Camera *camera)
RkISP1CameraData *data = cameraData(camera);
int ret;
+ isp_->setFrameStartEnabled(false);
+
+ data->ipa_->stop();
+
selfPath_.stop();
mainPath_.stop();
@@ -936,10 +846,7 @@ void PipelineHandlerRkISP1::stop(Camera *camera)
LOG(RkISP1, Warning)
<< "Failed to stop parameters for " << camera->id();
- data->ipa_->stop();
-
- data->timeline_.reset();
-
+ ASSERT(data->queuedRequests_.empty());
data->frameInfo_.clear();
freeBuffers(camera);
@@ -955,15 +862,12 @@ int PipelineHandlerRkISP1::queueRequestDevice(Camera *camera, Request *request)
if (!info)
return -ENOENT;
- IPAOperationData op;
- op.operation = RKISP1_IPA_EVENT_QUEUE_REQUEST;
- op.data = { data->frame_, info->paramBuffer->cookie() };
- op.controls = { request->controls() };
- data->ipa_->processEvent(op);
-
- data->timeline_.scheduleAction(std::make_unique<RkISP1ActionQueueBuffers>(data->frame_,
- data,
- this));
+ ipa::rkisp1::RkISP1Event ev;
+ ev.op = ipa::rkisp1::EventQueueRequest;
+ ev.frame = data->frame_;
+ ev.bufferId = info->paramBuffer->cookie();
+ ev.controls = request->controls();
+ data->ipa_->processEvent(ev);
data->frame_++;
@@ -1033,7 +937,7 @@ int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
data->controlInfo_ = std::move(ctrls);
- data->sensor_ = new CameraSensor(sensor);
+ data->sensor_ = std::make_unique<CameraSensor>(sensor);
ret = data->sensor_->init();
if (ret)
return ret;
@@ -1041,7 +945,23 @@ int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
/* Initialize the camera properties. */
data->properties_ = data->sensor_->properties();
- ret = data->loadIPA();
+ /*
+ * \todo Read delay values from the sensor itself or from a
+ * sensor database. For now, use generic values taken from
+ * the Raspberry Pi.
+ */
+ std::unordered_map<uint32_t, DelayedControls::ControlParams> params = {
+ { V4L2_CID_ANALOGUE_GAIN, { 1, false } },
+ { V4L2_CID_EXPOSURE, { 2, false } },
+ };
+
+ data->delayedCtrls_ =
+ std::make_unique<DelayedControls>(data->sensor_->device(),
+ params);
+ isp_->frameStart.connect(data->delayedCtrls_.get(),
+ &DelayedControls::applyControls);
+
+ ret = data->loadIPA(media_->hwRevision());
if (ret)
return ret;
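
[Annotation] DelayedControls replaces the timeline for sensor controls: values are queued per control and written a fixed number of frames early, driven by the ISP frameStart signal connected just above. A loose, self-contained approximation of the idea (not libcamera's actual implementation; DelaySim and its members are invented for illustration):

    #include <cstdint>
    #include <map>
    #include <unordered_map>

    /*
     * Loose model: a control written at frame f only takes effect at
     * frame f + delay, so to be effective at target frame t it must be
     * written at the frameStart of t - delay.
     */
    struct DelaySim {
        std::unordered_map<uint32_t, unsigned int> delays;
        /* control id -> (target frame -> value) */
        std::unordered_map<uint32_t, std::map<unsigned int, int32_t>> staged;

        /* Called from the frameStart signal handler. */
        std::unordered_map<uint32_t, int32_t> applyControls(unsigned int frame)
        {
            std::unordered_map<uint32_t, int32_t> out;
            for (auto &[id, values] : staged) {
                auto it = values.find(frame + delays[id]);
                if (it != values.end())
                    out[id] = it->second;
            }
            return out;
        }
    };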
@@ -1073,6 +993,12 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
if (!media_)
return false;
+ if (!media_->hwRevision()) {
+ LOG(RkISP1, Error)
+ << "The rkisp1 driver is too old, v5.11 or newer is required";
+ return false;
+ }
+
/* Create the V4L2 subdevices we will need. */
isp_ = V4L2Subdevice::fromEntityName(media_, "rkisp1_isp");
if (isp_->open() < 0)
@@ -1107,10 +1033,13 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
if (!pad)
return false;
- for (MediaLink *link : pad->links())
- createCamera(link->source()->entity());
+ bool registered = false;
+ for (MediaLink *link : pad->links()) {
+ if (!createCamera(link->source()->entity()))
+ registered = true;
+ }
- return true;
+ return registered;
}
/* -----------------------------------------------------------------------------
@@ -1135,27 +1064,34 @@ void PipelineHandlerRkISP1::tryCompleteRequest(Request *request)
data->frameInfo_.destroy(info->frame);
- completeRequest(activeCamera_, request);
+ completeRequest(request);
}
void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
{
- ASSERT(activeCamera_);
Request *request = buffer->request();
- completeBuffer(activeCamera_, request, buffer);
+ /*
+ * Record the sensor's timestamp in the request metadata.
+ *
+ * \todo The sensor timestamp should be better estimated by connecting
+ * to the V4L2Device::frameStart signal.
+ */
+ request->metadata().set(controls::SensorTimestamp,
+ buffer->metadata().timestamp);
+
+ completeBuffer(request, buffer);
tryCompleteRequest(request);
}
void PipelineHandlerRkISP1::paramReady(FrameBuffer *buffer)
{
- if (buffer->metadata().status == FrameMetadata::FrameCancelled)
- return;
-
ASSERT(activeCamera_);
RkISP1CameraData *data = cameraData(activeCamera_);
RkISP1FrameInfo *info = data->frameInfo_.find(buffer);
+ if (!info)
+ return;
info->paramDequeued = true;
tryCompleteRequest(info->request);
@@ -1163,9 +1099,6 @@ void PipelineHandlerRkISP1::paramReady(FrameBuffer *buffer)
void PipelineHandlerRkISP1::statReady(FrameBuffer *buffer)
{
- if (buffer->metadata().status == FrameMetadata::FrameCancelled)
- return;
-
ASSERT(activeCamera_);
RkISP1CameraData *data = cameraData(activeCamera_);
@@ -1173,17 +1106,22 @@ void PipelineHandlerRkISP1::statReady(FrameBuffer *buffer)
if (!info)
return;
- data->timeline_.bufferReady(buffer);
+ if (buffer->metadata().status == FrameMetadata::FrameCancelled) {
+ info->metadataProcessed = true;
+ tryCompleteRequest(info->request);
+ return;
+ }
if (data->frame_ <= buffer->metadata().sequence)
data->frame_ = buffer->metadata().sequence + 1;
- IPAOperationData op;
- op.operation = RKISP1_IPA_EVENT_SIGNAL_STAT_BUFFER;
- op.data = { info->frame, info->statBuffer->cookie() };
- data->ipa_->processEvent(op);
+ ipa::rkisp1::RkISP1Event ev;
+ ev.op = ipa::rkisp1::EventSignalStatBuffer;
+ ev.frame = info->frame;
+ ev.bufferId = info->statBuffer->cookie();
+ data->ipa_->processEvent(ev);
}
-REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1);
+REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1)
} /* namespace libcamera */
diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
index ff995286..25f482eb 100644
--- a/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
+++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.cpp
@@ -24,16 +24,10 @@ RkISP1Path::RkISP1Path(const char *name, const Span<const PixelFormat> &formats,
const Size &minResolution, const Size &maxResolution)
: name_(name), running_(false), formats_(formats),
minResolution_(minResolution), maxResolution_(maxResolution),
- resizer_(nullptr), video_(nullptr), link_(nullptr)
+ link_(nullptr)
{
}
-RkISP1Path::~RkISP1Path()
-{
- delete video_;
- delete resizer_;
-}
-
bool RkISP1Path::init(MediaDevice *media)
{
std::string resizer = std::string("rkisp1_resizer_") + name_ + "path";
@@ -85,7 +79,7 @@ CameraConfiguration::Status RkISP1Path::validate(StreamConfiguration *cfg)
cfg->size.expandTo(minResolution_);
cfg->bufferCount = RKISP1_BUFFER_COUNT;
- V4L2DeviceFormat format = {};
+ V4L2DeviceFormat format;
format.fourcc = video_->toV4L2PixelFormat(cfg->pixelFormat);
format.size = cfg->size;
@@ -117,9 +111,14 @@ int RkISP1Path::configure(const StreamConfiguration &config,
if (ret < 0)
return ret;
+ Rectangle rect(0, 0, ispFormat.size);
+ ret = resizer_->setSelection(0, V4L2_SEL_TGT_CROP, &rect);
+ if (ret < 0)
+ return ret;
+
LOG(RkISP1, Debug)
<< "Configured " << name_ << " resizer input pad with "
- << ispFormat.toString();
+ << ispFormat.toString() << " crop " << rect.toString();
ispFormat.size = config.size;
@@ -146,7 +145,7 @@ int RkISP1Path::configure(const StreamConfiguration &config,
<< ispFormat.toString();
const PixelFormatInfo &info = PixelFormatInfo::info(config.pixelFormat);
- V4L2DeviceFormat outputFormat = {};
+ V4L2DeviceFormat outputFormat;
outputFormat.fourcc = video_->toV4L2PixelFormat(config.pixelFormat);
outputFormat.size = config.size;
outputFormat.planesCount = info.numPlanes();
@@ -219,7 +218,7 @@ constexpr std::array<PixelFormat, 6> RKISP1_RSZ_MP_FORMATS{
constexpr Size RKISP1_RSZ_SP_SRC_MIN{ 32, 16 };
constexpr Size RKISP1_RSZ_SP_SRC_MAX{ 1920, 1920 };
-constexpr std::array<PixelFormat, 7> RKISP1_RSZ_SP_FORMATS{
+constexpr std::array<PixelFormat, 8> RKISP1_RSZ_SP_FORMATS{
formats::YUYV,
formats::NV16,
formats::NV61,
@@ -227,7 +226,7 @@ constexpr std::array<PixelFormat, 7> RKISP1_RSZ_SP_FORMATS{
formats::NV12,
formats::R8,
formats::RGB565,
- /* \todo Add support for BGR888 */
+ formats::XRGB8888,
};
} /* namespace */
diff --git a/src/libcamera/pipeline/rkisp1/rkisp1_path.h b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
index 8f443e51..91757600 100644
--- a/src/libcamera/pipeline/rkisp1/rkisp1_path.h
+++ b/src/libcamera/pipeline/rkisp1/rkisp1_path.h
@@ -7,13 +7,15 @@
#ifndef __LIBCAMERA_PIPELINE_RKISP1_PATH_H__
#define __LIBCAMERA_PIPELINE_RKISP1_PATH_H__
+#include <memory>
#include <vector>
+#include <libcamera/base/signal.h>
+#include <libcamera/base/span.h>
+
#include <libcamera/camera.h>
#include <libcamera/geometry.h>
#include <libcamera/pixel_format.h>
-#include <libcamera/signal.h>
-#include <libcamera/span.h>
#include "libcamera/internal/media_object.h"
#include "libcamera/internal/v4l2_videodevice.h"
@@ -30,7 +32,6 @@ class RkISP1Path
public:
RkISP1Path(const char *name, const Span<const PixelFormat> &formats,
const Size &minResolution, const Size &maxResolution);
- ~RkISP1Path();
bool init(MediaDevice *media);
@@ -65,8 +66,8 @@ private:
const Size minResolution_;
const Size maxResolution_;
- V4L2Subdevice *resizer_;
- V4L2VideoDevice *video_;
+ std::unique_ptr<V4L2Subdevice> resizer_;
+ std::unique_ptr<V4L2VideoDevice> video_;
MediaLink *link_;
};
diff --git a/src/libcamera/pipeline/rkisp1/timeline.cpp b/src/libcamera/pipeline/rkisp1/timeline.cpp
deleted file mode 100644
index 6b83bbe5..00000000
--- a/src/libcamera/pipeline/rkisp1/timeline.cpp
+++ /dev/null
@@ -1,227 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2019, Google Inc.
- *
- * timeline.cpp - Timeline for per-frame control
- */
-
-#include "timeline.h"
-
-#include "libcamera/internal/log.h"
-
-/**
- * \file timeline.h
- * \brief Timeline for per-frame control
- */
-
-namespace libcamera {
-
-LOG_DEFINE_CATEGORY(Timeline)
-
-/**
- * \class FrameAction
- * \brief Action that can be scheduled on a Timeline
- *
- * A frame action is an event scheduled to be executed on a Timeline. A frame
- * action has two primary attributes: a frame number and a type.
- *
- * The frame number describes the frame to which the action is associated. The
- * type is a numerical ID which identifies the action within the pipeline and
- * IPA protocol.
- */
-
-/**
- * \class Timeline
- * \brief Executor of FrameAction
- *
- * The timeline has three primary functions:
- *
- * 1. Keep track of the Start of Exposure (SOE) for every frame processed by
- * the hardware. Using this information it shall keep an up-to-date estimate
- * of the frame interval (time between two consecutive SOE events).
- *
- * The estimated frame interval together with recorded SOE events are the
- * foundation for how the timeline schedules FrameActions at specific points
- * in time.
- * \todo Improve the frame interval estimation algorithm.
- *
- * 2. Keep track of current delays for different types of actions. The delays
- * for different actions might differ during a capture session. Exposure time
- * affects the overall FPS, and different ISP parameters might impact its
- * processing time.
- *
- * The action type delays shall be updated by the IPA in conjunction with
- * how it changes the capture parameters.
- *
- * 3. Schedule actions on the timeline. This is the process of taking a
- * FrameAction which contains an abstract description of what frame and
- * what type of action it contains, and turning that into a time point
- * and making sure the action is executed at that time.
- */
-
-Timeline::Timeline()
- : frameInterval_(0)
-{
- timer_.timeout.connect(this, &Timeline::timeout);
-}
-
-/**
- * \brief Reset and stop the timeline
- *
- * The timeline needs to be reset when the timeline should no longer execute
- * actions. A timeline should be reset between two capture sessions to prevent
- * the old capture session from affecting the second one.
- */
-void Timeline::reset()
-{
- timer_.stop();
-
- actions_.clear();
- history_.clear();
-}
-
-/**
- * \brief Schedule an action on the timeline
- * \param[in] action FrameAction to schedule
- *
- * The act of scheduling an action to the timeline is the process of taking
- * the properties of the action (type, frame and time offsets) and translating
- * that to a time point using the current timing values recorded in the
- * timeline for the action type. If an action is scheduled too late, execute
- * it immediately.
- */
-void Timeline::scheduleAction(std::unique_ptr<FrameAction> action)
-{
- unsigned int lastFrame;
- utils::time_point lastTime;
-
- if (history_.empty()) {
- lastFrame = 0;
- lastTime = std::chrono::steady_clock::now();
- } else {
- lastFrame = history_.back().first;
- lastTime = history_.back().second;
- }
-
- /*
- * Calculate when the action shall be scheduled by first finding out how
- * many frames in the future the action acts on, and then adding the
- * action's frame offset. Once the frame offset is known, translate
- * that to a time point by using the last estimated start of exposure
- * (SOE) as the fixed offset. Lastly add the action time offset to the
- * time point.
- */
- int frame = action->frame() - lastFrame + frameOffset(action->type());
- utils::time_point deadline = lastTime + frame * frameInterval_
- + timeOffset(action->type());
-
- utils::time_point now = std::chrono::steady_clock::now();
- if (deadline < now) {
- LOG(Timeline, Warning)
- << "Action scheduled too late "
- << utils::time_point_to_string(deadline)
- << ", run now " << utils::time_point_to_string(now);
- action->run();
- } else {
- actions_.emplace(deadline, std::move(action));
- updateDeadline();
- }
-}
-
-void Timeline::notifyStartOfExposure(unsigned int frame, utils::time_point time)
-{
- history_.push_back(std::make_pair(frame, time));
-
- if (history_.size() <= HISTORY_DEPTH / 2)
- return;
-
- while (history_.size() > HISTORY_DEPTH)
- history_.pop_front();
-
- /* Update the estimated time between two start of exposure events. */
- utils::duration sumExposures(0);
- unsigned int numExposures = 0;
-
- utils::time_point lastTime;
- for (auto it = history_.begin(); it != history_.end(); it++) {
- if (it != history_.begin()) {
- sumExposures += it->second - lastTime;
- numExposures++;
- }
-
- lastTime = it->second;
- }
-
- frameInterval_ = sumExposures;
- if (numExposures)
- frameInterval_ /= numExposures;
-}
-
-int Timeline::frameOffset(unsigned int type) const
-{
- const auto it = delays_.find(type);
- if (it == delays_.end()) {
- LOG(Timeline, Error)
- << "No frame offset set for action type " << type;
- return 0;
- }
-
- return it->second.first;
-}
-
-utils::duration Timeline::timeOffset(unsigned int type) const
-{
- const auto it = delays_.find(type);
- if (it == delays_.end()) {
- LOG(Timeline, Error)
- << "No time offset set for action type " << type;
- return utils::duration::zero();
- }
-
- return it->second.second;
-}
-
-void Timeline::setRawDelay(unsigned int type, int frame, utils::duration time)
-{
- delays_[type] = std::make_pair(frame, time);
-}
-
-void Timeline::updateDeadline()
-{
- if (actions_.empty())
- return;
-
- const utils::time_point &deadline = actions_.begin()->first;
-
- if (timer_.isRunning() && deadline >= timer_.deadline())
- return;
-
- if (deadline <= std::chrono::steady_clock::now()) {
- timeout(&timer_);
- return;
- }
-
- timer_.start(deadline);
-}
-
-void Timeline::timeout([[maybe_unused]] Timer *timer)
-{
- utils::time_point now = std::chrono::steady_clock::now();
-
- for (auto it = actions_.begin(); it != actions_.end();) {
- const utils::time_point &sched = it->first;
-
- if (sched > now)
- break;
-
- FrameAction *action = it->second.get();
-
- action->run();
-
- it = actions_.erase(it);
- }
-
- updateDeadline();
-}
-
-} /* namespace libcamera */
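
[Annotation] Before its removal, the scheduling core of Timeline was the frame-interval estimate: the mean spacing of the most recent start-of-exposure timestamps, from which each action's deadline was derived as lastSOE + frames * interval + timeOffset. The averaging step, extracted into a standalone sketch:

    #include <chrono>
    #include <list>
    #include <utility>

    using clk = std::chrono::steady_clock;

    /* Mean interval between consecutive SOE timestamps in the history. */
    clk::duration frameInterval(
        const std::list<std::pair<unsigned int, clk::time_point>> &history)
    {
        clk::duration sum{ 0 };
        unsigned int n = 0;
        clk::time_point last{};
        for (auto it = history.begin(); it != history.end(); ++it) {
            if (it != history.begin()) {
                sum += it->second - last;
                n++;
            }
            last = it->second;
        }
        return n ? sum / n : sum;
    }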
diff --git a/src/libcamera/pipeline/rkisp1/timeline.h b/src/libcamera/pipeline/rkisp1/timeline.h
deleted file mode 100644
index 88f99329..00000000
--- a/src/libcamera/pipeline/rkisp1/timeline.h
+++ /dev/null
@@ -1,72 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2019, Google Inc.
- *
- * timeline.h - Timeline for per-frame controls
- */
-#ifndef __LIBCAMERA_TIMELINE_H__
-#define __LIBCAMERA_TIMELINE_H__
-
-#include <list>
-#include <map>
-
-#include <libcamera/timer.h>
-
-#include "libcamera/internal/utils.h"
-
-namespace libcamera {
-
-class FrameAction
-{
-public:
- FrameAction(unsigned int frame, unsigned int type)
- : frame_(frame), type_(type) {}
-
- virtual ~FrameAction() {}
-
- unsigned int frame() const { return frame_; }
- unsigned int type() const { return type_; }
-
- virtual void run() = 0;
-
-private:
- unsigned int frame_;
- unsigned int type_;
-};
-
-class Timeline
-{
-public:
- Timeline();
- virtual ~Timeline() {}
-
- virtual void reset();
- virtual void scheduleAction(std::unique_ptr<FrameAction> action);
- virtual void notifyStartOfExposure(unsigned int frame, utils::time_point time);
-
- utils::duration frameInterval() const { return frameInterval_; }
-
-protected:
- int frameOffset(unsigned int type) const;
- utils::duration timeOffset(unsigned int type) const;
-
- void setRawDelay(unsigned int type, int frame, utils::duration time);
-
- std::map<unsigned int, std::pair<int, utils::duration>> delays_;
-
-private:
- static constexpr unsigned int HISTORY_DEPTH = 10;
-
- void timeout(Timer *timer);
- void updateDeadline();
-
- std::list<std::pair<unsigned int, utils::time_point>> history_;
- std::multimap<utils::time_point, std::unique_ptr<FrameAction>> actions_;
- utils::duration frameInterval_;
-
- Timer timer_;
-};
-
-} /* namespace libcamera */
-
-#endif /* __LIBCAMERA_TIMELINE_H__ */
diff --git a/src/libcamera/pipeline/simple/converter.cpp b/src/libcamera/pipeline/simple/converter.cpp
index 75fb297e..b5e34c4c 100644
--- a/src/libcamera/pipeline/simple/converter.cpp
+++ b/src/libcamera/pipeline/simple/converter.cpp
@@ -10,21 +10,173 @@
#include <algorithm>
#include <limits.h>
-#include <libcamera/buffer.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/signal.h>
+#include <libcamera/base/utils.h>
+
+#include <libcamera/framebuffer.h>
#include <libcamera/geometry.h>
-#include <libcamera/signal.h>
#include <libcamera/stream.h>
-#include "libcamera/internal/log.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/v4l2_videodevice.h"
namespace libcamera {
-LOG_DECLARE_CATEGORY(SimplePipeline);
+LOG_DECLARE_CATEGORY(SimplePipeline)
+
+/* -----------------------------------------------------------------------------
+ * SimpleConverter::Stream
+ */
+
+SimpleConverter::Stream::Stream(SimpleConverter *converter, unsigned int index)
+ : converter_(converter), index_(index)
+{
+ m2m_ = std::make_unique<V4L2M2MDevice>(converter->deviceNode_);
+
+ m2m_->output()->bufferReady.connect(this, &Stream::outputBufferReady);
+ m2m_->capture()->bufferReady.connect(this, &Stream::captureBufferReady);
+
+ int ret = m2m_->open();
+ if (ret < 0)
+ m2m_.reset();
+}
+
+int SimpleConverter::Stream::configure(const StreamConfiguration &inputCfg,
+ const StreamConfiguration &outputCfg)
+{
+ V4L2PixelFormat videoFormat =
+ m2m_->output()->toV4L2PixelFormat(inputCfg.pixelFormat);
+
+ V4L2DeviceFormat format;
+ format.fourcc = videoFormat;
+ format.size = inputCfg.size;
+ format.planesCount = 1;
+ format.planes[0].bpl = inputCfg.stride;
+
+ int ret = m2m_->output()->setFormat(&format);
+ if (ret < 0) {
+ LOG(SimplePipeline, Error)
+ << "Failed to set input format: " << strerror(-ret);
+ return ret;
+ }
+
+ if (format.fourcc != videoFormat || format.size != inputCfg.size ||
+ format.planes[0].bpl != inputCfg.stride) {
+ LOG(SimplePipeline, Error)
+ << "Input format not supported (requested "
+ << inputCfg.size.toString() << "-" << videoFormat.toString()
+ << ", got " << format.toString() << ")";
+ return -EINVAL;
+ }
+
+ /* Set the pixel format and size on the output. */
+ videoFormat = m2m_->capture()->toV4L2PixelFormat(outputCfg.pixelFormat);
+ format = {};
+ format.fourcc = videoFormat;
+ format.size = outputCfg.size;
+
+ ret = m2m_->capture()->setFormat(&format);
+ if (ret < 0) {
+ LOG(SimplePipeline, Error)
+ << "Failed to set output format: " << strerror(-ret);
+ return ret;
+ }
+
+ if (format.fourcc != videoFormat || format.size != outputCfg.size) {
+ LOG(SimplePipeline, Error)
+ << "Output format not supported";
+ return -EINVAL;
+ }
+
+ inputBufferCount_ = inputCfg.bufferCount;
+ outputBufferCount_ = outputCfg.bufferCount;
+
+ return 0;
+}
+
+int SimpleConverter::Stream::exportBuffers(unsigned int count,
+ std::vector<std::unique_ptr<FrameBuffer>> *buffers)
+{
+ return m2m_->capture()->exportBuffers(count, buffers);
+}
+
+int SimpleConverter::Stream::start()
+{
+ int ret = m2m_->output()->importBuffers(inputBufferCount_);
+ if (ret < 0)
+ return ret;
+
+ ret = m2m_->capture()->importBuffers(outputBufferCount_);
+ if (ret < 0) {
+ stop();
+ return ret;
+ }
+
+ ret = m2m_->output()->streamOn();
+ if (ret < 0) {
+ stop();
+ return ret;
+ }
+
+ ret = m2m_->capture()->streamOn();
+ if (ret < 0) {
+ stop();
+ return ret;
+ }
+
+ return 0;
+}
+
+void SimpleConverter::Stream::stop()
+{
+ m2m_->capture()->streamOff();
+ m2m_->output()->streamOff();
+ m2m_->capture()->releaseBuffers();
+ m2m_->output()->releaseBuffers();
+}
+
+int SimpleConverter::Stream::queueBuffers(FrameBuffer *input,
+ FrameBuffer *output)
+{
+ int ret = m2m_->output()->queueBuffer(input);
+ if (ret < 0)
+ return ret;
+
+ ret = m2m_->capture()->queueBuffer(output);
+ if (ret < 0)
+ return ret;
+
+ return 0;
+}
+
+std::string SimpleConverter::Stream::logPrefix() const
+{
+ return "stream" + std::to_string(index_);
+}
+
+void SimpleConverter::Stream::outputBufferReady(FrameBuffer *buffer)
+{
+ auto it = converter_->queue_.find(buffer);
+ if (it == converter_->queue_.end())
+ return;
+
+ if (!--it->second) {
+ converter_->inputBufferReady.emit(buffer);
+ converter_->queue_.erase(it);
+ }
+}
+
+void SimpleConverter::Stream::captureBufferReady(FrameBuffer *buffer)
+{
+ converter_->outputBufferReady.emit(buffer);
+}
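
The queue_ map gives each input buffer a fan-out reference count: every stream that consumed the input decrements it, and the stream that drops the last reference signals inputBufferReady. The same pattern in isolation, as a minimal sketch with illustrative names:

#include <map>

#include <libcamera/framebuffer.h>

std::map<libcamera::FrameBuffer *, unsigned int> queue;

void onQueued(libcamera::FrameBuffer *input, unsigned int streams)
{
        queue.emplace(input, streams);
}

void onStreamDone(libcamera::FrameBuffer *input)
{
        auto it = queue.find(input);
        if (it == queue.end())
                return;

        /* The stream releasing the last reference completes the buffer. */
        if (!--it->second) {
                /* ... emit inputBufferReady here ... */
                queue.erase(it);
        }
}
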
+
+/* -----------------------------------------------------------------------------
+ * SimpleConverter
+ */
SimpleConverter::SimpleConverter(MediaDevice *media)
- : m2m_(nullptr)
{
/*
* Locate the video node. There's no need to validate the pipeline
@@ -38,29 +190,14 @@ SimpleConverter::SimpleConverter(MediaDevice *media)
if (it == entities.end())
return;
- m2m_ = new V4L2M2MDevice((*it)->deviceNode());
+ deviceNode_ = (*it)->deviceNode();
- m2m_->output()->bufferReady.connect(this, &SimpleConverter::outputBufferReady);
- m2m_->capture()->bufferReady.connect(this, &SimpleConverter::captureBufferReady);
-}
-
-SimpleConverter::~SimpleConverter()
-{
- delete m2m_;
-}
-
-int SimpleConverter::open()
-{
- if (!m2m_)
- return -ENODEV;
-
- return m2m_->open();
-}
-
-void SimpleConverter::close()
-{
- if (m2m_)
- m2m_->close();
+ m2m_ = std::make_unique<V4L2M2MDevice>(deviceNode_);
+ int ret = m2m_->open();
+ if (ret < 0) {
+ m2m_.reset();
+ return;
+ }
}
std::vector<PixelFormat> SimpleConverter::formats(PixelFormat input)
@@ -72,11 +209,11 @@ std::vector<PixelFormat> SimpleConverter::formats(PixelFormat input)
* Set the format on the input side (V4L2 output) of the converter to
* enumerate the conversion capabilities on its output (V4L2 capture).
*/
- V4L2DeviceFormat format;
- format.fourcc = m2m_->output()->toV4L2PixelFormat(input);
- format.size = { 1, 1 };
+ V4L2DeviceFormat v4l2Format;
+ v4l2Format.fourcc = m2m_->output()->toV4L2PixelFormat(input);
+ v4l2Format.size = { 1, 1 };
- int ret = m2m_->output()->setFormat(&format);
+ int ret = m2m_->output()->setFormat(&v4l2Format);
if (ret < 0) {
LOG(SimplePipeline, Error)
<< "Failed to set format: " << strerror(-ret);
@@ -139,80 +276,71 @@ SizeRange SimpleConverter::sizes(const Size &input)
return sizes;
}
-int SimpleConverter::configure(PixelFormat inputFormat, const Size &inputSize,
- StreamConfiguration *cfg)
+std::tuple<unsigned int, unsigned int>
+SimpleConverter::strideAndFrameSize(const PixelFormat &pixelFormat,
+ const Size &size)
{
V4L2DeviceFormat format;
- int ret;
+ format.fourcc = m2m_->capture()->toV4L2PixelFormat(pixelFormat);
+ format.size = size;
- V4L2PixelFormat videoFormat = m2m_->output()->toV4L2PixelFormat(inputFormat);
- format.fourcc = videoFormat;
- format.size = inputSize;
+ int ret = m2m_->capture()->tryFormat(&format);
+ if (ret < 0)
+ return std::make_tuple(0, 0);
- ret = m2m_->output()->setFormat(&format);
- if (ret < 0) {
- LOG(SimplePipeline, Error)
- << "Failed to set input format: " << strerror(-ret);
- return ret;
- }
+ return std::make_tuple(format.planes[0].bpl, format.planes[0].size);
+}
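
Returning the stride and frame size as a std::tuple lets callers unpack both values in one statement; later in this patch the pipeline handler does exactly that with std::tie. A usage sketch, assuming a converter pointer and a StreamConfiguration cfg from the surrounding context:

unsigned int stride;
unsigned int frameSize;

std::tie(stride, frameSize) =
        converter->strideAndFrameSize(cfg.pixelFormat, cfg.size);
if (stride == 0)
        return -EINVAL;    /* the converter rejected this format/size pair */
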
- if (format.fourcc != videoFormat || format.size != inputSize) {
- LOG(SimplePipeline, Error)
- << "Input format not supported";
- return -EINVAL;
- }
+int SimpleConverter::configure(const StreamConfiguration &inputCfg,
+ const std::vector<std::reference_wrapper<StreamConfiguration>> &outputCfgs)
+{
+ int ret = 0;
- /* Set the pixel format and size on the output. */
- videoFormat = m2m_->capture()->toV4L2PixelFormat(cfg->pixelFormat);
- format.fourcc = videoFormat;
- format.size = cfg->size;
+ streams_.clear();
+ streams_.reserve(outputCfgs.size());
- ret = m2m_->capture()->setFormat(&format);
- if (ret < 0) {
- LOG(SimplePipeline, Error)
- << "Failed to set output format: " << strerror(-ret);
- return ret;
- }
+ for (unsigned int i = 0; i < outputCfgs.size(); ++i) {
+ Stream &stream = streams_.emplace_back(this, i);
- if (format.fourcc != videoFormat || format.size != cfg->size) {
- LOG(SimplePipeline, Error)
- << "Output format not supported";
- return -EINVAL;
+ if (!stream.isValid()) {
+ LOG(SimplePipeline, Error)
+ << "Failed to create stream " << i;
+ ret = -EINVAL;
+ break;
+ }
+
+ ret = stream.configure(inputCfg, outputCfgs[i]);
+ if (ret < 0)
+ break;
}
- cfg->stride = format.planes[0].bpl;
+ if (ret < 0) {
+ streams_.clear();
+ return ret;
+ }
return 0;
}
-int SimpleConverter::exportBuffers(unsigned int count,
+int SimpleConverter::exportBuffers(unsigned int output, unsigned int count,
std::vector<std::unique_ptr<FrameBuffer>> *buffers)
{
- return m2m_->capture()->exportBuffers(count, buffers);
+ if (output >= streams_.size())
+ return -EINVAL;
+
+ return streams_[output].exportBuffers(count, buffers);
}
-int SimpleConverter::start(unsigned int count)
+int SimpleConverter::start()
{
- int ret = m2m_->output()->importBuffers(count);
- if (ret < 0)
- return ret;
-
- ret = m2m_->capture()->importBuffers(count);
- if (ret < 0) {
- stop();
- return ret;
- }
-
- ret = m2m_->output()->streamOn();
- if (ret < 0) {
- stop();
- return ret;
- }
+ int ret;
- ret = m2m_->capture()->streamOn();
- if (ret < 0) {
- stop();
- return ret;
+ for (Stream &stream : streams_) {
+ ret = stream.start();
+ if (ret < 0) {
+ stop();
+ return ret;
+ }
}
return 0;
@@ -220,60 +348,52 @@ int SimpleConverter::start(unsigned int count)
void SimpleConverter::stop()
{
- m2m_->capture()->streamOff();
- m2m_->output()->streamOff();
- m2m_->capture()->releaseBuffers();
- m2m_->output()->releaseBuffers();
+ for (Stream &stream : utils::reverse(streams_))
+ stream.stop();
}
-int SimpleConverter::queueBuffers(FrameBuffer *input, FrameBuffer *output)
+int SimpleConverter::queueBuffers(FrameBuffer *input,
+ const std::map<unsigned int, FrameBuffer *> &outputs)
{
- int ret = m2m_->output()->queueBuffer(input);
- if (ret < 0)
- return ret;
+ unsigned int mask = 0;
+ int ret;
- ret = m2m_->capture()->queueBuffer(output);
- if (ret < 0)
- return ret;
+ /*
+ * Validate the outputs as a sanity check: at least one output is
+ * required, all outputs must reference a valid stream and no two
+ * outputs can reference the same stream.
+ */
+ if (outputs.empty())
+ return -EINVAL;
- return 0;
-}
+ for (auto [index, buffer] : outputs) {
+ if (!buffer)
+ return -EINVAL;
+ if (index >= streams_.size())
+ return -EINVAL;
+ if (mask & (1 << index))
+ return -EINVAL;
-void SimpleConverter::captureBufferReady(FrameBuffer *buffer)
-{
- if (!outputDoneQueue_.empty()) {
- FrameBuffer *other = outputDoneQueue_.front();
- outputDoneQueue_.pop();
- bufferReady.emit(other, buffer);
- } else {
- captureDoneQueue_.push(buffer);
+ mask |= 1 << index;
}
-}
-void SimpleConverter::outputBufferReady(FrameBuffer *buffer)
-{
- if (!captureDoneQueue_.empty()) {
- FrameBuffer *other = captureDoneQueue_.front();
- captureDoneQueue_.pop();
- bufferReady.emit(buffer, other);
- } else {
- outputDoneQueue_.push(buffer);
+ /* Queue the input and output buffers to all the streams. */
+ for (auto [index, buffer] : outputs) {
+ ret = streams_[index].queueBuffers(input, buffer);
+ if (ret < 0)
+ return ret;
}
-}
-std::tuple<unsigned int, unsigned int>
-SimpleConverter::strideAndFrameSize(const Size &size,
- const PixelFormat &pixelFormat)
-{
- V4L2DeviceFormat format = {};
- format.fourcc = m2m_->capture()->toV4L2PixelFormat(pixelFormat);
- format.size = size;
-
- int ret = m2m_->capture()->tryFormat(&format);
- if (ret < 0)
- return std::make_tuple(0, 0);
+ /*
+ * Add the input buffer to the queue, with the number of streams as a
+ * reference count. Completion of the input buffer will be signalled by
+ * the stream that releases the last reference.
+ */
+ queue_.emplace(std::piecewise_construct,
+ std::forward_as_tuple(input),
+ std::forward_as_tuple(outputs.size()));
- return std::make_tuple(format.planes[0].bpl, format.planes[0].size);
+ return 0;
}
} /* namespace libcamera */
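
Putting the reworked API together, a pipeline handler drives the converter roughly as follows. This is a sketch assuming two output streams; cfg0, cfg1, buf0, buf1, input and captureFormat are illustrative names, the pixel format and size are arbitrary, and error handling is trimmed:

StreamConfiguration inputCfg;
inputCfg.pixelFormat = formats::YUYV;    /* illustrative */
inputCfg.size = { 1920, 1080 };
inputCfg.stride = captureFormat.planes[0].bpl;
inputCfg.bufferCount = 3;

std::vector<std::reference_wrapper<StreamConfiguration>> outputCfgs{ cfg0, cfg1 };
converter->configure(inputCfg, outputCfgs);
converter->start();

/* One captured buffer fans out to both output streams. */
converter->queueBuffers(input, { { 0, buf0 }, { 1, buf1 } });
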
diff --git a/src/libcamera/pipeline/simple/converter.h b/src/libcamera/pipeline/simple/converter.h
index 78296680..276a2a29 100644
--- a/src/libcamera/pipeline/simple/converter.h
+++ b/src/libcamera/pipeline/simple/converter.h
@@ -8,13 +8,17 @@
#ifndef __LIBCAMERA_PIPELINE_SIMPLE_CONVERTER_H__
#define __LIBCAMERA_PIPELINE_SIMPLE_CONVERTER_H__
+#include <functional>
+#include <map>
#include <memory>
-#include <queue>
+#include <string>
#include <tuple>
#include <vector>
#include <libcamera/pixel_format.h>
-#include <libcamera/signal.h>
+
+#include <libcamera/base/log.h>
+#include <libcamera/base/signal.h>
namespace libcamera {
@@ -29,37 +33,67 @@ class SimpleConverter
{
public:
SimpleConverter(MediaDevice *media);
- ~SimpleConverter();
- int open();
- void close();
+ bool isValid() const { return m2m_ != nullptr; }
std::vector<PixelFormat> formats(PixelFormat input);
SizeRange sizes(const Size &input);
- int configure(PixelFormat inputFormat, const Size &inputSize,
- StreamConfiguration *cfg);
- int exportBuffers(unsigned int count,
+ std::tuple<unsigned int, unsigned int>
+ strideAndFrameSize(const PixelFormat &pixelFormat, const Size &size);
+
+ int configure(const StreamConfiguration &inputCfg,
+ const std::vector<std::reference_wrapper<StreamConfiguration>> &outputCfg);
+ int exportBuffers(unsigned int output, unsigned int count,
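+ /* The output index selects which converter stream exports the buffers. */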
std::vector<std::unique_ptr<FrameBuffer>> *buffers);
- int start(unsigned int count);
+ int start();
void stop();
- int queueBuffers(FrameBuffer *input, FrameBuffer *output);
+ int queueBuffers(FrameBuffer *input,
+ const std::map<unsigned int, FrameBuffer *> &outputs);
- std::tuple<unsigned int, unsigned int>
- strideAndFrameSize(const Size &size, const PixelFormat &pixelFormat);
-
- Signal<FrameBuffer *, FrameBuffer *> bufferReady;
+ Signal<FrameBuffer *> inputBufferReady;
+ Signal<FrameBuffer *> outputBufferReady;
private:
- void captureBufferReady(FrameBuffer *buffer);
- void outputBufferReady(FrameBuffer *buffer);
+ class Stream : protected Loggable
+ {
+ public:
+ Stream(SimpleConverter *converter, unsigned int index);
+
+ bool isValid() const { return m2m_ != nullptr; }
+
+ int configure(const StreamConfiguration &inputCfg,
+ const StreamConfiguration &outputCfg);
+ int exportBuffers(unsigned int count,
+ std::vector<std::unique_ptr<FrameBuffer>> *buffers);
+
+ int start();
+ void stop();
+
+ int queueBuffers(FrameBuffer *input, FrameBuffer *output);
+
+ protected:
+ std::string logPrefix() const override;
+
+ private:
+ void captureBufferReady(FrameBuffer *buffer);
+ void outputBufferReady(FrameBuffer *buffer);
+
+ SimpleConverter *converter_;
+ unsigned int index_;
+ std::unique_ptr<V4L2M2MDevice> m2m_;
+
+ unsigned int inputBufferCount_;
+ unsigned int outputBufferCount_;
+ };
- V4L2M2MDevice *m2m_;
+ std::string deviceNode_;
+ std::unique_ptr<V4L2M2MDevice> m2m_;
- std::queue<FrameBuffer *> captureDoneQueue_;
- std::queue<FrameBuffer *> outputDoneQueue_;
+ std::vector<Stream> streams_;
+ std::map<FrameBuffer *, unsigned int> queue_;
};
} /* namespace libcamera */
diff --git a/src/libcamera/pipeline/simple/simple.cpp b/src/libcamera/pipeline/simple/simple.cpp
index 10223a9b..b29fff98 100644
--- a/src/libcamera/pipeline/simple/simple.cpp
+++ b/src/libcamera/pipeline/simple/simple.cpp
@@ -15,18 +15,21 @@
#include <set>
#include <string>
#include <string.h>
+#include <unordered_map>
#include <utility>
#include <vector>
#include <linux/media-bus-format.h>
+#include <libcamera/base/log.h>
+
#include <libcamera/camera.h>
+#include <libcamera/control_ids.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>
#include "libcamera/internal/camera_sensor.h"
#include "libcamera/internal/device_enumerator.h"
-#include "libcamera/internal/log.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/pipeline_handler.h"
#include "libcamera/internal/v4l2_subdevice.h"
@@ -38,18 +41,107 @@ namespace libcamera {
LOG_DEFINE_CATEGORY(SimplePipeline)
+/* -----------------------------------------------------------------------------
+ *
+ * Overview
+ * --------
+ *
+ * The SimplePipelineHandler relies on generic kernel APIs to control a camera
+ * device, without any device-specific code and with limited device-specific
+ * static data.
+ *
+ * To qualify for support by the simple pipeline handler, a device shall
+ *
+ * - be supported by V4L2 drivers, exposing the Media Controller API, the V4L2
+ * subdev APIs and the media bus format-based enumeration extension for the
+ * VIDIOC_ENUM_FMT ioctl;
+ * - not expose any device-specific API from drivers to userspace;
+ * - include one or more camera sensor media entities and one or more video
+ * capture devices;
+ * - have a capture pipeline with linear paths from the camera sensors to the
+ * video capture devices; and
+ * - have an optional memory-to-memory device to perform format conversion
+ * and/or scaling, exposed as a V4L2 M2M device.
+ *
+ * As devices that require a specific pipeline handler may still match the
+ * above characteristics, the simple pipeline handler doesn't attempt to
+ * automatically determine which devices it can support. It instead relies on
+ * an explicit list of supported devices, provided in the supportedDevices
+ * array.
+ *
+ * When matching a device, the pipeline handler enumerates all camera sensors
+ * and attempts, for each of them, to find a path to a video capture node.
+ * It does so by using a breadth-first search to find the shortest path from the
+ * sensor device to a valid capture device. This is guaranteed to produce a
+ * valid path on devices with one only option and is a good heuristic on more
+ * complex devices to skip paths that aren't suitable for the simple pipeline
+ * handler. For instance, on the IPU-based i.MX6, the shortest path will skip
+ * encoders and image converters, and it will end in a CSI capture device.
+ * A more complex graph search algorithm could be implemented if a device that
+ * would otherwise be compatible with the pipeline handler isn't correctly
+ * handled by this heuristic.
+ *
+ * Once the camera data instances have been created, the match() function
+ * creates a V4L2Subdevice instance for each entity used by any of the cameras
+ * and stores the instances in SimplePipelineHandler::subdevs_, accessible by
+ * the SimpleCameraData class through the SimplePipelineHandler::subdev()
+ * function. This avoids duplication of subdev instances between different
+ * cameras when the same entity is used in multiple paths. A similar mechanism
+ * is used for V4L2VideoDevice instances, but in this case the instances are
+ * created on demand when accessed through SimplePipelineHandler::video(),
+ * instead of all in one go at initialization time.
+ *
+ * Finally, all camera data instances are initialized to gather information
+ * about the possible pipeline configurations for the corresponding camera. If
+ * valid pipeline configurations are found, a Camera is registered for the
+ * SimpleCameraData instance.
+ *
+ * Pipeline Configuration
+ * ----------------------
+ *
+ * The simple pipeline handler configures the pipeline by propagating V4L2
+ * subdev formats from the camera sensor to the video node. The format is first
+ * set on the camera sensor's output, using the native camera sensor
+ * resolution. Then, on every link in the pipeline, the format is retrieved on
+ * the link source and set unmodified on the link sink.
+ *
+ * When initializing the camera data, the above procedure is repeated for
+ * every media bus format supported by the camera sensor. Upon reaching the
+ * video node, the pixel formats compatible with the media bus format are
+ * enumerated. Each of those pixel formats corresponds to one possible pipeline
+ * configuration, stored as an instance of SimpleCameraData::Configuration in
+ * the SimpleCameraData::formats_ map.
+ *
+ * Format Conversion and Scaling
+ * -----------------------------
+ *
+ * The capture pipeline isn't expected to include a scaler, and if a scaler is
+ * available, it is ignored when configuring the pipeline. However, the simple
+ * pipeline handler supports optional memory-to-memory converters to scale the
+ * image and convert it to a different pixel format. If such a converter is
+ * present, the pipeline handler enumerates, for each pipeline configuration,
+ * the pixel formats and sizes that the converter can produce for the output of
+ * the capture video node, and stores the information in the outputFormats and
+ * outputSizes of the SimpleCameraData::Configuration structure.
+ */
+
class SimplePipelineHandler;
struct SimplePipelineInfo {
const char *driver;
- const char *converter;
+ /*
+ * Each entry in the list contains the name of a converter
+ * and the number of streams it supports.
+ */
+ std::vector<std::pair<const char *, unsigned int>> converters;
};
namespace {
static const SimplePipelineInfo supportedDevices[] = {
- { "imx7-csi", "pxp" },
- { "sun6i-csi", nullptr },
+ { "imx7-csi", { { "pxp", 1 } } },
+ { "qcom-camss", {} },
+ { "sun6i-csi", {} },
};
} /* namespace */
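
Enabling the simple pipeline handler on a new platform therefore amounts to adding an entry to this table. A hypothetical example; the driver and converter names below are made up for illustration:

static const SimplePipelineInfo supportedDevices[] = {
        { "imx7-csi", { { "pxp", 1 } } },
        { "qcom-camss", {} },
        { "sun6i-csi", {} },
        /* Hypothetical CSI driver with a two-stream M2M converter. */
        { "example-csi", { { "example-m2m", 2 } } },
};
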
@@ -57,16 +149,22 @@ static const SimplePipelineInfo supportedDevices[] = {
class SimpleCameraData : public CameraData
{
public:
- SimpleCameraData(SimplePipelineHandler *pipe, MediaEntity *sensor);
+ SimpleCameraData(SimplePipelineHandler *pipe,
+ unsigned int numStreams,
+ MediaEntity *sensor);
bool isValid() const { return sensor_ != nullptr; }
- std::set<Stream *> streams() { return { &stream_ }; }
int init();
int setupLinks();
int setupFormats(V4L2SubdeviceFormat *format,
V4L2Subdevice::Whence whence);
+ unsigned int streamIndex(const Stream *stream) const
+ {
+ return stream - &streams_.front();
+ }
+
struct Entity {
MediaEntity *entity;
MediaLink *link;
@@ -74,18 +172,23 @@ public:
struct Configuration {
uint32_t code;
- PixelFormat pixelFormat;
+ PixelFormat captureFormat;
Size captureSize;
+ std::vector<PixelFormat> outputFormats;
SizeRange outputSizes;
};
- Stream stream_;
+ std::vector<Stream> streams_;
std::unique_ptr<CameraSensor> sensor_;
std::list<Entity> entities_;
V4L2VideoDevice *video_;
std::vector<Configuration> configs_;
- std::map<PixelFormat, Configuration> formats_;
+ std::map<PixelFormat, const Configuration *> formats_;
+
+ std::vector<std::unique_ptr<FrameBuffer>> converterBuffers_;
+ bool useConverter_;
+ std::queue<std::map<unsigned int, FrameBuffer *>> converterQueue_;
};
class SimpleCameraConfiguration : public CameraConfiguration
@@ -95,7 +198,10 @@ public:
Status validate() override;
- const V4L2SubdeviceFormat &sensorFormat() { return sensorFormat_; }
+ const SimpleCameraData::Configuration *pipeConfig() const
+ {
+ return pipeConfig_;
+ }
bool needConversion() const { return needConversion_; }
@@ -108,7 +214,7 @@ private:
std::shared_ptr<Camera> camera_;
const SimpleCameraData *data_;
- V4L2SubdeviceFormat sensorFormat_;
+ const SimpleCameraData::Configuration *pipeConfig_;
bool needConversion_;
};
@@ -116,7 +222,6 @@ class SimplePipelineHandler : public PipelineHandler
{
public:
SimplePipelineHandler(CameraManager *manager);
- ~SimplePipelineHandler();
CameraConfiguration *generateConfiguration(Camera *camera,
const StreamRoles &roles) override;
@@ -125,40 +230,38 @@ public:
int exportFrameBuffers(Camera *camera, Stream *stream,
std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;
- int start(Camera *camera) override;
+ int start(Camera *camera, const ControlList *controls) override;
void stop(Camera *camera) override;
bool match(DeviceEnumerator *enumerator) override;
V4L2VideoDevice *video(const MediaEntity *entity);
V4L2Subdevice *subdev(const MediaEntity *entity);
- SimpleConverter *converter() { return converter_; }
+ SimpleConverter *converter() { return converter_.get(); }
protected:
int queueRequestDevice(Camera *camera, Request *request) override;
private:
+ static constexpr unsigned int kNumInternalBuffers = 3;
+
SimpleCameraData *cameraData(const Camera *camera)
{
return static_cast<SimpleCameraData *>(
PipelineHandler::cameraData(camera));
}
- int initLinks();
-
- int createCamera(MediaEntity *sensor);
+ std::vector<MediaEntity *> locateSensors();
void bufferReady(FrameBuffer *buffer);
- void converterDone(FrameBuffer *input, FrameBuffer *output);
+ void converterInputDone(FrameBuffer *buffer);
+ void converterOutputDone(FrameBuffer *buffer);
MediaDevice *media_;
std::map<const MediaEntity *, std::unique_ptr<V4L2VideoDevice>> videos_;
std::map<const MediaEntity *, V4L2Subdevice> subdevs_;
- SimpleConverter *converter_;
- bool useConverter_;
- std::vector<std::unique_ptr<FrameBuffer>> converterBuffers_;
- std::queue<FrameBuffer *> converterQueue_;
+ std::unique_ptr<SimpleConverter> converter_;
Camera *activeCamera_;
};
@@ -168,80 +271,85 @@ private:
*/
SimpleCameraData::SimpleCameraData(SimplePipelineHandler *pipe,
+ unsigned int numStreams,
MediaEntity *sensor)
- : CameraData(pipe)
+ : CameraData(pipe), streams_(numStreams)
{
int ret;
/*
- * Walk the pipeline towards the video node and store all entities
- * along the way.
+ * Find the shortest path from the camera sensor to a video capture
+ * device using the breadth-first search algorithm. This heuristic is
+ * likely to skip paths that aren't suitable for the simple pipeline
+ * handler on more complex devices, and is guaranteed to produce a
+ * valid path on all devices that have a single option.
+ *
+ * For instance, on the IPU-based i.MX6Q, the shortest path will skip
+ * encoders and image converters, and will end in a CSI capture device.
*/
- MediaEntity *source = sensor;
+ std::unordered_set<MediaEntity *> visited;
+ std::queue<MediaEntity *> queue;
- while (source) {
- /* If we have reached a video node, we're done. */
- if (source->function() == MEDIA_ENT_F_IO_V4L)
- break;
+ /* Remember at each entity where we came from. */
+ std::unordered_map<MediaEntity *, Entity> parents;
+ MediaEntity *entity = nullptr;
- /* Use the first output pad that has links. */
- MediaPad *sourcePad = nullptr;
- for (MediaPad *pad : source->pads()) {
- if ((pad->flags() & MEDIA_PAD_FL_SOURCE) &&
- !pad->links().empty()) {
- sourcePad = pad;
- break;
- }
- }
+ queue.push(sensor);
- if (!sourcePad)
- return;
+ while (!queue.empty()) {
+ entity = queue.front();
+ queue.pop();
- /*
- * Use the first link that is enabled or can be enabled (not
- * immutable).
- */
- MediaLink *sourceLink = nullptr;
- for (MediaLink *link : sourcePad->links()) {
- if ((link->flags() & MEDIA_LNK_FL_ENABLED) ||
- !(link->flags() & MEDIA_LNK_FL_IMMUTABLE)) {
- sourceLink = link;
- break;
- }
+ /* Found the capture device. */
+ if (entity->function() == MEDIA_ENT_F_IO_V4L) {
+ LOG(SimplePipeline, Debug)
+ << "Found capture device " << entity->name();
+ video_ = pipe->video(entity);
+ break;
}
- if (!sourceLink)
- return;
-
- entities_.push_back({ source, sourceLink });
-
- source = sourceLink->sink()->entity();
+ /* The actual breadth-first search algorithm. */
+ visited.insert(entity);
+ for (MediaPad *pad : entity->pads()) {
+ if (!(pad->flags() & MEDIA_PAD_FL_SOURCE))
+ continue;
- /* Avoid infinite loops. */
- auto iter = std::find_if(entities_.begin(), entities_.end(),
- [&](const Entity &entity) {
- return entity.entity == source;
- });
- if (iter != entities_.end()) {
- LOG(SimplePipeline, Info) << "Loop detected in pipeline";
- return;
+ for (MediaLink *link : pad->links()) {
+ MediaEntity *next = link->sink()->entity();
+ if (visited.find(next) == visited.end()) {
+ queue.push(next);
+ parents.insert({ next, { entity, link } });
+ }
+ }
}
}
- /*
- * We have a valid pipeline, get the video device and create the camera
- * sensor.
- */
- video_ = pipe->video(source);
if (!video_)
return;
+ /*
+ * With the parents recorded, we can trace our way back from the
+ * capture device to the sensor.
+ */
+ for (auto it = parents.find(entity); it != parents.end();
+ it = parents.find(entity)) {
+ const Entity &e = it->second;
+ entities_.push_front(e);
+ entity = e.entity;
+ }
+
+ /* Finally, remember the sensor itself. */
sensor_ = std::make_unique<CameraSensor>(sensor);
ret = sensor_->init();
if (ret) {
sensor_.reset();
return;
}
+
+ LOG(SimplePipeline, Debug)
+ << "Found pipeline: "
+ << utils::join(entities_, " -> ",
+ [](const Entity &e) { return e.entity->name(); });
}
int SimpleCameraData::init()
@@ -288,13 +396,6 @@ int SimpleCameraData::init()
})
<< " ]";
- /*
- * Store the configuration in the formats_ map, mapping the
- * PixelFormat to the corresponding configuration. Any
- * previously stored value is overwritten, as the pipeline
- * handler currently doesn't care about how a particular
- * PixelFormat is achieved.
- */
for (const auto &videoFormat : videoFormats) {
PixelFormat pixelFormat = videoFormat.first.toPixelFormat();
if (!pixelFormat)
@@ -302,27 +403,40 @@ int SimpleCameraData::init()
Configuration config;
config.code = code;
- config.pixelFormat = pixelFormat;
+ config.captureFormat = pixelFormat;
config.captureSize = format.size;
if (!converter) {
+ config.outputFormats = { pixelFormat };
config.outputSizes = config.captureSize;
- formats_[pixelFormat] = config;
- continue;
+ } else {
+ config.outputFormats = converter->formats(pixelFormat);
+ config.outputSizes = converter->sizes(format.size);
}
- config.outputSizes = converter->sizes(format.size);
-
- for (PixelFormat format : converter->formats(pixelFormat))
- formats_[format] = config;
+ configs_.push_back(config);
}
}
- if (formats_.empty()) {
+ if (configs_.empty()) {
LOG(SimplePipeline, Error) << "No valid configuration found";
return -EINVAL;
}
+ /*
+ * Map the pixel formats to configurations. Any previously stored value
+ * is overwritten, as the pipeline handler currently doesn't care about
+ * how a particular PixelFormat is achieved.
+ */
+ for (const Configuration &config : configs_) {
+ formats_[config.captureFormat] = &config;
+
+ for (PixelFormat fmt : config.outputFormats)
+ formats_[fmt] = &config;
+ }
+
+ properties_ = sensor_->properties();
+
return 0;
}
@@ -427,7 +541,7 @@ int SimpleCameraData::setupFormats(V4L2SubdeviceFormat *format,
SimpleCameraConfiguration::SimpleCameraConfiguration(Camera *camera,
SimpleCameraData *data)
: CameraConfiguration(), camera_(camera->shared_from_this()),
- data_(data)
+ data_(data), pipeConfig_(nullptr)
{
}
@@ -444,63 +558,96 @@ CameraConfiguration::Status SimpleCameraConfiguration::validate()
}
/* Cap the number of entries to the available streams. */
- if (config_.size() > 1) {
- config_.resize(1);
+ if (config_.size() > data_->streams_.size()) {
+ config_.resize(data_->streams_.size());
status = Adjusted;
}
- StreamConfiguration &cfg = config_[0];
-
- /* Adjust the pixel format. */
- auto it = data_->formats_.find(cfg.pixelFormat);
- if (it == data_->formats_.end())
- it = data_->formats_.begin();
-
- PixelFormat pixelFormat = it->first;
- if (cfg.pixelFormat != pixelFormat) {
- LOG(SimplePipeline, Debug) << "Adjusting pixel format";
- cfg.pixelFormat = pixelFormat;
- status = Adjusted;
- }
+ /*
+ * Pick a configuration for the pipeline based on the pixel format for
+ * the streams (ordered from highest to lowest priority). Default to
+ * the first pipeline configuration if no stream requests a supported
+ * pixel format.
+ */
+ pipeConfig_ = data_->formats_.begin()->second;
- const SimpleCameraData::Configuration &pipeConfig = it->second;
- if (!pipeConfig.outputSizes.contains(cfg.size)) {
- LOG(SimplePipeline, Debug)
- << "Adjusting size from " << cfg.size.toString()
- << " to " << pipeConfig.captureSize.toString();
- cfg.size = pipeConfig.captureSize;
- status = Adjusted;
+ for (const StreamConfiguration &cfg : config_) {
+ auto it = data_->formats_.find(cfg.pixelFormat);
+ if (it != data_->formats_.end()) {
+ pipeConfig_ = it->second;
+ break;
+ }
}
- needConversion_ = cfg.pixelFormat != pipeConfig.pixelFormat
- || cfg.size != pipeConfig.captureSize;
+ /* Adjust the requested streams. */
+ SimplePipelineHandler *pipe = static_cast<SimplePipelineHandler *>(data_->pipe_);
+ SimpleConverter *converter = pipe->converter();
- cfg.bufferCount = 3;
+ /*
+ * Enable usage of the converter when producing multiple streams, as
+ * the video capture device can't capture to multiple buffers.
+ *
+ * At most one stream could be produced without conversion (provided
+ * its format and size match the capture device), at the expense of
+ * more complex buffer handling, including allocation of internal
+ * buffers to be used when a request doesn't contain a buffer for
+ * that stream, similar to raw capture use cases. This is left as a
+ * future improvement.
+ */
+ needConversion_ = config_.size() > 1;
+
+ for (unsigned int i = 0; i < config_.size(); ++i) {
+ StreamConfiguration &cfg = config_[i];
+
+ /* Adjust the pixel format and size. */
+ auto it = std::find(pipeConfig_->outputFormats.begin(),
+ pipeConfig_->outputFormats.end(),
+ cfg.pixelFormat);
+ if (it == pipeConfig_->outputFormats.end())
+ it = pipeConfig_->outputFormats.begin();
+
+ PixelFormat pixelFormat = *it;
+ if (cfg.pixelFormat != pixelFormat) {
+ LOG(SimplePipeline, Debug) << "Adjusting pixel format";
+ cfg.pixelFormat = pixelFormat;
+ status = Adjusted;
+ }
- /* Set the stride and frameSize. */
- if (!needConversion_) {
- V4L2DeviceFormat format = {};
- format.fourcc = data_->video_->toV4L2PixelFormat(cfg.pixelFormat);
- format.size = cfg.size;
+ if (!pipeConfig_->outputSizes.contains(cfg.size)) {
+ LOG(SimplePipeline, Debug)
+ << "Adjusting size from " << cfg.size.toString()
+ << " to " << pipeConfig_->captureSize.toString();
+ cfg.size = pipeConfig_->captureSize;
+ status = Adjusted;
+ }
- int ret = data_->video_->tryFormat(&format);
- if (ret < 0)
- return Invalid;
+ /* \todo Create a libcamera core class to group format and size */
+ if (cfg.pixelFormat != pipeConfig_->captureFormat ||
+ cfg.size != pipeConfig_->captureSize)
+ needConversion_ = true;
+
+ /* Set the stride, frameSize and bufferCount. */
+ if (needConversion_) {
+ std::tie(cfg.stride, cfg.frameSize) =
+ converter->strideAndFrameSize(cfg.pixelFormat, cfg.size);
+ if (cfg.stride == 0)
+ return Invalid;
+ } else {
+ V4L2DeviceFormat format;
+ format.fourcc = data_->video_->toV4L2PixelFormat(cfg.pixelFormat);
+ format.size = cfg.size;
+
+ int ret = data_->video_->tryFormat(&format);
+ if (ret < 0)
+ return Invalid;
- cfg.stride = format.planes[0].bpl;
- cfg.frameSize = format.planes[0].size;
+ cfg.stride = format.planes[0].bpl;
+ cfg.frameSize = format.planes[0].size;
+ }
- return status;
+ cfg.bufferCount = 3;
}
- SimplePipelineHandler *pipe = static_cast<SimplePipelineHandler *>(data_->pipe_);
- SimpleConverter *converter = pipe->converter();
-
- std::tie(cfg.stride, cfg.frameSize) =
- converter->strideAndFrameSize(cfg.size, cfg.pixelFormat);
- if (cfg.stride == 0)
- return Invalid;
-
return status;
}
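
Seen from an application, validate() keeps the usual libcamera contract: request a configuration, check whether it was adjusted, then apply it. A minimal sketch, assuming the contemporary application-facing API and a std::shared_ptr<Camera> camera:

std::unique_ptr<CameraConfiguration> config =
        camera->generateConfiguration({ StreamRole::Viewfinder });

StreamConfiguration &cfg = config->at(0);
cfg.pixelFormat = formats::NV12;    /* may be adjusted by the handler */
cfg.size = { 1280, 720 };

switch (config->validate()) {
case CameraConfiguration::Invalid:
        return;                     /* nothing usable */
case CameraConfiguration::Adjusted:
        std::cout << "Adjusted to " << cfg.toString() << std::endl;
        break;
default:
        break;
}

camera->configure(config.get());
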
@@ -509,15 +656,10 @@ CameraConfiguration::Status SimpleCameraConfiguration::validate()
*/
SimplePipelineHandler::SimplePipelineHandler(CameraManager *manager)
- : PipelineHandler(manager), converter_(nullptr)
+ : PipelineHandler(manager)
{
}
-SimplePipelineHandler::~SimplePipelineHandler()
-{
- delete converter_;
-}
-
CameraConfiguration *SimplePipelineHandler::generateConfiguration(Camera *camera,
const StreamRoles &roles)
{
@@ -534,21 +676,23 @@ CameraConfiguration *SimplePipelineHandler::generateConfiguration(Camera *camera
std::inserter(formats, formats.end()),
[](const auto &format) -> decltype(formats)::value_type {
const PixelFormat &pixelFormat = format.first;
- const Size &size = format.second.captureSize;
+ const Size &size = format.second->captureSize;
return { pixelFormat, { size } };
});
/*
- * Create the stream configuration. Take the first entry in the formats
+ * Create the stream configurations. Take the first entry in the formats
* map as the default, for lack of a better option.
*
* \todo Implement a better way to pick the default format
*/
- StreamConfiguration cfg{ StreamFormats{ formats } };
- cfg.pixelFormat = formats.begin()->first;
- cfg.size = formats.begin()->second[0].max;
+ for ([[maybe_unused]] StreamRole role : roles) {
+ StreamConfiguration cfg{ StreamFormats{ formats } };
+ cfg.pixelFormat = formats.begin()->first;
+ cfg.size = formats.begin()->second[0].max;
- config->addConfiguration(cfg);
+ config->addConfiguration(cfg);
+ }
config->validate();
@@ -561,7 +705,6 @@ int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
static_cast<SimpleCameraConfiguration *>(c);
SimpleCameraData *data = cameraData(camera);
V4L2VideoDevice *video = data->video_;
- StreamConfiguration &cfg = config->at(0);
int ret;
/*
@@ -572,53 +715,62 @@ int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
if (ret < 0)
return ret;
- const SimpleCameraData::Configuration &pipeConfig =
- data->formats_[cfg.pixelFormat];
-
- V4L2SubdeviceFormat format{ pipeConfig.code, data->sensor_->resolution() };
+ const SimpleCameraData::Configuration *pipeConfig = config->pipeConfig();
+ V4L2SubdeviceFormat format{ pipeConfig->code, data->sensor_->resolution() };
ret = data->setupFormats(&format, V4L2Subdevice::ActiveFormat);
if (ret < 0)
return ret;
/* Configure the video node. */
- V4L2PixelFormat videoFormat = video->toV4L2PixelFormat(pipeConfig.pixelFormat);
+ V4L2PixelFormat videoFormat = video->toV4L2PixelFormat(pipeConfig->captureFormat);
- V4L2DeviceFormat captureFormat = {};
+ V4L2DeviceFormat captureFormat;
captureFormat.fourcc = videoFormat;
- captureFormat.size = pipeConfig.captureSize;
+ captureFormat.size = pipeConfig->captureSize;
ret = video->setFormat(&captureFormat);
if (ret)
return ret;
+ if (captureFormat.planesCount != 1) {
+ LOG(SimplePipeline, Error)
+ << "Planar formats using non-contiguous memory not supported";
+ return -EINVAL;
+ }
+
if (captureFormat.fourcc != videoFormat ||
- captureFormat.size != pipeConfig.captureSize) {
+ captureFormat.size != pipeConfig->captureSize) {
LOG(SimplePipeline, Error)
<< "Unable to configure capture in "
- << pipeConfig.captureSize.toString() << "-"
+ << pipeConfig->captureSize.toString() << "-"
<< videoFormat.toString();
return -EINVAL;
}
- /* Configure the converter if required. */
- useConverter_ = config->needConversion();
+ /* Configure the converter if needed. */
+ std::vector<std::reference_wrapper<StreamConfiguration>> outputCfgs;
+ data->useConverter_ = config->needConversion();
- if (useConverter_) {
- int ret = converter_->configure(pipeConfig.pixelFormat,
- pipeConfig.captureSize, &cfg);
- if (ret < 0) {
- LOG(SimplePipeline, Error)
- << "Unable to configure converter";
- return ret;
- }
+ for (unsigned int i = 0; i < config->size(); ++i) {
+ StreamConfiguration &cfg = config->at(i);
+
+ cfg.setStream(&data->streams_[i]);
- LOG(SimplePipeline, Debug) << "Using format converter";
+ if (data->useConverter_)
+ outputCfgs.push_back(cfg);
}
- cfg.setStream(&data->stream_);
+ if (outputCfgs.empty())
+ return 0;
- return 0;
+ StreamConfiguration inputCfg;
+ inputCfg.pixelFormat = pipeConfig->captureFormat;
+ inputCfg.size = pipeConfig->captureSize;
+ inputCfg.stride = captureFormat.planes[0].bpl;
+ inputCfg.bufferCount = kNumInternalBuffers;
+
+ return converter_->configure(inputCfg, outputCfgs);
}
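
The std::reference_wrapper vector is what lets configure() hand the caller's own StreamConfiguration objects to the converter without copying them, since std::vector<StreamConfiguration &> is ill-formed. The pattern in isolation; tweak() is an illustrative name:

#include <functional>
#include <vector>

#include <libcamera/stream.h>

void tweak(const std::vector<std::reference_wrapper<libcamera::StreamConfiguration>> &cfgs)
{
        for (libcamera::StreamConfiguration &cfg : cfgs)    /* the wrapper converts implicitly */
                cfg.bufferCount = 3;
}

libcamera::StreamConfiguration a, b;
std::vector<std::reference_wrapper<libcamera::StreamConfiguration>> cfgs{ a, b };
tweak(cfgs);    /* a.bufferCount and b.bufferCount are now 3 */
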
int SimplePipelineHandler::exportFrameBuffers(Camera *camera, Stream *stream,
@@ -631,23 +783,31 @@ int SimplePipelineHandler::exportFrameBuffers(Camera *camera, Stream *stream,
* Export buffers on the converter or capture video node, depending on
* whether the converter is used or not.
*/
- if (useConverter_)
- return converter_->exportBuffers(count, buffers);
+ if (data->useConverter_)
+ return converter_->exportBuffers(data->streamIndex(stream),
+ count, buffers);
else
return data->video_->exportBuffers(count, buffers);
}
-int SimplePipelineHandler::start(Camera *camera)
+int SimplePipelineHandler::start(Camera *camera, [[maybe_unused]] const ControlList *controls)
{
SimpleCameraData *data = cameraData(camera);
V4L2VideoDevice *video = data->video_;
- unsigned int count = data->stream_.configuration().bufferCount;
int ret;
- if (useConverter_)
- ret = video->allocateBuffers(count, &converterBuffers_);
- else
- ret = video->importBuffers(count);
+ if (data->useConverter_) {
+ /*
+ * When using the converter, allocate a fixed number of internal
+ * buffers.
+ */
+ ret = video->allocateBuffers(kNumInternalBuffers,
+ &data->converterBuffers_);
+ } else {
+ /* Otherwise, prepare for using buffers from the only stream. */
+ Stream *stream = &data->streams_[0];
+ ret = video->importBuffers(stream->configuration().bufferCount);
+ }
if (ret < 0)
return ret;
@@ -657,15 +817,15 @@ int SimplePipelineHandler::start(Camera *camera)
return ret;
}
- if (useConverter_) {
- ret = converter_->start(count);
+ if (data->useConverter_) {
+ ret = converter_->start();
if (ret < 0) {
stop(camera);
return ret;
}
/* Queue all internal buffers for capture. */
- for (std::unique_ptr<FrameBuffer> &buffer : converterBuffers_)
+ for (std::unique_ptr<FrameBuffer> &buffer : data->converterBuffers_)
video->queueBuffer(buffer.get());
}
@@ -679,79 +839,138 @@ void SimplePipelineHandler::stop(Camera *camera)
SimpleCameraData *data = cameraData(camera);
V4L2VideoDevice *video = data->video_;
- if (useConverter_)
+ if (data->useConverter_)
converter_->stop();
video->streamOff();
video->releaseBuffers();
- converterBuffers_.clear();
+ data->converterBuffers_.clear();
activeCamera_ = nullptr;
}
int SimplePipelineHandler::queueRequestDevice(Camera *camera, Request *request)
{
SimpleCameraData *data = cameraData(camera);
- Stream *stream = &data->stream_;
+ int ret;
- FrameBuffer *buffer = request->findBuffer(stream);
- if (!buffer) {
- LOG(SimplePipeline, Error)
- << "Attempt to queue request with invalid stream";
- return -ENOENT;
- }
+ std::map<unsigned int, FrameBuffer *> buffers;
- /*
- * If conversion is needed, push the buffer to the converter queue, it
- * will be handed to the converter in the capture completion handler.
- */
- if (useConverter_) {
- converterQueue_.push(buffer);
- return 0;
+ for (auto &[stream, buffer] : request->buffers()) {
+ /*
+ * If conversion is needed, push the buffer to the converter
+ * queue, it will be handed to the converter in the capture
+ * completion handler.
+ */
+ if (data->useConverter_) {
+ buffers.emplace(data->streamIndex(stream), buffer);
+ } else {
+ ret = data->video_->queueBuffer(buffer);
+ if (ret < 0)
+ return ret;
+ }
}
- return data->video_->queueBuffer(buffer);
+ if (data->useConverter_)
+ data->converterQueue_.push(std::move(buffers));
+
+ return 0;
}
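
On the application side, this allows a single request to carry one buffer per configured stream. A hedged sketch, with buffer allocation omitted and streamA/streamB/bufferA/bufferB as illustrative names:

std::unique_ptr<Request> request = camera->createRequest();
request->addBuffer(streamA, bufferA);    /* one buffer per stream */
request->addBuffer(streamB, bufferB);
camera->queueRequest(request.get());
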
/* -----------------------------------------------------------------------------
* Match and Setup
*/
+std::vector<MediaEntity *> SimplePipelineHandler::locateSensors()
+{
+ std::vector<MediaEntity *> entities;
+
+ /*
+ * Gather all the camera sensor entities based on the function they
+ * expose.
+ */
+ for (MediaEntity *entity : media_->entities()) {
+ if (entity->function() == MEDIA_ENT_F_CAM_SENSOR)
+ entities.push_back(entity);
+ }
+
+ if (entities.empty())
+ return {};
+
+ /*
+ * Sensors can be made of multiple entities. For instance, a raw sensor
+ * can be connected to an ISP, and the combination of both should be
+ * treated as one sensor. To support this, as a crude heuristic, check
+ * the downstream entity from the camera sensor, and if it is an ISP,
+ * use it instead of the sensor.
+ */
+ std::vector<MediaEntity *> sensors;
+
+ for (MediaEntity *entity : entities) {
+ /*
+ * Locate the downstream entity by following the first link
+ * from a source pad.
+ */
+ const MediaLink *link = nullptr;
+
+ for (const MediaPad *pad : entity->pads()) {
+ if ((pad->flags() & MEDIA_PAD_FL_SOURCE) &&
+ !pad->links().empty()) {
+ link = pad->links()[0];
+ break;
+ }
+ }
+
+ if (!link)
+ continue;
+
+ MediaEntity *remote = link->sink()->entity();
+ if (remote->function() == MEDIA_ENT_F_PROC_VIDEO_ISP)
+ sensors.push_back(remote);
+ else
+ sensors.push_back(entity);
+ }
+
+ /*
+ * Remove duplicates, in case multiple sensors are connected to the
+ * same ISP.
+ */
+ std::sort(sensors.begin(), sensors.end());
+ auto last = std::unique(sensors.begin(), sensors.end());
+ sensors.erase(last, sensors.end());
+
+ return sensors;
+}
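
The duplicate removal above is the classic sort-then-unique idiom, valid here because the entities compare by pointer value. In isolation:

#include <algorithm>
#include <vector>

#include "libcamera/internal/media_object.h"

void dedup(std::vector<libcamera::MediaEntity *> &sensors)
{
        /* std::unique() only collapses adjacent duplicates, so sort first. */
        std::sort(sensors.begin(), sensors.end());
        auto last = std::unique(sensors.begin(), sensors.end());
        sensors.erase(last, sensors.end());
}
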
+
bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
{
+ const SimplePipelineInfo *info = nullptr;
MediaDevice *converter = nullptr;
+ unsigned int numStreams = 1;
- for (const SimplePipelineInfo &info : supportedDevices) {
- DeviceMatch dm(info.driver);
+ for (const SimplePipelineInfo &inf : supportedDevices) {
+ DeviceMatch dm(inf.driver);
media_ = acquireMediaDevice(enumerator, dm);
- if (!media_)
- continue;
-
- if (!info.converter)
+ if (media_) {
+ info = &inf;
break;
-
- DeviceMatch converterMatch(info.converter);
- converter = acquireMediaDevice(enumerator, converterMatch);
- break;
+ }
}
if (!media_)
return false;
- /* Locate the sensors. */
- std::vector<MediaEntity *> sensors;
-
- for (MediaEntity *entity : media_->entities()) {
- switch (entity->function()) {
- case MEDIA_ENT_F_CAM_SENSOR:
- sensors.push_back(entity);
- break;
-
- default:
+ for (const auto &[name, streams] : info->converters) {
+ DeviceMatch converterMatch(name);
+ converter = acquireMediaDevice(enumerator, converterMatch);
+ if (converter) {
+ numStreams = streams;
break;
}
}
+ /* Locate the sensors. */
+ std::vector<MediaEntity *> sensors = locateSensors();
if (sensors.empty()) {
LOG(SimplePipeline, Error) << "No sensor found";
return false;
@@ -759,15 +978,15 @@ bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
/* Open the converter, if any. */
if (converter) {
- converter_ = new SimpleConverter(converter);
- if (converter_->open() < 0) {
+ converter_ = std::make_unique<SimpleConverter>(converter);
+ if (!converter_->isValid()) {
LOG(SimplePipeline, Warning)
- << "Failed to open converter, disabling format conversion";
- delete converter_;
- converter_ = nullptr;
+ << "Failed to create converter, disabling format conversion";
+ converter_.reset();
+ } else {
+ converter_->inputBufferReady.connect(this, &SimplePipelineHandler::converterInputDone);
+ converter_->outputBufferReady.connect(this, &SimplePipelineHandler::converterOutputDone);
}
-
- converter_->bufferReady.connect(this, &SimplePipelineHandler::converterDone);
}
/*
@@ -781,7 +1000,7 @@ bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
for (MediaEntity *sensor : sensors) {
std::unique_ptr<SimpleCameraData> data =
- std::make_unique<SimpleCameraData>(this, sensor);
+ std::make_unique<SimpleCameraData>(this, numStreams, sensor);
if (!data->isValid()) {
LOG(SimplePipeline, Error)
<< "No valid pipeline for sensor '"
@@ -814,18 +1033,25 @@ bool SimplePipelineHandler::match(DeviceEnumerator *enumerator)
}
/* Initialize each pipeline and register a corresponding camera. */
+ bool registered = false;
+
for (std::unique_ptr<SimpleCameraData> &data : pipelines) {
int ret = data->init();
if (ret < 0)
continue;
+ std::set<Stream *> streams;
+ std::transform(data->streams_.begin(), data->streams_.end(),
+ std::inserter(streams, streams.end()),
+ [](Stream &stream) { return &stream; });
+
std::shared_ptr<Camera> camera =
- Camera::create(this, data->sensor_->id(),
- data->streams());
+ Camera::create(this, data->sensor_->id(), streams);
registerCamera(std::move(camera), std::move(data));
+ registered = true;
}
- return true;
+ return registered;
}
V4L2VideoDevice *SimplePipelineHandler::video(const MediaEntity *entity)
@@ -845,12 +1071,6 @@ V4L2VideoDevice *SimplePipelineHandler::video(const MediaEntity *entity)
if (video->open() < 0)
return nullptr;
- if (video->caps().isMultiplanar()) {
- LOG(SimplePipeline, Error)
- << "V4L2 multiplanar devices are not supported";
- return nullptr;
- }
-
video->bufferReady.connect(this, &SimplePipelineHandler::bufferReady);
auto element = videos_.emplace(entity, std::move(video));
@@ -881,66 +1101,102 @@ void SimplePipelineHandler::bufferReady(FrameBuffer *buffer)
* point converting an erroneous buffer.
*/
if (buffer->metadata().status != FrameMetadata::FrameSuccess) {
- if (useConverter_) {
- /* Requeue the buffer for capture. */
+ if (!data->useConverter_) {
+ /* No conversion, just complete the request. */
+ Request *request = buffer->request();
+ completeBuffer(request, buffer);
+ completeRequest(request);
+ return;
+ }
+
+ /*
+ * The converter is in use. Requeue the internal buffer for
+ * capture (unless the stream is being stopped), and complete
+ * the request with all the user-facing buffers.
+ */
+ if (buffer->metadata().status != FrameMetadata::FrameCancelled)
data->video_->queueBuffer(buffer);
- /*
- * Get the next user-facing buffer to complete the
- * request.
- */
- if (converterQueue_.empty())
- return;
+ if (data->converterQueue_.empty())
+ return;
- buffer = converterQueue_.front();
- converterQueue_.pop();
+ Request *request = nullptr;
+ for (auto &item : data->converterQueue_.front()) {
+ FrameBuffer *outputBuffer = item.second;
+ request = outputBuffer->request();
+ completeBuffer(request, outputBuffer);
}
+ data->converterQueue_.pop();
- Request *request = buffer->request();
- completeBuffer(activeCamera_, request, buffer);
- completeRequest(activeCamera_, request);
+ if (request)
+ completeRequest(request);
return;
}
/*
+ * Record the sensor's timestamp in the request metadata. The request
+ * needs to be obtained from the user-facing buffer, as internal
+ * buffers are free-wheeling and have no request associated with them.
+ *
+ * \todo The sensor timestamp should be better estimated by connecting
+ * to the V4L2Device::frameStart signal if the platform provides it.
+ */
+ Request *request = buffer->request();
+
+ if (data->useConverter_ && !data->converterQueue_.empty()) {
+ const std::map<unsigned int, FrameBuffer *> &outputs =
+ data->converterQueue_.front();
+ if (!outputs.empty()) {
+ FrameBuffer *outputBuffer = outputs.begin()->second;
+ if (outputBuffer)
+ request = outputBuffer->request();
+ }
+ }
+
+ if (request)
+ request->metadata().set(controls::SensorTimestamp,
+ buffer->metadata().timestamp);
+
+ /*
* Queue the captured and the request buffer to the converter if format
* conversion is needed. If there's no queued request, just requeue the
* captured buffer for capture.
*/
- if (useConverter_) {
- if (converterQueue_.empty()) {
+ if (data->useConverter_) {
+ if (data->converterQueue_.empty()) {
data->video_->queueBuffer(buffer);
return;
}
- FrameBuffer *output = converterQueue_.front();
- converterQueue_.pop();
-
- converter_->queueBuffers(buffer, output);
+ converter_->queueBuffers(buffer, data->converterQueue_.front());
+ data->converterQueue_.pop();
return;
}
/* Otherwise simply complete the request. */
- Request *request = buffer->request();
- completeBuffer(activeCamera_, request, buffer);
- completeRequest(activeCamera_, request);
+ completeBuffer(request, buffer);
+ completeRequest(request);
}
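
An application can read the timestamp back from the completed request's metadata. A sketch, assuming the contemporary ControlList accessors:

#include <iostream>

#include <libcamera/control_ids.h>
#include <libcamera/request.h>

void requestComplete(libcamera::Request *request)
{
        const libcamera::ControlList &metadata = request->metadata();

        if (metadata.contains(libcamera::controls::SensorTimestamp)) {
                int64_t timestamp = metadata.get(libcamera::controls::SensorTimestamp);
                std::cout << "frame captured at " << timestamp << " ns" << std::endl;
        }
}
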
-void SimplePipelineHandler::converterDone(FrameBuffer *input,
- FrameBuffer *output)
+void SimplePipelineHandler::converterInputDone(FrameBuffer *buffer)
{
ASSERT(activeCamera_);
SimpleCameraData *data = cameraData(activeCamera_);
- /* Complete the request. */
- Request *request = output->request();
- completeBuffer(activeCamera_, request, output);
- completeRequest(activeCamera_, request);
-
/* Queue the input buffer back for capture. */
- data->video_->queueBuffer(input);
+ data->video_->queueBuffer(buffer);
+}
+
+void SimplePipelineHandler::converterOutputDone(FrameBuffer *buffer)
+{
+ ASSERT(activeCamera_);
+
+ /* Complete the buffer and the request. */
+ Request *request = buffer->request();
+ if (completeBuffer(request, buffer))
+ completeRequest(request);
}
-REGISTER_PIPELINE_HANDLER(SimplePipelineHandler);
+REGISTER_PIPELINE_HANDLER(SimplePipelineHandler)
} /* namespace libcamera */
diff --git a/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp b/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
index ba0efc8b..0f634b8d 100644
--- a/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
+++ b/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
@@ -9,8 +9,12 @@
#include <fstream>
#include <iomanip>
#include <math.h>
+#include <memory>
#include <tuple>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include <libcamera/camera.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
@@ -19,12 +23,9 @@
#include <libcamera/stream.h>
#include "libcamera/internal/device_enumerator.h"
-#include "libcamera/internal/log.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/pipeline_handler.h"
#include "libcamera/internal/sysfs.h"
-#include "libcamera/internal/utils.h"
-#include "libcamera/internal/v4l2_controls.h"
#include "libcamera/internal/v4l2_videodevice.h"
namespace libcamera {
@@ -35,21 +36,16 @@ class UVCCameraData : public CameraData
{
public:
UVCCameraData(PipelineHandler *pipe)
- : CameraData(pipe), video_(nullptr)
- {
- }
-
- ~UVCCameraData()
+ : CameraData(pipe)
{
- delete video_;
}
- int init(MediaEntity *entity);
+ int init(MediaDevice *media);
void addControl(uint32_t cid, const ControlInfo &v4l2info,
ControlInfoMap::Map *ctrls);
void bufferReady(FrameBuffer *buffer);
- V4L2VideoDevice *video_;
+ std::unique_ptr<V4L2VideoDevice> video_;
Stream stream_;
};
@@ -76,7 +72,7 @@ public:
int exportFrameBuffers(Camera *camera, Stream *stream,
std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;
- int start(Camera *camera) override;
+ int start(Camera *camera, const ControlList *controls) override;
void stop(Camera *camera) override;
int queueRequestDevice(Camera *camera, Request *request) override;
@@ -154,7 +150,7 @@ CameraConfiguration::Status UVCCameraConfiguration::validate()
cfg.bufferCount = 4;
- V4L2DeviceFormat format = {};
+ V4L2DeviceFormat format;
format.fourcc = data_->video_->toV4L2PixelFormat(cfg.pixelFormat);
format.size = cfg.size;
@@ -210,7 +206,7 @@ int PipelineHandlerUVC::configure(Camera *camera, CameraConfiguration *config)
StreamConfiguration &cfg = config->at(0);
int ret;
- V4L2DeviceFormat format = {};
+ V4L2DeviceFormat format;
format.fourcc = data->video_->toV4L2PixelFormat(cfg.pixelFormat);
format.size = cfg.size;
@@ -236,7 +232,7 @@ int PipelineHandlerUVC::exportFrameBuffers(Camera *camera, Stream *stream,
return data->video_->exportBuffers(count, buffers);
}
-int PipelineHandlerUVC::start(Camera *camera)
+int PipelineHandlerUVC::start(Camera *camera, [[maybe_unused]] const ControlList *controls)
{
UVCCameraData *data = cameraData(camera);
unsigned int count = data->stream_.configuration().bufferCount;
@@ -433,7 +429,7 @@ std::string PipelineHandlerUVC::generateId(const UVCCameraData *data)
/* Create a device ID from the USB device's vendor and product ID. */
std::string deviceId;
- for (const std::string &name : { "idVendor", "idProduct" }) {
+ for (const char *name : { "idVendor", "idProduct" }) {
std::ifstream file(path + "/../" + name);
if (!file.is_open())
@@ -463,18 +459,7 @@ bool PipelineHandlerUVC::match(DeviceEnumerator *enumerator)
std::unique_ptr<UVCCameraData> data = std::make_unique<UVCCameraData>(this);
- /* Locate and initialise the camera data with the default video node. */
- const std::vector<MediaEntity *> &entities = media->entities();
- auto entity = std::find_if(entities.begin(), entities.end(),
- [](MediaEntity *entity) {
- return entity->flags() & MEDIA_ENT_FL_DEFAULT;
- });
- if (entity == entities.end()) {
- LOG(UVC, Error) << "Could not find a default video device";
- return false;
- }
-
- if (data->init(*entity))
+ if (data->init(media))
return false;
/* Create and register the camera. */
@@ -494,12 +479,23 @@ bool PipelineHandlerUVC::match(DeviceEnumerator *enumerator)
return true;
}
-int UVCCameraData::init(MediaEntity *entity)
+int UVCCameraData::init(MediaDevice *media)
{
int ret;
+ /* Locate and initialise the camera data with the default video node. */
+ const std::vector<MediaEntity *> &entities = media->entities();
+ auto entity = std::find_if(entities.begin(), entities.end(),
+ [](MediaEntity *e) {
+ return e->flags() & MEDIA_ENT_FL_DEFAULT;
+ });
+ if (entity == entities.end()) {
+ LOG(UVC, Error) << "Could not find a default video device";
+ return -ENODEV;
+ }
+
/* Create and open the video device. */
- video_ = new V4L2VideoDevice(entity);
+ video_ = std::make_unique<V4L2VideoDevice>(*entity);
ret = video_->open();
if (ret)
return ret;
@@ -511,6 +507,23 @@ int UVCCameraData::init(MediaEntity *entity)
* Until then, treat all UVC cameras as external.
*/
properties_.set(properties::Location, properties::CameraLocationExternal);
+ properties_.set(properties::Model, utils::toAscii(media->model()));
+
+ /*
+ * Find the largest supported resolution in order to initialize the
+ * pixel array properties.
+ */
+ Size resolution;
+ for (const auto &it : video_->formats()) {
+ const std::vector<SizeRange> &sizeRanges = it.second;
+ for (const SizeRange &sizeRange : sizeRanges) {
+ if (sizeRange.max > resolution)
+ resolution = sizeRange.max;
+ }
+ }
+
+ properties_.set(properties::PixelArraySize, resolution);
+ properties_.set(properties::PixelArrayActiveAreas, { Rectangle(resolution) });
/* Initialise the supported controls. */
ControlInfoMap::Map ctrls;
@@ -649,10 +662,14 @@ void UVCCameraData::bufferReady(FrameBuffer *buffer)
{
Request *request = buffer->request();
- pipe_->completeBuffer(camera_, request, buffer);
- pipe_->completeRequest(camera_, request);
+ /* \todo Use the UVC metadata to calculate a more precise timestamp */
+ request->metadata().set(controls::SensorTimestamp,
+ buffer->metadata().timestamp);
+
+ pipe_->completeBuffer(request, buffer);
+ pipe_->completeRequest(request);
}
-REGISTER_PIPELINE_HANDLER(PipelineHandlerUVC);
+REGISTER_PIPELINE_HANDLER(PipelineHandlerUVC)
} /* namespace libcamera */
diff --git a/src/libcamera/pipeline/vimc/vimc.cpp b/src/libcamera/pipeline/vimc/vimc.cpp
index fc8085f1..12f7517f 100644
--- a/src/libcamera/pipeline/vimc/vimc.cpp
+++ b/src/libcamera/pipeline/vimc/vimc.cpp
@@ -14,23 +14,26 @@
#include <linux/media-bus-format.h>
#include <linux/version.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include <libcamera/camera.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/formats.h>
-#include <libcamera/ipa/ipa_interface.h>
-#include <libcamera/ipa/ipa_module_info.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>
+#include <libcamera/ipa/ipa_interface.h>
+#include <libcamera/ipa/ipa_module_info.h>
+#include <libcamera/ipa/vimc_ipa_interface.h>
+#include <libcamera/ipa/vimc_ipa_proxy.h>
+
#include "libcamera/internal/camera_sensor.h"
#include "libcamera/internal/device_enumerator.h"
#include "libcamera/internal/ipa_manager.h"
-#include "libcamera/internal/log.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/pipeline_handler.h"
-#include "libcamera/internal/utils.h"
-#include "libcamera/internal/v4l2_controls.h"
#include "libcamera/internal/v4l2_subdevice.h"
#include "libcamera/internal/v4l2_videodevice.h"
@@ -42,31 +45,22 @@ class VimcCameraData : public CameraData
{
public:
VimcCameraData(PipelineHandler *pipe, MediaDevice *media)
- : CameraData(pipe), media_(media), sensor_(nullptr),
- debayer_(nullptr), scaler_(nullptr), video_(nullptr),
- raw_(nullptr)
- {
- }
-
- ~VimcCameraData()
+ : CameraData(pipe), media_(media)
{
- delete sensor_;
- delete debayer_;
- delete scaler_;
- delete video_;
- delete raw_;
}
int init();
void bufferReady(FrameBuffer *buffer);
MediaDevice *media_;
- CameraSensor *sensor_;
- V4L2Subdevice *debayer_;
- V4L2Subdevice *scaler_;
- V4L2VideoDevice *video_;
- V4L2VideoDevice *raw_;
+ std::unique_ptr<CameraSensor> sensor_;
+ std::unique_ptr<V4L2Subdevice> debayer_;
+ std::unique_ptr<V4L2Subdevice> scaler_;
+ std::unique_ptr<V4L2VideoDevice> video_;
+ std::unique_ptr<V4L2VideoDevice> raw_;
Stream stream_;
+
+ std::unique_ptr<ipa::vimc::IPAProxyVimc> ipa_;
};
class VimcCameraConfiguration : public CameraConfiguration
@@ -92,7 +86,7 @@ public:
int exportFrameBuffers(Camera *camera, Stream *stream,
std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;
- int start(Camera *camera) override;
+ int start(Camera *camera, const ControlList *controls) override;
void stop(Camera *camera) override;
int queueRequestDevice(Camera *camera, Request *request) override;
@@ -172,7 +166,7 @@ CameraConfiguration::Status VimcCameraConfiguration::validate()
cfg.bufferCount = 4;
- V4L2DeviceFormat format = {};
+ V4L2DeviceFormat format;
format.fourcc = data_->video_->toV4L2PixelFormat(cfg.pixelFormat);
format.size = cfg.size;
@@ -276,7 +270,7 @@ int PipelineHandlerVimc::configure(Camera *camera, CameraConfiguration *config)
if (ret)
return ret;
- V4L2DeviceFormat format = {};
+ V4L2DeviceFormat format;
format.fourcc = data->video_->toV4L2PixelFormat(cfg.pixelFormat);
format.size = cfg.size;
@@ -313,7 +307,7 @@ int PipelineHandlerVimc::exportFrameBuffers(Camera *camera, Stream *stream,
return data->video_->exportBuffers(count, buffers);
}
-int PipelineHandlerVimc::start(Camera *camera)
+int PipelineHandlerVimc::start(Camera *camera, [[maybe_unused]] const ControlList *controls)
{
VimcCameraData *data = cameraData(camera);
unsigned int count = data->stream_.configuration().bufferCount;
@@ -428,18 +422,19 @@ bool PipelineHandlerVimc::match(DeviceEnumerator *enumerator)
std::unique_ptr<VimcCameraData> data = std::make_unique<VimcCameraData>(this, media);
- data->ipa_ = IPAManager::createIPA(this, 0, 0);
- if (data->ipa_ != nullptr) {
- std::string conf = data->ipa_->configurationFile("vimc.conf");
- data->ipa_->init(IPASettings{ conf });
- } else {
- LOG(VIMC, Warning) << "no matching IPA found";
- }
-
/* Locate and open the capture video node. */
if (data->init())
return false;
+ data->ipa_ = IPAManager::createIPA<ipa::vimc::IPAProxyVimc>(this, 0, 0);
+ if (!data->ipa_) {
+ LOG(VIMC, Error) << "no matching IPA found";
+ return false;
+ }
+
+ std::string conf = data->ipa_->configurationFile("vimc.conf");
+ data->ipa_->init(IPASettings{ conf, data->sensor_->model() });
+
/* Create and register the camera. */
std::set<Stream *> streams{ &data->stream_ };
std::shared_ptr<Camera> camera =
@@ -466,26 +461,26 @@ int VimcCameraData::init()
return ret;
/* Create and open the camera sensor, debayer, scaler and video device. */
- sensor_ = new CameraSensor(media_->getEntityByName("Sensor B"));
+ sensor_ = std::make_unique<CameraSensor>(media_->getEntityByName("Sensor B"));
ret = sensor_->init();
if (ret)
return ret;
- debayer_ = new V4L2Subdevice(media_->getEntityByName("Debayer B"));
+ debayer_ = V4L2Subdevice::fromEntityName(media_, "Debayer B");
if (debayer_->open())
return -ENODEV;
- scaler_ = new V4L2Subdevice(media_->getEntityByName("Scaler"));
+ scaler_ = V4L2Subdevice::fromEntityName(media_, "Scaler");
if (scaler_->open())
return -ENODEV;
- video_ = new V4L2VideoDevice(media_->getEntityByName("RGB/YUV Capture"));
+ video_ = V4L2VideoDevice::fromEntityName(media_, "RGB/YUV Capture");
if (video_->open())
return -ENODEV;
video_->bufferReady.connect(this, &VimcCameraData::bufferReady);
- raw_ = new V4L2VideoDevice(media_->getEntityByName("Raw Capture 1"));
+ raw_ = V4L2VideoDevice::fromEntityName(media_, "Raw Capture 1");
if (raw_->open())
return -ENODEV;
@@ -529,10 +524,14 @@ void VimcCameraData::bufferReady(FrameBuffer *buffer)
{
Request *request = buffer->request();
- pipe_->completeBuffer(camera_, request, buffer);
- pipe_->completeRequest(camera_, request);
+ /* Record the sensor's timestamp in the request metadata. */
+ request->metadata().set(controls::SensorTimestamp,
+ buffer->metadata().timestamp);
+
+ pipe_->completeBuffer(request, buffer);
+ pipe_->completeRequest(request);
}
-REGISTER_PIPELINE_HANDLER(PipelineHandlerVimc);
+REGISTER_PIPELINE_HANDLER(PipelineHandlerVimc)
} /* namespace libcamera */
diff --git a/src/libcamera/pipeline_handler.cpp b/src/libcamera/pipeline_handler.cpp
index 918aea1e..c9928d44 100644
--- a/src/libcamera/pipeline_handler.cpp
+++ b/src/libcamera/pipeline_handler.cpp
@@ -9,14 +9,16 @@
#include <sys/sysmacros.h>
-#include <libcamera/buffer.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>
+#include <libcamera/framebuffer.h>
#include "libcamera/internal/device_enumerator.h"
-#include "libcamera/internal/log.h"
#include "libcamera/internal/media_device.h"
-#include "libcamera/internal/utils.h"
+#include "libcamera/internal/tracepoints.h"
/**
* \file pipeline_handler.h
@@ -61,16 +63,6 @@ LOG_DEFINE_CATEGORY(Pipeline)
*/
/**
- * \var CameraData::camera_
- * \brief The camera related to this CameraData instance
- *
- * The camera_ pointer provides access to the Camera object that this instance
- * is related to. It is set when the Camera is registered with
- * PipelineHandler::registerCamera() and remains valid until the CameraData
- * instance is destroyed.
- */
-
-/**
* \var CameraData::pipe_
* \brief The pipeline handler related to this CameraData instance
*
@@ -107,11 +99,14 @@ LOG_DEFINE_CATEGORY(Pipeline)
*/
/**
- * \var CameraData::ipa_
- * \brief The IPA module used by the camera
+ * \var CameraData::requestSequence_
+ * \brief The queuing sequence of the request
+ *
+ * When requests are queued, they are given a per-camera sequence number to
+ * facilitate debugging of internal request usage.
*
- * Reference to the Image Processing Algorithms (IPA) operating on the camera's
- * stream(s). If no IPA exists for the camera, this field is set to nullptr.
+ * The requestSequence_ tracks the number of requests queued to a camera
+ * over its lifetime.
*/
/**
@@ -310,8 +305,8 @@ const ControlList &PipelineHandler::properties(const Camera *camera) const
* application.
*
* The configuration is guaranteed to have been validated with
- * CameraConfiguration::valid(). The pipeline handler implementation shall not
- * perform further validation and may rely on any custom field stored in its
+ * CameraConfiguration::validate(). The pipeline handler implementation shall
+ * not perform further validation and may rely on any custom field stored in its
* custom CameraConfiguration derived class.
*
* When configuring the camera the pipeline handler shall associate a Stream
@@ -351,6 +346,7 @@ const ControlList &PipelineHandler::properties(const Camera *camera) const
* \fn PipelineHandler::start()
* \brief Start capturing from a group of streams
* \param[in] camera The camera to start
+ * \param[in] controls Controls to be applied before starting the Camera
*
* Start the group of streams that have been configured for capture by
* \a configure(). The intended caller of this method is the Camera class which
@@ -374,33 +370,52 @@ const ControlList &PipelineHandler::properties(const Camera *camera) const
*/
/**
+ * \brief Determine if the camera has any requests pending
+ * \param[in] camera The camera to check
+ *
+ * This method determines if there are any requests queued to the pipeline
+ * awaiting processing.
+ *
+ * \return True if there are pending requests, or false otherwise
+ */
+bool PipelineHandler::hasPendingRequests(const Camera *camera) const
+{
+ const CameraData *data = cameraData(camera);
+ return !data->queuedRequests_.empty();
+}
+
+/**
* \fn PipelineHandler::queueRequest()
- * \brief Queue a request to the camera
- * \param[in] camera The camera to queue the request to
+ * \brief Queue a request
* \param[in] request The request to queue
*
* This method queues a capture request to the pipeline handler for processing.
* The request is first added to the internal list of queued requests, and
* then passed to the pipeline handler with a call to queueRequestDevice().
+ * If the pipeline handler fails to queue the request to the hardware, the
+ * request is cancelled.
*
* Keeping track of queued requests ensures automatic completion of all requests
* when the pipeline handler is stopped with stop(). Request completion shall be
* signalled by the pipeline handler using the completeRequest() method.
*
* \context This function is called from the CameraManager thread.
- *
- * \return 0 on success or a negative error code otherwise
*/
-int PipelineHandler::queueRequest(Camera *camera, Request *request)
+void PipelineHandler::queueRequest(Request *request)
{
+ LIBCAMERA_TRACEPOINT(request_queue, request);
+
+ Camera *camera = request->camera_;
CameraData *data = cameraData(camera);
data->queuedRequests_.push_back(request);
- int ret = queueRequestDevice(camera, request);
- if (ret)
- data->queuedRequests_.remove(request);
+ request->sequence_ = data->requestSequence_++;
- return ret;
+ int ret = queueRequestDevice(camera, request);
+ if (ret) {
+ request->cancel();
+ completeRequest(request);
+ }
}
/**
@@ -422,7 +437,6 @@ int PipelineHandler::queueRequest(Camera *camera, Request *request)
/**
* \brief Complete a buffer for a request
- * \param[in] camera The camera the request belongs to
* \param[in] request The request the buffer belongs to
* \param[in] buffer The buffer that has completed
*
@@ -438,21 +452,20 @@ int PipelineHandler::queueRequest(Camera *camera, Request *request)
* \return True if all buffers contained in the request have completed, false
* otherwise
*/
-bool PipelineHandler::completeBuffer(Camera *camera, Request *request,
- FrameBuffer *buffer)
+bool PipelineHandler::completeBuffer(Request *request, FrameBuffer *buffer)
{
+ Camera *camera = request->camera_;
camera->bufferCompleted.emit(request, buffer);
return request->completeBuffer(buffer);
}
/**
* \brief Signal request completion
- * \param[in] camera The camera that the request belongs to
* \param[in] request The request that has completed
*
* The pipeline handler shall call this method to notify the \a camera that the
- * request has completed. The request is deleted and shall not be accessed once
- * this method returns.
+ * request has completed. The request is no longer managed by the pipeline
+ * handler and shall not be accessed once this method returns.
*
* This method ensures that requests will be returned to the application in
* submission order, the pipeline handler may call it on any complete request
@@ -460,8 +473,10 @@ bool PipelineHandler::completeBuffer(Camera *camera, Request *request,
*
* \context This function shall be called from the CameraManager thread.
*/
-void PipelineHandler::completeRequest(Camera *camera, Request *request)
+void PipelineHandler::completeRequest(Request *request)
{
+ Camera *camera = request->camera_;
+
request->complete();
CameraData *data = cameraData(camera);
@@ -492,7 +507,6 @@ void PipelineHandler::completeRequest(Camera *camera, Request *request)
void PipelineHandler::registerCamera(std::shared_ptr<Camera> camera,
std::unique_ptr<CameraData> data)
{
- data->camera_ = camera.get();
cameraData_[camera.get()] = std::move(data);
cameras_.push_back(camera);
@@ -689,21 +703,19 @@ void PipelineHandlerFactory::registerType(PipelineHandlerFactory *factory)
std::vector<PipelineHandlerFactory *> &factories = PipelineHandlerFactory::factories();
factories.push_back(factory);
-
- LOG(Pipeline, Debug)
- << "Registered pipeline handler \"" << factory->name() << "\"";
}
/**
* \brief Retrieve the list of all pipeline handler factories
- *
- * The static factories map is defined inside the function to ensures it gets
- * initialized on first use, without any dependency on link order.
- *
* \return the list of pipeline handler factories
*/
std::vector<PipelineHandlerFactory *> &PipelineHandlerFactory::factories()
{
+	/*
+	 * The static factories list is defined inside the function to ensure
+	 * it gets initialized on first use, without any dependency on
+	 * link order.
+	 */
static std::vector<PipelineHandlerFactory *> factories;
return factories;
}
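
With the Camera argument dropped from completeBuffer() and completeRequest(),
a completion slot only needs the Request, which now carries its Camera
internally. A condensed sketch of the generic pattern for a handler with
several buffers per request, where MyCameraData is a hypothetical CameraData
subclass:

    void MyCameraData::bufferReady(FrameBuffer *buffer)
    {
            Request *request = buffer->request();

            /*
             * completeBuffer() returns true once the last pending buffer of
             * the request has completed; only then may the request itself be
             * completed.
             */
            if (pipe_->completeBuffer(request, buffer))
                    pipe_->completeRequest(request);
    }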
diff --git a/src/libcamera/process.cpp b/src/libcamera/process.cpp
index 994190dc..4fe4ad57 100644
--- a/src/libcamera/process.cpp
+++ b/src/libcamera/process.cpp
@@ -20,10 +20,9 @@
#include <unistd.h>
#include <vector>
-#include <libcamera/event_notifier.h>
-
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/utils.h"
+#include <libcamera/base/event_notifier.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
/**
* \file process.h
@@ -41,28 +40,6 @@ LOG_DEFINE_CATEGORY(Process)
* The ProcessManager singleton keeps track of all created Process instances,
* and manages the signal handling involved in terminating processes.
*/
-class ProcessManager
-{
-public:
- void registerProcess(Process *proc);
-
- static ProcessManager *instance();
-
- int writePipe() const;
-
- const struct sigaction &oldsa() const;
-
-private:
- void sighandler(EventNotifier *notifier);
- ProcessManager();
- ~ProcessManager();
-
- std::list<Process *> processes_;
-
- struct sigaction oldsa_;
- EventNotifier *sigEvent_;
- int pipe_[2];
-};
namespace {
@@ -127,8 +104,20 @@ void ProcessManager::registerProcess(Process *proc)
processes_.push_back(proc);
}
+ProcessManager *ProcessManager::self_ = nullptr;
+
+/**
+ * \brief Construct a ProcessManager instance
+ *
+ * The ProcessManager class is meant to only be instantiated once, by the
+ * CameraManager.
+ */
ProcessManager::ProcessManager()
{
+ if (self_)
+ LOG(Process, Fatal)
+ << "Multiple ProcessManager objects are not allowed";
+
sigaction(SIGCHLD, NULL, &oldsa_);
struct sigaction sa;
@@ -145,6 +134,8 @@ ProcessManager::ProcessManager()
<< "Failed to initialize pipe for signal handling";
sigEvent_ = new EventNotifier(pipe_[0], EventNotifier::Read);
sigEvent_->activated.connect(this, &ProcessManager::sighandler);
+
+ self_ = this;
}
ProcessManager::~ProcessManager()
@@ -153,21 +144,21 @@ ProcessManager::~ProcessManager()
delete sigEvent_;
close(pipe_[0]);
close(pipe_[1]);
+
+ self_ = nullptr;
}
/**
* \brief Retrieve the Process manager instance
*
- * The ProcessManager is a singleton and can't be constructed manually. This
- * method shall instead be used to retrieve the single global instance of the
- * manager.
+ * The ProcessManager is constructed by the CameraManager. This function shall
+ * be used to retrieve the single instance of the manager.
*
* \return The Process manager instance
*/
ProcessManager *ProcessManager::instance()
{
- static ProcessManager processManager;
- return &processManager;
+ return self_;
}
/**
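
Stripped of the signal handling, the lifetime change replaces a function-local
static with an explicitly owned instance. A minimal sketch of the pattern,
assuming (as documented above) that the CameraManager is the sole owner:

    #include <memory>

    class ProcessManager
    {
    public:
            ProcessManager() { self_ = this; }
            ~ProcessManager() { self_ = nullptr; }

            static ProcessManager *instance() { return self_; }

    private:
            static ProcessManager *self_;
    };

    ProcessManager *ProcessManager::self_ = nullptr;

    int main()
    {
            /* The owner (the CameraManager in libcamera) controls the
             * lifetime; instance() returns nullptr outside of it. */
            auto manager = std::make_unique<ProcessManager>();
            return ProcessManager::instance() == manager.get() ? 0 : 1;
    }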
diff --git a/src/libcamera/property_ids.cpp.in b/src/libcamera/property_ids.cpp.in
index bfdd823f..f917e334 100644
--- a/src/libcamera/property_ids.cpp.in
+++ b/src/libcamera/property_ids.cpp.in
@@ -23,12 +23,27 @@ namespace properties {
${controls_doc}
+/**
+ * \brief Namespace for libcamera draft properties
+ */
+namespace draft {
+
+${draft_controls_doc}
+
+} /* namespace draft */
+
#ifndef __DOXYGEN__
/*
* Keep the properties definitions hidden from doxygen as it incorrectly parses
* them as functions.
*/
${controls_def}
+
+namespace draft {
+
+${draft_controls_def}
+
+} /* namespace draft */
#endif
/**
diff --git a/src/libcamera/property_ids.yaml b/src/libcamera/property_ids.yaml
index 74ad0195..12ecbce5 100644
--- a/src/libcamera/property_ids.yaml
+++ b/src/libcamera/property_ids.yaml
@@ -387,6 +387,29 @@ controls:
| |
+--------------------+
+ - Model:
+ type: string
+ description: |
+        The model name shall, to the extent possible, describe the sensor.
+        For most devices this is the model name of the sensor. For some
+        devices the sensor model is however unavailable, as the sensor or the
+        entire camera is part of a larger unit and exposed as a black box to
+        the system. In such cases the model name of the smallest device that
+        contains the camera sensor shall be used.
+
+ The model name is not meant to be a camera name displayed to the
+ end-user, but may be combined with other camera information to create a
+ camera name.
+
+        The model name is not guaranteed to be unique in the system, nor is
+        it guaranteed to be stable or to have any other property required to
+        make it a good candidate for use as a permanent identifier of a
+        camera.
+
+        The model name shall describe the camera in a human-readable format
+        and shall be encoded in ASCII.
+
+ Example model names are 'ov5670', 'imx219' or 'Logitech Webcam C930e'.
+
- UnitCellSize:
type: Size
description: |
@@ -640,4 +663,52 @@ controls:
\todo Rename this property to ActiveAreas once we will have property
categories (i.e. Properties::PixelArray::ActiveAreas)
+ - ScalerCropMaximum:
+ type: Rectangle
+ description: |
+ The maximum valid rectangle for the controls::ScalerCrop control. This
+ reflects the minimum mandatory cropping applied in the camera sensor and
+        the rest of the pipeline. Like the ScalerCrop control, it defines a
+        rectangle taken from the sensor's active pixel array.
+
+ This property is valid only after the camera has been successfully
+ configured and its value may change whenever a new configuration is
+ applied.
+
+ \todo Turn this property into a "maximum control value" for the
+ ScalerCrop control once "dynamic" controls have been implemented.
+
+ # ----------------------------------------------------------------------------
+ # Draft properties section
+
+ - ColorFilterArrangement:
+ type: int32_t
+ draft: true
+ description: |
+        The arrangement of color filters on the sensor; it represents the
+        colors in the top-left 2x2 section of the sensor, in reading order.
+        Currently identical to ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT.
+ enum:
+ - name: RGGB
+ value: 0
+ description: RGGB Bayer pattern
+ - name: GRBG
+ value: 1
+ description: GRBG Bayer pattern
+ - name: GBRG
+ value: 2
+ description: GBRG Bayer pattern
+ - name: BGGR
+ value: 3
+ description: BGGR Bayer pattern
+ - name: RGB
+ value: 4
+ description: |
+ Sensor is not Bayer; output has 3 16-bit values for each pixel,
+ instead of just 1 16-bit value per pixel.
+ - name: MONO
+ value: 5
+ description: |
+ Sensor is not Bayer; output consists of a single colour channel.
+
...
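
As a usage sketch for the new Model property, an application could query it as
follows; this assumes the ControlList::get() signature of this series, which
returns the value directly:

    #include <iostream>

    #include <libcamera/camera.h>
    #include <libcamera/property_ids.h>

    using namespace libcamera;

    static void printModel(const Camera *camera)
    {
            const ControlList &props = camera->properties();

            if (props.contains(properties::Model))
                    std::cout << "Model: "
                              << props.get(properties::Model) << std::endl;
    }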
diff --git a/src/libcamera/proxy/ipa_proxy_linux.cpp b/src/libcamera/proxy/ipa_proxy_linux.cpp
deleted file mode 100644
index b78a0e45..00000000
--- a/src/libcamera/proxy/ipa_proxy_linux.cpp
+++ /dev/null
@@ -1,103 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2019, Google Inc.
- *
- * ipa_proxy_linux.cpp - Default Image Processing Algorithm proxy for Linux
- */
-
-#include <vector>
-
-#include <libcamera/ipa/ipa_interface.h>
-#include <libcamera/ipa/ipa_module_info.h>
-
-#include "libcamera/internal/ipa_module.h"
-#include "libcamera/internal/ipa_proxy.h"
-#include "libcamera/internal/ipc_unixsocket.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/process.h"
-
-namespace libcamera {
-
-LOG_DECLARE_CATEGORY(IPAProxy)
-
-class IPAProxyLinux : public IPAProxy
-{
-public:
- IPAProxyLinux(IPAModule *ipam);
- ~IPAProxyLinux();
-
- int init([[maybe_unused]] const IPASettings &settings) override
- {
- return 0;
- }
- int start() override { return 0; }
- void stop() override {}
- void configure([[maybe_unused]] const CameraSensorInfo &sensorInfo,
- [[maybe_unused]] const std::map<unsigned int, IPAStream> &streamConfig,
- [[maybe_unused]] const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- [[maybe_unused]] const IPAOperationData &ipaConfig,
- [[maybe_unused]] IPAOperationData *result) override {}
- void mapBuffers([[maybe_unused]] const std::vector<IPABuffer> &buffers) override {}
- void unmapBuffers([[maybe_unused]] const std::vector<unsigned int> &ids) override {}
- void processEvent([[maybe_unused]] const IPAOperationData &event) override {}
-
-private:
- void readyRead(IPCUnixSocket *ipc);
-
- Process *proc_;
-
- IPCUnixSocket *socket_;
-};
-
-IPAProxyLinux::IPAProxyLinux(IPAModule *ipam)
- : IPAProxy(ipam), proc_(nullptr), socket_(nullptr)
-{
- LOG(IPAProxy, Debug)
- << "initializing dummy proxy: loading IPA from "
- << ipam->path();
-
- std::vector<int> fds;
- std::vector<std::string> args;
- args.push_back(ipam->path());
- const std::string path = resolvePath("ipa_proxy_linux");
- if (path.empty()) {
- LOG(IPAProxy, Error)
- << "Failed to get proxy worker path";
- return;
- }
-
- socket_ = new IPCUnixSocket();
- int fd = socket_->create();
- if (fd < 0) {
- LOG(IPAProxy, Error)
- << "Failed to create socket";
- return;
- }
- socket_->readyRead.connect(this, &IPAProxyLinux::readyRead);
- args.push_back(std::to_string(fd));
- fds.push_back(fd);
-
- proc_ = new Process();
- int ret = proc_->start(path, args, fds);
- if (ret) {
- LOG(IPAProxy, Error)
- << "Failed to start proxy worker process";
- return;
- }
-
- valid_ = true;
-}
-
-IPAProxyLinux::~IPAProxyLinux()
-{
- delete proc_;
- delete socket_;
-}
-
-void IPAProxyLinux::readyRead([[maybe_unused]] IPCUnixSocket *ipc)
-{
-}
-
-REGISTER_IPA_PROXY(IPAProxyLinux)
-
-} /* namespace libcamera */
diff --git a/src/libcamera/proxy/ipa_proxy_thread.cpp b/src/libcamera/proxy/ipa_proxy_thread.cpp
deleted file mode 100644
index eead2883..00000000
--- a/src/libcamera/proxy/ipa_proxy_thread.cpp
+++ /dev/null
@@ -1,172 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2020, Google Inc.
- *
- * ipa_proxy_thread.cpp - Proxy running an Image Processing Algorithm in a thread
- */
-
-#include <memory>
-
-#include <libcamera/ipa/ipa_interface.h>
-#include <libcamera/ipa/ipa_module_info.h>
-
-#include "libcamera/internal/ipa_context_wrapper.h"
-#include "libcamera/internal/ipa_module.h"
-#include "libcamera/internal/ipa_proxy.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/thread.h"
-
-namespace libcamera {
-
-LOG_DECLARE_CATEGORY(IPAProxy)
-
-class IPAProxyThread : public IPAProxy, public Object
-{
-public:
- IPAProxyThread(IPAModule *ipam);
-
- int init(const IPASettings &settings) override;
- int start() override;
- void stop() override;
-
- void configure(const CameraSensorInfo &sensorInfo,
- const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- const IPAOperationData &ipaConfig,
- IPAOperationData *result) override;
- void mapBuffers(const std::vector<IPABuffer> &buffers) override;
- void unmapBuffers(const std::vector<unsigned int> &ids) override;
- void processEvent(const IPAOperationData &event) override;
-
-private:
- void queueFrameAction(unsigned int frame, const IPAOperationData &data);
-
- /* Helper class to invoke processEvent() in another thread. */
- class ThreadProxy : public Object
- {
- public:
- void setIPA(IPAInterface *ipa)
- {
- ipa_ = ipa;
- }
-
- int start()
- {
- return ipa_->start();
- }
-
- void stop()
- {
- ipa_->stop();
- }
-
- void processEvent(const IPAOperationData &event)
- {
- ipa_->processEvent(event);
- }
-
- private:
- IPAInterface *ipa_;
- };
-
- bool running_;
- Thread thread_;
- ThreadProxy proxy_;
- std::unique_ptr<IPAInterface> ipa_;
-};
-
-IPAProxyThread::IPAProxyThread(IPAModule *ipam)
- : IPAProxy(ipam), running_(false)
-{
- if (!ipam->load())
- return;
-
- struct ipa_context *ctx = ipam->createContext();
- if (!ctx) {
- LOG(IPAProxy, Error)
- << "Failed to create IPA context for " << ipam->path();
- return;
- }
-
- ipa_ = std::make_unique<IPAContextWrapper>(ctx);
- proxy_.setIPA(ipa_.get());
-
- /*
- * Proxy the queueFrameAction signal to dispatch it in the caller's
- * thread.
- */
- ipa_->queueFrameAction.connect(this, &IPAProxyThread::queueFrameAction);
-
- valid_ = true;
-}
-
-int IPAProxyThread::init(const IPASettings &settings)
-{
- int ret = ipa_->init(settings);
- if (ret)
- return ret;
-
- proxy_.moveToThread(&thread_);
-
- return 0;
-}
-
-int IPAProxyThread::start()
-{
- running_ = true;
- thread_.start();
-
- return proxy_.invokeMethod(&ThreadProxy::start, ConnectionTypeBlocking);
-}
-
-void IPAProxyThread::stop()
-{
- if (!running_)
- return;
-
- running_ = false;
-
- proxy_.invokeMethod(&ThreadProxy::stop, ConnectionTypeBlocking);
-
- thread_.exit();
- thread_.wait();
-}
-
-void IPAProxyThread::configure(const CameraSensorInfo &sensorInfo,
- const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- const IPAOperationData &ipaConfig,
- IPAOperationData *result)
-{
- ipa_->configure(sensorInfo, streamConfig, entityControls, ipaConfig,
- result);
-}
-
-void IPAProxyThread::mapBuffers(const std::vector<IPABuffer> &buffers)
-{
- ipa_->mapBuffers(buffers);
-}
-
-void IPAProxyThread::unmapBuffers(const std::vector<unsigned int> &ids)
-{
- ipa_->unmapBuffers(ids);
-}
-
-void IPAProxyThread::processEvent(const IPAOperationData &event)
-{
- if (!running_)
- return;
-
- /* Dispatch the processEvent() call to the thread. */
- proxy_.invokeMethod(&ThreadProxy::processEvent, ConnectionTypeQueued,
- event);
-}
-
-void IPAProxyThread::queueFrameAction(unsigned int frame, const IPAOperationData &data)
-{
- IPAInterface::queueFrameAction.emit(frame, data);
-}
-
-REGISTER_IPA_PROXY(IPAProxyThread)
-
-} /* namespace libcamera */
diff --git a/src/libcamera/proxy/meson.build b/src/libcamera/proxy/meson.build
index bd804750..00ae5a8f 100644
--- a/src/libcamera/proxy/meson.build
+++ b/src/libcamera/proxy/meson.build
@@ -1,6 +1,19 @@
# SPDX-License-Identifier: CC0-1.0
-libcamera_sources += files([
- 'ipa_proxy_linux.cpp',
- 'ipa_proxy_thread.cpp',
-])
+# generate {pipeline}_ipa_proxy.cpp
+foreach mojom : ipa_mojoms
+ proxy = custom_target(mojom['name'] + '_proxy_cpp',
+ input : mojom['mojom'],
+ output : mojom['name'] + '_ipa_proxy.cpp',
+ depends : mojom_templates,
+ command : [
+ mojom_generator, 'generate',
+ '-g', 'libcamera',
+ '--bytecode_path', mojom_templates_dir,
+ '--libcamera_generate_proxy_cpp',
+ '--libcamera_output_path=@OUTPUT@',
+ './' + '@INPUT@'
+ ])
+
+ libcamera_sources += proxy
+endforeach
diff --git a/src/libcamera/proxy/worker/ipa_proxy_linux_worker.cpp b/src/libcamera/proxy/worker/ipa_proxy_linux_worker.cpp
deleted file mode 100644
index 0c4687f7..00000000
--- a/src/libcamera/proxy/worker/ipa_proxy_linux_worker.cpp
+++ /dev/null
@@ -1,90 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2019, Google Inc.
- *
- * ipa_proxy_linux_worker.cpp - Default Image Processing Algorithm proxy worker for Linux
- */
-
-#include <iostream>
-#include <sys/types.h>
-#include <unistd.h>
-
-#include <libcamera/event_dispatcher.h>
-#include <libcamera/ipa/ipa_interface.h>
-#include <libcamera/logging.h>
-
-#include "libcamera/internal/ipa_module.h"
-#include "libcamera/internal/ipc_unixsocket.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/thread.h"
-
-using namespace libcamera;
-
-LOG_DEFINE_CATEGORY(IPAProxyLinuxWorker)
-
-void readyRead(IPCUnixSocket *ipc)
-{
- IPCUnixSocket::Payload message;
- int ret;
-
- ret = ipc->receive(&message);
- if (ret) {
- LOG(IPAProxyLinuxWorker, Error)
- << "Receive message failed: " << ret;
- return;
- }
-
- LOG(IPAProxyLinuxWorker, Debug) << "Received a message!";
-}
-
-int main(int argc, char **argv)
-{
- /* Uncomment this for debugging. */
-#if 0
- std::string logPath = "/tmp/libcamera.worker." +
- std::to_string(getpid()) + ".log";
- logSetFile(logPath.c_str());
-#endif
-
- if (argc < 3) {
- LOG(IPAProxyLinuxWorker, Debug)
- << "Tried to start worker with no args";
- return EXIT_FAILURE;
- }
-
- int fd = std::stoi(argv[2]);
- LOG(IPAProxyLinuxWorker, Debug)
- << "Starting worker for IPA module " << argv[1]
- << " with IPC fd = " << fd;
-
- std::unique_ptr<IPAModule> ipam = std::make_unique<IPAModule>(argv[1]);
- if (!ipam->isValid() || !ipam->load()) {
- LOG(IPAProxyLinuxWorker, Error)
- << "IPAModule " << argv[1] << " should be valid but isn't";
- return EXIT_FAILURE;
- }
-
- IPCUnixSocket socket;
- if (socket.bind(fd) < 0) {
- LOG(IPAProxyLinuxWorker, Error) << "IPC socket binding failed";
- return EXIT_FAILURE;
- }
- socket.readyRead.connect(&readyRead);
-
- struct ipa_context *ipac = ipam->createContext();
- if (!ipac) {
- LOG(IPAProxyLinuxWorker, Error) << "Failed to create IPA context";
- return EXIT_FAILURE;
- }
-
- LOG(IPAProxyLinuxWorker, Debug) << "Proxy worker successfully started";
-
- /* \todo upgrade listening loop */
- EventDispatcher *dispatcher = Thread::current()->eventDispatcher();
- while (1)
- dispatcher->processEvents();
-
- ipac->ops->destroy(ipac);
-
- return 0;
-}
diff --git a/src/libcamera/proxy/worker/meson.build b/src/libcamera/proxy/worker/meson.build
index ac0310a7..70c8760a 100644
--- a/src/libcamera/proxy/worker/meson.build
+++ b/src/libcamera/proxy/worker/meson.build
@@ -1,17 +1,32 @@
# SPDX-License-Identifier: CC0-1.0
-ipa_proxy_sources = [
- ['ipa_proxy_linux', 'ipa_proxy_linux_worker.cpp']
-]
+proxy_install_dir = get_option('libexecdir') / 'libcamera'
-proxy_install_dir = join_paths(get_option('libexecdir'), 'libcamera')
+# generate {pipeline}_ipa_proxy_worker.cpp
+foreach mojom : ipa_mojoms
+ worker = custom_target(mojom['name'] + '_proxy_worker',
+ input : mojom['mojom'],
+ output : mojom['name'] + '_ipa_proxy_worker.cpp',
+ depends : mojom_templates,
+ command : [
+ mojom_generator, 'generate',
+ '-g', 'libcamera',
+ '--bytecode_path', mojom_templates_dir,
+ '--libcamera_generate_proxy_worker',
+ '--libcamera_output_path=@OUTPUT@',
+ './' + '@INPUT@'
+ ])
-foreach t : ipa_proxy_sources
- proxy = executable(t[0], t[1],
+ proxy = executable(mojom['name'] + '_ipa_proxy',
+ [worker, libcamera_generated_ipa_headers],
install : true,
install_dir : proxy_install_dir,
- dependencies : libcamera_dep)
+ dependencies : libcamera_private)
endforeach
config_h.set('IPA_PROXY_DIR',
- '"' + join_paths(get_option('prefix'), proxy_install_dir) + '"')
+ '"' + get_option('prefix') / proxy_install_dir + '"')
+
+summary({
+ 'IPA_PROXY_DIR' : config_h.get('IPA_PROXY_DIR'),
+ }, section : 'Paths')
diff --git a/src/libcamera/request.cpp b/src/libcamera/request.cpp
index 60b30692..c095c9f4 100644
--- a/src/libcamera/request.cpp
+++ b/src/libcamera/request.cpp
@@ -8,14 +8,18 @@
#include <libcamera/request.h>
#include <map>
+#include <sstream>
+
+#include <libcamera/base/log.h>
-#include <libcamera/buffer.h>
#include <libcamera/camera.h>
#include <libcamera/control_ids.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/stream.h>
#include "libcamera/internal/camera_controls.h"
-#include "libcamera/internal/log.h"
+#include "libcamera/internal/framebuffer.h"
+#include "libcamera/internal/tracepoints.h"
/**
* \file request.h
@@ -38,6 +42,15 @@ LOG_DEFINE_CATEGORY(Request)
*/
/**
+ * \enum Request::ReuseFlag
+ * Flags to control the behavior of Request::reuse()
+ * \var Request::Default
+ * Don't reuse buffers
+ * \var Request::ReuseBuffers
+ * Reuse the buffers that were previously added by addBuffer()
+ */
+
+/**
* \typedef Request::BufferMap
* \brief A map of Stream to FrameBuffer pointers
*/
@@ -62,8 +75,8 @@ LOG_DEFINE_CATEGORY(Request)
*
*/
Request::Request(Camera *camera, uint64_t cookie)
- : camera_(camera), cookie_(cookie), status_(RequestPending),
- cancelled_(false)
+ : camera_(camera), sequence_(0), cookie_(cookie),
+ status_(RequestPending), cancelled_(false)
{
/**
* \todo Should the Camera expose a validator instance, to avoid
@@ -76,23 +89,62 @@ Request::Request(Camera *camera, uint64_t cookie)
* \todo: Add a validator for metadata controls.
*/
metadata_ = new ControlList(controls::controls);
+
+ LIBCAMERA_TRACEPOINT(request_construct, this);
+
+ LOG(Request, Debug) << "Created request - cookie: " << cookie_;
}
Request::~Request()
{
+ LIBCAMERA_TRACEPOINT(request_destroy, this);
+
delete metadata_;
delete controls_;
delete validator_;
}
/**
+ * \brief Reset the request for reuse
+ * \param[in] flags Indicate whether or not to reuse the buffers
+ *
+ * Reset the status and controls associated with the request, to allow it to
+ * be reused and requeued without destruction. This function shall be called
+ * prior to queueing the request to the camera, in lieu of constructing a new
+ * request. The application can reuse the buffers that were previously added
+ * to the request via addBuffer() by setting \a flags to ReuseBuffers.
+ */
+void Request::reuse(ReuseFlag flags)
+{
+ LIBCAMERA_TRACEPOINT(request_reuse, this);
+
+ pending_.clear();
+ if (flags & ReuseBuffers) {
+ for (auto pair : bufferMap_) {
+ FrameBuffer *buffer = pair.second;
+ buffer->_d()->setRequest(this);
+ pending_.insert(buffer);
+ }
+ } else {
+ bufferMap_.clear();
+ }
+
+ sequence_ = 0;
+ status_ = RequestPending;
+ cancelled_ = false;
+
+ controls_->clear();
+ metadata_->clear();
+}
+
+/**
* \fn Request::controls()
* \brief Retrieve the request's ControlList
*
* Requests store a list of controls to be applied to all frames captured for
* the request. They are created with an empty list of controls that can be
- * accessed through this method and updated with ControlList::operator[]() or
- * ControlList::update().
+ * accessed through this method. Control values can be retrieved using
+ * ControlList::get() and updated using ControlList::set().
*
* Only controls supported by the camera to which this request will be
* submitted shall be included in the controls list. Attempting to add an
@@ -140,7 +192,7 @@ int Request::addBuffer(const Stream *stream, FrameBuffer *buffer)
return -EEXIST;
}
- buffer->request_ = this;
+ buffer->_d()->setRequest(this);
pending_.insert(buffer);
bufferMap_[stream] = buffer;
@@ -180,6 +232,23 @@ FrameBuffer *Request::findBuffer(const Stream *stream) const
*/
/**
+ * \fn Request::sequence()
+ * \brief Retrieve the sequence number for the request
+ *
+ * When requests are queued, they are given a sequential number to track the
+ * order in which requests are queued to a camera. This number counts all
+ * requests given to a camera through its lifetime, and is not reset to zero
+ * between camera stop/start sequences.
+ *
+ * It can be used to support debugging and to identify the flow of requests
+ * through a pipeline, but is not guaranteed to match the sequence number of
+ * any image in the stream. The sequence number is stored as an unsigned
+ * integer and will wrap when it overflows.
+ *
+ * \return The request sequence number
+ */
+
+/**
* \fn Request::cookie()
* \brief Retrieve the cookie set when the request was created
* \return The request cookie
@@ -215,12 +284,37 @@ FrameBuffer *Request::findBuffer(const Stream *stream) const
*/
void Request::complete()
{
+ ASSERT(status_ == RequestPending);
ASSERT(!hasPendingBuffers());
+
status_ = cancelled_ ? RequestCancelled : RequestComplete;
- LOG(Request, Debug)
- << "Request has completed - cookie: " << cookie_
- << (cancelled_ ? " [Cancelled]" : "");
+ LOG(Request, Debug) << toString();
+
+ LIBCAMERA_TRACEPOINT(request_complete, this);
+}
+
+/**
+ * \brief Cancel a queued request
+ *
+ * Mark the request and its associated buffers as cancelled and complete it.
+ *
+ * Set each pending buffer in error state and emit the buffer completion signal
+ * before completing the Request.
+ */
+void Request::cancel()
+{
+ LIBCAMERA_TRACEPOINT(request_cancel, this);
+
+ ASSERT(status_ == RequestPending);
+
+ for (FrameBuffer *buffer : pending_) {
+ buffer->cancel();
+ camera_->bufferCompleted.emit(this, buffer);
+ }
+
+ pending_.clear();
+ cancelled_ = true;
}
/**
@@ -238,10 +332,12 @@ void Request::complete()
*/
bool Request::completeBuffer(FrameBuffer *buffer)
{
+ LIBCAMERA_TRACEPOINT(request_complete_buffer, this, buffer);
+
int ret = pending_.erase(buffer);
ASSERT(ret == 1);
- buffer->request_ = nullptr;
+ buffer->_d()->setRequest(nullptr);
if (buffer->metadata().status == FrameMetadata::FrameCancelled)
cancelled_ = true;
@@ -249,4 +345,27 @@ bool Request::completeBuffer(FrameBuffer *buffer)
return !hasPendingBuffers();
}
+/**
+ * \brief Generate a string representation of the Request internals
+ *
+ * This function facilitates debugging of Request state while it is used
+ * internally within libcamera.
+ *
+ * \return A string representing the current state of the request
+ */
+std::string Request::toString() const
+{
+ std::stringstream ss;
+
+ /* Pending, Completed, Cancelled(X). */
+ static const char *statuses = "PCX";
+
+ /* Example Output: Request(55:P:1/2:6523524) */
+ ss << "Request(" << sequence_ << ":" << statuses[status_] << ":"
+ << pending_.size() << "/" << bufferMap_.size() << ":"
+ << cookie_ << ")";
+
+ return ss.str();
+}
+
} /* namespace libcamera */
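
Request::reuse() is what enables requeuing without reallocation on the
application side. A minimal sketch of the intended capture loop; the class and
member names are illustrative:

    /* Hypothetical slot connected to Camera::requestCompleted. */
    void MyApp::requestComplete(Request *request)
    {
            if (request->status() == Request::RequestCancelled)
                    return;

            /* ... consume request->metadata() and the completed buffers ... */

            /* Reset the request, keep its buffers, and queue it again. */
            request->reuse(Request::ReuseBuffers);
            camera_->queueRequest(request);
    }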
diff --git a/src/libcamera/source_paths.cpp b/src/libcamera/source_paths.cpp
new file mode 100644
index 00000000..19689585
--- /dev/null
+++ b/src/libcamera/source_paths.cpp
@@ -0,0 +1,139 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * source_paths.cpp - Identify libcamera source and build paths
+ */
+
+#include "libcamera/internal/source_paths.h"
+
+#include <dlfcn.h>
+#include <elf.h>
+#include <link.h>
+#include <stdlib.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
+#include <libcamera/base/utils.h>
+
+/**
+ * \file source_paths.h
+ * \brief Identify the build and source path of a not-yet-installed library
+ */
+
+/* musl doesn't declare _DYNAMIC in link.h, declare it manually. */
+extern ElfW(Dyn) _DYNAMIC[];
+
+namespace libcamera {
+
+namespace {
+
+/**
+ * \brief Check if libcamera is installed or not
+ *
+ * Use the build_rpath dynamic tag, which meson strips out at install time,
+ * to determine at runtime whether the currently executing library has been
+ * installed or not.
+ *
+ * \return True if libcamera is installed, false otherwise
+ */
+bool isLibcameraInstalled()
+{
+ /*
+ * DT_RUNPATH (DT_RPATH when the linker uses old dtags) is removed on
+ * install.
+ */
+ for (const ElfW(Dyn) *dyn = _DYNAMIC; dyn->d_tag != DT_NULL; ++dyn) {
+ if (dyn->d_tag == DT_RUNPATH || dyn->d_tag == DT_RPATH)
+ return false;
+ }
+
+ return true;
+}
+
+} /* namespace */
+
+namespace utils {
+
+/**
+ * \brief Retrieve the path to the build directory
+ *
+ * During development, it is useful to run libcamera binaries directly from the
+ * build directory without installing them. This function helps components that
+ * need to locate resources in the build tree, such as IPA modules or IPA proxy
+ * workers, by providing them with the path to the root of the build directory.
+ * Callers can then use it to complement or override searches in system-wide
+ * directories.
+ *
+ * If libcamera has been installed, the build directory path is not available
+ * and this function returns an empty string.
+ *
+ * \return The path to the build directory if running from a build, or an empty
+ * string otherwise
+ */
+std::string libcameraBuildPath()
+{
+ if (isLibcameraInstalled())
+ return std::string();
+
+ Dl_info info;
+
+ /* Look up our own symbol. */
+ int ret = dladdr(reinterpret_cast<void *>(libcameraBuildPath), &info);
+ if (ret == 0)
+ return std::string();
+
+ std::string path = dirname(info.dli_fname) + "/../../";
+
+ char *real = realpath(path.c_str(), nullptr);
+ if (!real)
+ return std::string();
+
+ path = real;
+ free(real);
+
+ return path + "/";
+}
+
+/**
+ * \brief Retrieve the path to the source directory
+ *
+ * During development, it is useful to run libcamera binaries directly from the
+ * build directory without installing them. This function helps components that
+ * need to locate resources in the source tree, such as IPA configuration
+ * files, by providing them with the path to the root of the source directory.
+ * Callers can then use it to complement or override searches in system-wide
+ * directories.
+ *
+ * If libcamera has been installed, the source directory path is not available
+ * and this function returns an empty string.
+ *
+ * \return The path to the source directory if running from a build directory,
+ * or an empty string otherwise
+ */
+std::string libcameraSourcePath()
+{
+ std::string path = libcameraBuildPath();
+ if (path.empty())
+ return std::string();
+
+ path += "source";
+
+ char *real = realpath(path.c_str(), nullptr);
+ if (!real)
+ return std::string();
+
+ path = real;
+ free(real);
+
+ struct stat statbuf;
+ int ret = stat(path.c_str(), &statbuf);
+ if (ret < 0 || (statbuf.st_mode & S_IFMT) != S_IFDIR)
+ return std::string();
+
+ return path + "/";
+}
+
+} /* namespace utils */
+
+} /* namespace libcamera */
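
As a usage sketch, a component could combine the helper with its installed
search path; the fallback directory below is illustrative, not taken from this
patch:

    #include <string>
    #include <vector>

    #include "libcamera/internal/source_paths.h"

    using namespace libcamera;

    static std::vector<std::string> moduleSearchPaths()
    {
            std::vector<std::string> paths;

            /* Prefer the build tree when running uninstalled. */
            const std::string root = utils::libcameraBuildPath();
            if (!root.empty())
                    paths.push_back(root + "src/ipa");

            /* Illustrative system-wide fallback. */
            paths.push_back("/usr/lib/libcamera");

            return paths;
    }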
diff --git a/src/libcamera/stream.cpp b/src/libcamera/stream.cpp
index f7bafcf8..b8626775 100644
--- a/src/libcamera/stream.cpp
+++ b/src/libcamera/stream.cpp
@@ -15,8 +15,9 @@
#include <libcamera/request.h>
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/utils.h"
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
/**
* \file stream.h
diff --git a/src/libcamera/sysfs.cpp b/src/libcamera/sysfs.cpp
index 6c8e9554..44c3331b 100644
--- a/src/libcamera/sysfs.cpp
+++ b/src/libcamera/sysfs.cpp
@@ -12,8 +12,8 @@
#include <sys/stat.h>
#include <sys/sysmacros.h>
-#include "libcamera/internal/file.h"
-#include "libcamera/internal/log.h"
+#include <libcamera/base/file.h>
+#include <libcamera/base/log.h>
/**
* \file sysfs.h
@@ -22,7 +22,7 @@
namespace libcamera {
-LOG_DEFINE_CATEGORY(SysFs);
+LOG_DEFINE_CATEGORY(SysFs)
namespace sysfs {
@@ -70,10 +70,11 @@ std::string charDevPath(const std::string &deviceNode)
std::string firmwareNodePath(const std::string &device)
{
std::string fwPath, node;
+ struct stat st;
/* Lookup for DT-based systems */
node = device + "/of_node";
- if (File::exists(node)) {
+ if (!stat(node.c_str(), &st)) {
char *ofPath = realpath(node.c_str(), nullptr);
if (!ofPath)
return {};
diff --git a/src/libcamera/tracepoints.cpp b/src/libcamera/tracepoints.cpp
new file mode 100644
index 00000000..0173b75a
--- /dev/null
+++ b/src/libcamera/tracepoints.cpp
@@ -0,0 +1,10 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * tracepoints.cpp - Tracepoints with lttng
+ */
+#define TRACEPOINT_CREATE_PROBES
+#define TRACEPOINT_DEFINE
+
+#include "libcamera/internal/tracepoints.h"
diff --git a/src/libcamera/transform.cpp b/src/libcamera/transform.cpp
index f3e37f31..99a043ba 100644
--- a/src/libcamera/transform.cpp
+++ b/src/libcamera/transform.cpp
@@ -43,7 +43,15 @@ namespace libcamera {
* couple of them have additional synonyms for convenience). We illustrate each
* with its nominal effect on a rectangle with vertices labelled A, B, C and D.
*
- * **Identity**
+ * \sa https://en.wikipedia.org/wiki/Examples_of_groups#dihedral_group_of_order_8
+ *
+ * The set of 2D plane transforms is also known as the symmetry group of a
+ * square, described in the link. Note that the group can be generated by
+ * only 2 elements (the horizontal flip and a 90 degree rotation, for
+ * example); however, the encoding used here makes the presence of the
+ * vertical flip explicit.
+ *
+ * \var Transform::Identity
*
* Identity transform.
~~~
@@ -53,11 +61,11 @@ Input image | | goes to output image | |
~~~
* Numeric value: 0 (no bits set).
*
- * **Rot0**
+ * \var Transform::Rot0
*
- * Synonym for `Identity` (zero degree rotation).
+ * Synonym for Transform::Identity (zero degree rotation).
*
- * **HFlip**
+ * \var Transform::HFlip
*
* Horizontal flip.
~~~
@@ -67,7 +75,7 @@ Input image | | goes to output image | |
~~~
* Numeric value: 1 (horizontal flip bit set only).
*
- * **VFlip**
+ * \var Transform::VFlip
*
* Vertical flip.
~~~
@@ -77,7 +85,7 @@ Input image | | goes to output image | |
~~~
* Numeric value: 2 (vertical flip bit set only).
*
- * **HVFlip**
+ * \var Transform::HVFlip
*
* Horizontal and vertical flip (identical to a 180 degree rotation).
~~~
@@ -87,11 +95,11 @@ Input image | | goes to output image | |
~~~
* Numeric value: 3 (horizontal and vertical flip bits set).
*
- * **Rot180**
+ * \var Transform::Rot180
*
* Synonym for `HVFlip` (180 degree rotation).
*
- * **Transpose**
+ * \var Transform::Transpose
*
* Transpose (about the main diagonal).
~~~
@@ -101,7 +109,7 @@ Input image | | goes to output image | |
~~~
* Numeric value: 4 (transpose bit set only).
*
- * **Rot270**
+ * \var Transform::Rot270
*
* Rotation by 270 degrees clockwise (90 degrees anticlockwise).
~~~
@@ -111,7 +119,7 @@ Input image | | goes to output image | |
~~~
* Numeric value: 5 (transpose and horizontal flip bits set).
*
- * **Rot90**
+ * \var Transform::Rot90
*
* Rotation by 90 degrees clockwise (270 degrees anticlockwise).
~~~
@@ -121,7 +129,7 @@ Input image | | goes to output image | |
~~~
* Numeric value: 6 (transpose and vertical flip bits set).
*
- * **Rot180Transpose**
+ * \var Transform::Rot180Transpose
*
* Rotation by 180 degrees followed by transpose (alternatively, transposition
* about the "opposite diagonal").
@@ -131,14 +139,6 @@ Input image | | goes to output image | |
C-D C-A
~~~
* Numeric value: 7 (all bits set).
- *
- * \sa https://en.wikipedia.org/wiki/Examples_of_groups#dihedral_group_of_order_8
- *
- * The set of 2D plane transforms is also known as the symmetry group of a
- * square, described in the link. Note that the group can be generated by
- * only 2 elements (the horizontal flip and a 90 degree rotation, for
- * example), however, the encoding used here makes the presence of the vertical
- * flip explicit.
*/
/**
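
Collected in one place, the encoding documented per \var above is hflip =
bit 0, vflip = bit 1 and transpose = bit 2, which can be sanity-checked at
compile time:

    #include <libcamera/transform.h>

    using namespace libcamera;

    /* Numeric values from the documentation above. */
    static_assert(static_cast<int>(Transform::HVFlip) == 3, "HFlip | VFlip");
    static_assert(static_cast<int>(Transform::Rot270) == 5, "Transpose | HFlip");
    static_assert(static_cast<int>(Transform::Rot90) == 6, "Transpose | VFlip");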
diff --git a/src/libcamera/v4l2_controls.cpp b/src/libcamera/v4l2_controls.cpp
deleted file mode 100644
index 3f8ec6ca..00000000
--- a/src/libcamera/v4l2_controls.cpp
+++ /dev/null
@@ -1,151 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2019, Google Inc.
- *
- * v4l2_controls.cpp - V4L2 Controls Support
- */
-
-#include "libcamera/internal/v4l2_controls.h"
-
-#include <string.h>
-
-/**
- * \file v4l2_controls.h
- * \brief Support for V4L2 Controls using the V4L2 Extended Controls APIs
- *
- * The V4L2 Control API allows application to inspect and modify sets of
- * configurable parameters on a video device or subdevice. The nature of the
- * parameters an application can modify using the control framework depends on
- * what the driver implements support for, and on the characteristics of the
- * underlying hardware platform. Generally controls are used to modify user
- * visible settings, such as the image brightness and exposure time, or
- * non-standard parameters which cannot be controlled through the V4L2 format
- * negotiation API.
- *
- * Controls are identified by a numerical ID, defined by the V4L2 kernel headers
- * and have an associated type. Each control has a value, which is the data that
- * can be modified with V4L2Device::setControls() or retrieved with
- * V4L2Device::getControls().
- *
- * The control's type along with the control's flags define the type of the
- * control's value content. Controls can transport a single data value stored in
- * variable inside the control, or they might as well deal with more complex
- * data types, such as arrays of matrices, stored in a contiguous memory
- * locations associated with the control and called 'the payload'. Such controls
- * are called 'compound controls' and are currently not supported by the
- * libcamera V4L2 control framework.
- *
- * libcamera implements support for controls using the V4L2 Extended Control
- * API, which allows future handling of controls with payloads of arbitrary
- * sizes.
- *
- * The libcamera V4L2 Controls framework operates on lists of controls, wrapped
- * by the ControlList class, to match the V4L2 extended controls API. The
- * interface to set and get control is implemented by the V4L2Device class, and
- * this file only provides the data type definitions.
- *
- * \todo Add support for compound controls
- */
-
-namespace libcamera {
-
-namespace {
-
-std::string v4l2_ctrl_name(const struct v4l2_query_ext_ctrl &ctrl)
-{
- size_t len = strnlen(ctrl.name, sizeof(ctrl.name));
- return std::string(static_cast<const char *>(ctrl.name), len);
-}
-
-ControlType v4l2_ctrl_type(const struct v4l2_query_ext_ctrl &ctrl)
-{
- switch (ctrl.type) {
- case V4L2_CTRL_TYPE_U8:
- return ControlTypeByte;
-
- case V4L2_CTRL_TYPE_BOOLEAN:
- return ControlTypeBool;
-
- case V4L2_CTRL_TYPE_INTEGER:
- return ControlTypeInteger32;
-
- case V4L2_CTRL_TYPE_INTEGER64:
- return ControlTypeInteger64;
-
- case V4L2_CTRL_TYPE_MENU:
- case V4L2_CTRL_TYPE_BUTTON:
- case V4L2_CTRL_TYPE_BITMASK:
- case V4L2_CTRL_TYPE_INTEGER_MENU:
- /*
- * More precise types may be needed, for now use a 32-bit
- * integer type.
- */
- return ControlTypeInteger32;
-
- default:
- return ControlTypeNone;
- }
-}
-
-} /* namespace */
-
-/**
- * \class V4L2ControlId
- * \brief V4L2 control static metadata
- *
- * The V4L2ControlId class is a specialisation of the ControlId for V4L2
- * controls.
- */
-
-/**
- * \brief Construct a V4L2ControlId from a struct v4l2_query_ext_ctrl
- * \param[in] ctrl The struct v4l2_query_ext_ctrl as returned by the kernel
- */
-V4L2ControlId::V4L2ControlId(const struct v4l2_query_ext_ctrl &ctrl)
- : ControlId(ctrl.id, v4l2_ctrl_name(ctrl), v4l2_ctrl_type(ctrl))
-{
-}
-
-/**
- * \class V4L2ControlInfo
- * \brief Convenience specialisation of ControlInfo for V4L2 controls
- *
- * The V4L2ControlInfo class is a specialisation of the ControlInfo for V4L2
- * controls. It offers a convenience constructor from a struct
- * v4l2_query_ext_ctrl, and is otherwise equivalent to the ControlInfo class.
- */
-
-/**
- * \brief Construct a V4L2ControlInfo from a struct v4l2_query_ext_ctrl
- * \param[in] ctrl The struct v4l2_query_ext_ctrl as returned by the kernel
- */
-V4L2ControlInfo::V4L2ControlInfo(const struct v4l2_query_ext_ctrl &ctrl)
-{
- switch (ctrl.type) {
- case V4L2_CTRL_TYPE_U8:
- ControlInfo::operator=(ControlInfo(static_cast<uint8_t>(ctrl.minimum),
- static_cast<uint8_t>(ctrl.maximum),
- static_cast<uint8_t>(ctrl.default_value)));
- break;
-
- case V4L2_CTRL_TYPE_BOOLEAN:
- ControlInfo::operator=(ControlInfo(static_cast<bool>(ctrl.minimum),
- static_cast<bool>(ctrl.maximum),
- static_cast<bool>(ctrl.default_value)));
- break;
-
- case V4L2_CTRL_TYPE_INTEGER64:
- ControlInfo::operator=(ControlInfo(static_cast<int64_t>(ctrl.minimum),
- static_cast<int64_t>(ctrl.maximum),
- static_cast<int64_t>(ctrl.default_value)));
- break;
-
- default:
- ControlInfo::operator=(ControlInfo(static_cast<int32_t>(ctrl.minimum),
- static_cast<int32_t>(ctrl.maximum),
- static_cast<int32_t>(ctrl.default_value)));
- break;
- }
-}
-
-} /* namespace libcamera */
diff --git a/src/libcamera/v4l2_device.cpp b/src/libcamera/v4l2_device.cpp
index 31d4dad0..98d93a12 100644
--- a/src/libcamera/v4l2_device.cpp
+++ b/src/libcamera/v4l2_device.cpp
@@ -16,10 +16,11 @@
#include <sys/syscall.h>
#include <unistd.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/event_notifier.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include "libcamera/internal/sysfs.h"
-#include "libcamera/internal/utils.h"
-#include "libcamera/internal/v4l2_controls.h"
/**
* \file v4l2_device.h
@@ -52,7 +53,8 @@ LOG_DEFINE_CATEGORY(V4L2)
* at open() time, and the \a logTag to prefix log messages with.
*/
V4L2Device::V4L2Device(const std::string &deviceNode)
- : deviceNode_(deviceNode), fd_(-1)
+ : deviceNode_(deviceNode), fd_(-1), fdEventNotifier_(nullptr),
+ frameStartEnabled_(false)
{
}
@@ -87,7 +89,7 @@ int V4L2Device::open(unsigned int flags)
return ret;
}
- fd_ = ret;
+ setFd(ret);
listControls();
@@ -117,6 +119,10 @@ int V4L2Device::setFd(int fd)
fd_ = fd;
+ fdEventNotifier_ = new EventNotifier(fd_, EventNotifier::Exception);
+ fdEventNotifier_->activated.connect(this, &V4L2Device::eventAvailable);
+ fdEventNotifier_->setEnabled(false);
+
return 0;
}
@@ -130,6 +136,8 @@ void V4L2Device::close()
if (!isOpen())
return;
+ delete fdEventNotifier_;
+
if (::close(fd_) < 0)
LOG(V4L2, Error) << "Failed to close V4L2 device: "
<< strerror(errno);
@@ -165,18 +173,11 @@ void V4L2Device::close()
*/
ControlList V4L2Device::getControls(const std::vector<uint32_t> &ids)
{
- unsigned int count = ids.size();
- if (count == 0)
+ if (ids.empty())
return {};
ControlList ctrls{ controls_ };
- /*
- * Start by filling the ControlList. This can't be combined with filling
- * v4l2Ctrls, as updateControls() relies on both containers having the
- * same order, and the control list is based on a map, which is not
- * sorted by insertion order.
- */
for (uint32_t id : ids) {
const auto iter = controls_.find(id);
if (iter == controls_.end()) {
@@ -188,14 +189,17 @@ ControlList V4L2Device::getControls(const std::vector<uint32_t> &ids)
ctrls.set(id, {});
}
- struct v4l2_ext_control v4l2Ctrls[count];
- memset(v4l2Ctrls, 0, sizeof(v4l2Ctrls));
+ std::vector<v4l2_ext_control> v4l2Ctrls(ids.size());
+	memset(v4l2Ctrls.data(), 0, sizeof(v4l2_ext_control) * v4l2Ctrls.size());
unsigned int i = 0;
for (auto &ctrl : ctrls) {
unsigned int id = ctrl.first;
const struct v4l2_query_ext_ctrl &info = controlInfo_[id];
+ v4l2_ext_control &v4l2Ctrl = v4l2Ctrls[i++];
+ v4l2Ctrl.id = id;
+
if (info.flags & V4L2_CTRL_FLAG_HAS_PAYLOAD) {
ControlType type;
@@ -215,25 +219,22 @@ ControlList V4L2Device::getControls(const std::vector<uint32_t> &ids)
value.reserve(type, true, info.elems);
Span<uint8_t> data = value.data();
- v4l2Ctrls[i].p_u8 = data.data();
- v4l2Ctrls[i].size = data.size();
+ v4l2Ctrl.p_u8 = data.data();
+ v4l2Ctrl.size = data.size();
}
-
- v4l2Ctrls[i].id = id;
- i++;
}
struct v4l2_ext_controls v4l2ExtCtrls = {};
v4l2ExtCtrls.which = V4L2_CTRL_WHICH_CUR_VAL;
- v4l2ExtCtrls.controls = v4l2Ctrls;
- v4l2ExtCtrls.count = count;
+ v4l2ExtCtrls.controls = v4l2Ctrls.data();
+ v4l2ExtCtrls.count = v4l2Ctrls.size();
int ret = ioctl(VIDIOC_G_EXT_CTRLS, &v4l2ExtCtrls);
if (ret) {
unsigned int errorIdx = v4l2ExtCtrls.error_idx;
/* Generic validation error. */
- if (errorIdx == 0 || errorIdx >= count) {
+ if (errorIdx == 0 || errorIdx >= v4l2Ctrls.size()) {
LOG(V4L2, Error) << "Unable to read controls: "
<< strerror(-ret);
return {};
@@ -242,11 +243,11 @@ ControlList V4L2Device::getControls(const std::vector<uint32_t> &ids)
/* A specific control failed. */
LOG(V4L2, Error) << "Unable to read control " << errorIdx
<< ": " << strerror(-ret);
- count = errorIdx - 1;
- ret = errorIdx;
+
+ v4l2Ctrls.resize(errorIdx);
}
- updateControls(&ctrls, v4l2Ctrls, count);
+ updateControls(&ctrls, v4l2Ctrls);
return ctrls;
}
@@ -275,30 +276,28 @@ ControlList V4L2Device::getControls(const std::vector<uint32_t> &ids)
*/
int V4L2Device::setControls(ControlList *ctrls)
{
- unsigned int count = ctrls->size();
- if (count == 0)
+ if (ctrls->empty())
return 0;
- struct v4l2_ext_control v4l2Ctrls[count];
- memset(v4l2Ctrls, 0, sizeof(v4l2Ctrls));
+ std::vector<v4l2_ext_control> v4l2Ctrls(ctrls->size());
+ memset(v4l2Ctrls.data(), 0, sizeof(v4l2_ext_control) * ctrls->size());
- unsigned int i = 0;
- for (auto &ctrl : *ctrls) {
- unsigned int id = ctrl.first;
+ for (auto [ctrl, i] = std::pair(ctrls->begin(), 0u); i < ctrls->size(); ctrl++, i++) {
+ const unsigned int id = ctrl->first;
const auto iter = controls_.find(id);
if (iter == controls_.end()) {
LOG(V4L2, Error)
<< "Control " << utils::hex(id) << " not found";
return -EINVAL;
}
-
- v4l2Ctrls[i].id = id;
+ v4l2_ext_control &v4l2Ctrl = v4l2Ctrls[i];
+ v4l2Ctrl.id = id;
/* Set the v4l2_ext_control value for the write operation. */
- ControlValue &value = ctrl.second;
+ ControlValue &value = ctrl->second;
switch (iter->first->type()) {
case ControlTypeInteger64:
- v4l2Ctrls[i].value64 = value.get<int64_t>();
+ v4l2Ctrl.value64 = value.get<int64_t>();
break;
case ControlTypeByte: {
@@ -310,32 +309,30 @@ int V4L2Device::setControls(ControlList *ctrls)
}
Span<uint8_t> data = value.data();
- v4l2Ctrls[i].p_u8 = data.data();
- v4l2Ctrls[i].size = data.size();
+ v4l2Ctrl.p_u8 = data.data();
+ v4l2Ctrl.size = data.size();
break;
}
default:
/* \todo To be changed to support strings. */
- v4l2Ctrls[i].value = value.get<int32_t>();
+ v4l2Ctrl.value = value.get<int32_t>();
break;
}
-
- i++;
}
struct v4l2_ext_controls v4l2ExtCtrls = {};
v4l2ExtCtrls.which = V4L2_CTRL_WHICH_CUR_VAL;
- v4l2ExtCtrls.controls = v4l2Ctrls;
- v4l2ExtCtrls.count = count;
+ v4l2ExtCtrls.controls = v4l2Ctrls.data();
+ v4l2ExtCtrls.count = v4l2Ctrls.size();
int ret = ioctl(VIDIOC_S_EXT_CTRLS, &v4l2ExtCtrls);
if (ret) {
unsigned int errorIdx = v4l2ExtCtrls.error_idx;
/* Generic validation error. */
- if (errorIdx == 0 || errorIdx >= count) {
+ if (errorIdx == 0 || errorIdx >= v4l2Ctrls.size()) {
LOG(V4L2, Error) << "Unable to set controls: "
<< strerror(-ret);
return -EINVAL;
@@ -344,11 +341,12 @@ int V4L2Device::setControls(ControlList *ctrls)
/* A specific control failed. */
LOG(V4L2, Error) << "Unable to set control " << errorIdx
<< ": " << strerror(-ret);
- count = errorIdx - 1;
+
+ v4l2Ctrls.resize(errorIdx);
ret = errorIdx;
}
- updateControls(ctrls, v4l2Ctrls, count);
+ updateControls(ctrls, v4l2Ctrls);
return ret;
}
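
The std::vector rewrite leaves call sites unchanged. A usage sketch from a
caller's point of view; the control ID and value are illustrative:

    #include <errno.h>
    #include <linux/v4l2-controls.h>

    static int tweakBrightness(V4L2Device *dev)
    {
            ControlList ctrls = dev->getControls({ V4L2_CID_BRIGHTNESS });
            if (ctrls.empty())
                    return -EINVAL;

            ctrls.set(V4L2_CID_BRIGHTNESS,
                      ControlValue(static_cast<int32_t>(128)));

            /*
             * On partial failure setControls() returns the positive index of
             * the first failed control, as implemented above.
             */
            return dev->setControls(&ctrls);
    }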
@@ -397,6 +395,40 @@ std::string V4L2Device::devicePath() const
}
/**
+ * \brief Enable or disable frame start event notification
+ * \param[in] enable True to enable frame start events, false to disable them
+ *
+ * This function enables or disables generation of frame start events. Once
+ * enabled, the events are signalled through the frameStart signal.
+ *
+ * \return 0 on success, a negative error code otherwise
+ */
+int V4L2Device::setFrameStartEnabled(bool enable)
+{
+ if (frameStartEnabled_ == enable)
+ return 0;
+
+ struct v4l2_event_subscription event{};
+ event.type = V4L2_EVENT_FRAME_SYNC;
+
+ unsigned long request = enable ? VIDIOC_SUBSCRIBE_EVENT
+ : VIDIOC_UNSUBSCRIBE_EVENT;
+ int ret = ioctl(request, &event);
+ if (enable && ret)
+ return ret;
+
+ fdEventNotifier_->setEnabled(enable);
+ frameStartEnabled_ = enable;
+
+ return ret;
+}
+
+/**
+ * \var V4L2Device::frameStart
+ * \brief A Signal emitted when capture of a frame has started
+ */
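+
+/*
+ * A minimal usage sketch, assuming a hypothetical receiver class with an
+ * onFrameStart(uint32_t sequence) slot (illustrative, not part of this patch):
+ *
+ *	device->frameStart.connect(&helper, &CamHelper::onFrameStart);
+ *	int ret = device->setFrameStartEnabled(true);
+ *	if (ret)
+ *		LOG(V4L2, Error) << "Frame start events not supported";
+ */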
+
+/**
* \brief Perform an IOCTL system call on the device node
* \param[in] request The IOCTL request code
* \param[in] argp A pointer to the IOCTL argument
@@ -426,6 +458,118 @@ int V4L2Device::ioctl(unsigned long request, void *argp)
* \return The V4L2 device file descriptor, -1 if the device node is not open
*/
+/**
+ * \brief Retrieve the libcamera control type associated with the V4L2 control
+ * \param[in] ctrlType The V4L2 control type
+ * \return The ControlType associated to \a ctrlType
+ */
+ControlType V4L2Device::v4l2CtrlType(uint32_t ctrlType)
+{
+ switch (ctrlType) {
+ case V4L2_CTRL_TYPE_U8:
+ return ControlTypeByte;
+
+ case V4L2_CTRL_TYPE_BOOLEAN:
+ return ControlTypeBool;
+
+ case V4L2_CTRL_TYPE_INTEGER:
+ return ControlTypeInteger32;
+
+ case V4L2_CTRL_TYPE_INTEGER64:
+ return ControlTypeInteger64;
+
+ case V4L2_CTRL_TYPE_MENU:
+ case V4L2_CTRL_TYPE_BUTTON:
+ case V4L2_CTRL_TYPE_BITMASK:
+ case V4L2_CTRL_TYPE_INTEGER_MENU:
+ /*
+ * More precise types may be needed, for now use a 32-bit
+ * integer type.
+ */
+ return ControlTypeInteger32;
+
+ default:
+ return ControlTypeNone;
+ }
+}
+
+/**
+ * \brief Create a ControlId for a V4L2 control
+ * \param[in] ctrl The v4l2_query_ext_ctrl that represents a V4L2 control
+ * \return A ControlId associated to \a ctrl
+ */
+std::unique_ptr<ControlId> V4L2Device::v4l2ControlId(const v4l2_query_ext_ctrl &ctrl)
+{
+ const size_t len = strnlen(ctrl.name, sizeof(ctrl.name));
+ const std::string name(static_cast<const char *>(ctrl.name), len);
+ const ControlType type = v4l2CtrlType(ctrl.type);
+
+ return std::make_unique<ControlId>(ctrl.id, name, type);
+}
+
+/**
+ * \brief Create a ControlInfo for a V4L2 control
+ * \param[in] ctrl The v4l2_query_ext_ctrl that represents a V4L2 control
+ * \return A ControlInfo that represents \a ctrl
+ */
+ControlInfo V4L2Device::v4l2ControlInfo(const v4l2_query_ext_ctrl &ctrl)
+{
+ switch (ctrl.type) {
+ case V4L2_CTRL_TYPE_U8:
+ return ControlInfo(static_cast<uint8_t>(ctrl.minimum),
+ static_cast<uint8_t>(ctrl.maximum),
+ static_cast<uint8_t>(ctrl.default_value));
+
+ case V4L2_CTRL_TYPE_BOOLEAN:
+ return ControlInfo(static_cast<bool>(ctrl.minimum),
+ static_cast<bool>(ctrl.maximum),
+ static_cast<bool>(ctrl.default_value));
+
+ case V4L2_CTRL_TYPE_INTEGER64:
+ return ControlInfo(static_cast<int64_t>(ctrl.minimum),
+ static_cast<int64_t>(ctrl.maximum),
+ static_cast<int64_t>(ctrl.default_value));
+
+ case V4L2_CTRL_TYPE_INTEGER_MENU:
+ case V4L2_CTRL_TYPE_MENU:
+ return v4l2MenuControlInfo(ctrl);
+
+ default:
+ return ControlInfo(static_cast<int32_t>(ctrl.minimum),
+ static_cast<int32_t>(ctrl.maximum),
+ static_cast<int32_t>(ctrl.default_value));
+ }
+}
+
+/**
+ * \brief Create ControlInfo for a V4L2 menu control
+ * \param[in] ctrl The v4l2_query_ext_ctrl that represents a V4L2 menu control
+ *
+ * The created ControlInfo contains indices acquired by VIDIOC_QUERYMENU.
+ *
+ * \return A ControlInfo that represents \a ctrl
+ */
+ControlInfo V4L2Device::v4l2MenuControlInfo(const struct v4l2_query_ext_ctrl &ctrl)
+{
+ std::vector<ControlValue> indices;
+ struct v4l2_querymenu menu = {};
+ menu.id = ctrl.id;
+
+ if (ctrl.minimum < 0)
+ return ControlInfo();
+
+ for (int32_t index = ctrl.minimum; index <= ctrl.maximum; ++index) {
+ menu.index = index;
+ if (ioctl(VIDIOC_QUERYMENU, &menu) != 0)
+ continue;
+
+ indices.push_back(index);
+ }
+
+ return ControlInfo(indices,
+ ControlValue(static_cast<int32_t>(ctrl.default_value)));
+}
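+
+/*
+ * Illustrative sketch (not part of this patch): the valid menu indices end up
+ * in ControlInfo::values(), so a caller might enumerate them as follows, with
+ * V4L2_CID_TEST_PATTERN as an arbitrary example of a menu control assumed to
+ * be present:
+ *
+ *	const ControlInfo &info = device->controls().at(V4L2_CID_TEST_PATTERN);
+ *	for (const ControlValue &value : info.values())
+ *		LOG(V4L2, Debug) << "Valid index: " << value.get<int32_t>();
+ */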
+
/*
* \brief List and store information about all controls supported by the
* V4L2 device
@@ -435,7 +579,6 @@ void V4L2Device::listControls()
ControlInfoMap::Map ctrls;
struct v4l2_query_ext_ctrl ctrl = {};
- /* \todo Add support for menu controls. */
while (1) {
ctrl.id |= V4L2_CTRL_FLAG_NEXT_CTRL |
V4L2_CTRL_FLAG_NEXT_COMPOUND;
@@ -464,39 +607,79 @@ void V4L2Device::listControls()
continue;
}
- controlIds_.emplace_back(std::make_unique<V4L2ControlId>(ctrl));
+ LOG(V4L2, Debug) << "Control: " << ctrl.name
+ << " (" << utils::hex(ctrl.id) << ")";
+
+ controlIds_.emplace_back(v4l2ControlId(ctrl));
controlInfo_.emplace(ctrl.id, ctrl);
- ctrls.emplace(controlIds_.back().get(), V4L2ControlInfo(ctrl));
+ ctrls.emplace(controlIds_.back().get(), v4l2ControlInfo(ctrl));
}
controls_ = std::move(ctrls);
}
+/**
+ * \brief Update the information for all device controls
+ *
+ * The V4L2Device class caches information about all controls supported by the
+ * device and exposes it through the controls() and controlInfo() functions.
+ * Control information may change at runtime, for instance when formats on a
+ * subdev are modified. When this occurs, this function can be used to refresh
+ * control information. The information is refreshed in place; all pointers to
+ * v4l2_query_ext_ctrl instances previously returned by controlInfo() and
+ * iterators to the ControlInfoMap returned by controls() remain valid.
+ *
+ * Note that control information isn't refreshed automatically as it may be an
+ * expensive operation. V4L2Device users are responsible for calling this
+ * function when required, based on their usage pattern of the class.
+ */
+void V4L2Device::updateControlInfo()
+{
+ for (auto &[controlId, info] : controls_) {
+ unsigned int id = controlId->id();
+
+ /*
+ * Assume controlInfo_ has a corresponding entry, as it has been
+ * generated by listControls().
+ */
+ struct v4l2_query_ext_ctrl &ctrl = controlInfo_[id];
+
+ if (ioctl(VIDIOC_QUERY_EXT_CTRL, &ctrl)) {
+ LOG(V4L2, Debug)
+ << "Could not refresh control "
+ << utils::hex(id);
+ continue;
+ }
+
+ info = v4l2ControlInfo(ctrl);
+ }
+}
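+
+/*
+ * Illustrative usage sketch (not part of this patch): on sensors, exposure
+ * limits typically change with the configured format, so a user would refresh
+ * the cached information after reconfiguring the subdevice. "subdev", "pad"
+ * and "format" are placeholders:
+ *
+ *	subdev->setFormat(pad, &format);
+ *	subdev->updateControlInfo();
+ *	const ControlInfo &exposure =
+ *		subdev->controls().at(V4L2_CID_EXPOSURE);
+ */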
+
/*
- * \brief Update the value of the first \a count V4L2 controls in \a ctrls using
- * values in \a v4l2Ctrls
+ * \brief Update the values of the V4L2 controls in \a ctrls using values in
+ * \a v4l2Ctrls
* \param[inout] ctrls List of V4L2 controls to update
* \param[in] v4l2Ctrls List of V4L2 extended controls as returned by the driver
- * \param[in] count The number of controls to update
*/
void V4L2Device::updateControls(ControlList *ctrls,
- const struct v4l2_ext_control *v4l2Ctrls,
- unsigned int count)
+ Span<const v4l2_ext_control> v4l2Ctrls)
{
- unsigned int i = 0;
- for (auto &ctrl : *ctrls) {
- if (i == count)
- break;
+ for (const v4l2_ext_control &v4l2Ctrl : v4l2Ctrls) {
+ const unsigned int id = v4l2Ctrl.id;
- const struct v4l2_ext_control *v4l2Ctrl = &v4l2Ctrls[i];
- unsigned int id = ctrl.first;
- ControlValue &value = ctrl.second;
+ ControlValue value = ctrls->get(id);
const auto iter = controls_.find(id);
+ ASSERT(iter != controls_.end());
+
switch (iter->first->type()) {
case ControlTypeInteger64:
- value.set<int64_t>(v4l2Ctrl->value64);
+ value.set<int64_t>(v4l2Ctrl.value64);
+ break;
+
+ case ControlTypeInteger32:
+ value.set<int32_t>(v4l2Ctrl.value);
break;
case ControlTypeByte:
@@ -511,12 +694,41 @@ void V4L2Device::updateControls(ControlList *ctrls,
* \todo To be changed when support for string controls
* will be added.
*/
- value.set<int32_t>(v4l2Ctrl->value);
+ value.set<int32_t>(v4l2Ctrl.value);
break;
}
- i++;
+ ctrls->set(id, value);
+ }
+}
+
+/**
+ * \brief Slot to handle V4L2 events from the V4L2 device
+ * \param[in] notifier The event notifier
+ *
+ * When this slot is called, a V4L2 event is available to be dequeued from the
+ * device.
+ */
+void V4L2Device::eventAvailable([[maybe_unused]] EventNotifier *notifier)
+{
+ struct v4l2_event event{};
+ int ret = ioctl(VIDIOC_DQEVENT, &event);
+ if (ret < 0) {
+ LOG(V4L2, Error)
+ << "Failed to dequeue event, disabling event notifier";
+ fdEventNotifier_->setEnabled(false);
+ return;
+ }
+
+ if (event.type != V4L2_EVENT_FRAME_SYNC) {
+ LOG(V4L2, Error)
+ << "Spurious event (" << event.type
+ << "), disabling event notifier";
+ fdEventNotifier_->setEnabled(false);
+ return;
}
+
+ frameStart.emit(event.u.frame_sync.frame_sequence);
}
} /* namespace libcamera */
diff --git a/src/libcamera/v4l2_pixelformat.cpp b/src/libcamera/v4l2_pixelformat.cpp
index 6b05909f..87d728fe 100644
--- a/src/libcamera/v4l2_pixelformat.cpp
+++ b/src/libcamera/v4l2_pixelformat.cpp
@@ -12,16 +12,18 @@
#include <map>
#include <string.h>
+#include <libcamera/base/log.h>
+
#include <libcamera/formats.h>
#include <libcamera/pixel_format.h>
#include "libcamera/internal/formats.h"
-#include "libcamera/internal/log.h"
/**
* \file v4l2_pixelformat.h
* \brief V4L2 Pixel Format
*/
+
namespace libcamera {
LOG_DECLARE_CATEGORY(V4L2)
@@ -46,6 +48,7 @@ namespace {
const std::map<V4L2PixelFormat, PixelFormat> vpf2pf{
/* RGB formats. */
{ V4L2PixelFormat(V4L2_PIX_FMT_RGB565), formats::RGB565 },
+ { V4L2PixelFormat(V4L2_PIX_FMT_RGB565X), formats::RGB565_BE },
{ V4L2PixelFormat(V4L2_PIX_FMT_RGB24), formats::BGR888 },
{ V4L2PixelFormat(V4L2_PIX_FMT_BGR24), formats::RGB888 },
{ V4L2PixelFormat(V4L2_PIX_FMT_XBGR32), formats::XRGB8888 },
diff --git a/src/libcamera/v4l2_subdevice.cpp b/src/libcamera/v4l2_subdevice.cpp
index 85d00c24..8fe5e45b 100644
--- a/src/libcamera/v4l2_subdevice.cpp
+++ b/src/libcamera/v4l2_subdevice.cpp
@@ -19,10 +19,11 @@
#include <libcamera/geometry.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/media_object.h"
-#include "libcamera/internal/utils.h"
/**
* \file v4l2_subdevice.h
@@ -446,19 +447,17 @@ int V4L2Subdevice::setFormat(unsigned int pad, V4L2SubdeviceFormat *format,
* \param[in] media The media device where the entity is registered
* \param[in] entity The media entity name
*
- * Releasing memory of the newly created instance is responsibility of the
- * caller of this function.
- *
* \return A newly created V4L2Subdevice on success, nullptr otherwise
*/
-V4L2Subdevice *V4L2Subdevice::fromEntityName(const MediaDevice *media,
- const std::string &entity)
+std::unique_ptr<V4L2Subdevice>
+V4L2Subdevice::fromEntityName(const MediaDevice *media,
+ const std::string &entity)
{
MediaEntity *mediaEntity = media->getEntityByName(entity);
if (!mediaEntity)
return nullptr;
- return new V4L2Subdevice(mediaEntity);
+ return std::make_unique<V4L2Subdevice>(mediaEntity);
}
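
Callers now receive an owning pointer; a sketch under assumptions (the entity
name is a placeholder):

	std::unique_ptr<V4L2Subdevice> sensor =
		V4L2Subdevice::fromEntityName(media, "imx219 10-0010");
	if (!sensor)
		return -ENODEV;
	/* No manual delete; the subdevice is released automatically. */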
std::string V4L2Subdevice::logPrefix() const
diff --git a/src/libcamera/v4l2_videodevice.cpp b/src/libcamera/v4l2_videodevice.cpp
index 508522ef..3d2d99b4 100644
--- a/src/libcamera/v4l2_videodevice.cpp
+++ b/src/libcamera/v4l2_videodevice.cpp
@@ -7,6 +7,7 @@
#include "libcamera/internal/v4l2_videodevice.h"
+#include <array>
#include <fcntl.h>
#include <iomanip>
#include <sstream>
@@ -20,18 +21,19 @@
#include <linux/version.h>
-#include <libcamera/event_notifier.h>
+#include <libcamera/base/event_notifier.h>
+#include <libcamera/base/log.h>
+
#include <libcamera/file_descriptor.h>
-#include "libcamera/internal/log.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/media_object.h"
-#include "libcamera/internal/utils.h"
/**
* \file v4l2_videodevice.h
* \brief V4L2 Video Device
*/
+
namespace libcamera {
LOG_DECLARE_CATEGORY(V4L2)
@@ -212,7 +214,7 @@ int V4L2BufferCache::get(const FrameBuffer &buffer)
for (unsigned int index = 0; index < cache_.size(); index++) {
const Entry &entry = cache_[index];
- if (!entry.free)
+ if (!entry.free_)
continue;
/* Try to find a cache hit by comparing the planes. */
@@ -222,9 +224,9 @@ int V4L2BufferCache::get(const FrameBuffer &buffer)
break;
}
- if (entry.lastUsed < oldest) {
+ if (entry.lastUsed_ < oldest) {
use = index;
- oldest = entry.lastUsed;
+ oldest = entry.lastUsed_;
}
}
@@ -248,16 +250,16 @@ int V4L2BufferCache::get(const FrameBuffer &buffer)
void V4L2BufferCache::put(unsigned int index)
{
ASSERT(index < cache_.size());
- cache_[index].free = true;
+ cache_[index].free_ = true;
}
V4L2BufferCache::Entry::Entry()
- : free(true), lastUsed(0)
+ : free_(true), lastUsed_(0)
{
}
V4L2BufferCache::Entry::Entry(bool free, uint64_t lastUsed, const FrameBuffer &buffer)
- : free(free), lastUsed(lastUsed)
+ : free_(free), lastUsed_(lastUsed)
{
for (const FrameBuffer::Plane &plane : buffer.planes())
planes_.emplace_back(plane);
@@ -351,6 +353,15 @@ bool V4L2BufferCache::Entry::operator==(const FrameBuffer &buffer) const
*/
/**
+ * \struct V4L2DeviceFormat::Plane
+ * \brief Per-plane memory size information
+ * \var V4L2DeviceFormat::Plane::size
+ * \brief The plane total memory size (in bytes)
+ * \var V4L2DeviceFormat::Plane::bpl
+ * \brief The plane line stride (in bytes)
+ */
+
+/**
* \var V4L2DeviceFormat::size
* \brief The image size in pixels
*/
@@ -472,7 +483,7 @@ const std::string V4L2DeviceFormat::toString() const
*/
V4L2VideoDevice::V4L2VideoDevice(const std::string &deviceNode)
: V4L2Device(deviceNode), cache_(nullptr), fdBufferNotifier_(nullptr),
- fdEventNotifier_(nullptr), frameStartEnabled_(false)
+ streaming_(false)
{
/*
* We default to an MMAP based CAPTURE video device, however this will
@@ -565,10 +576,6 @@ int V4L2VideoDevice::open()
fdBufferNotifier_->activated.connect(this, &V4L2VideoDevice::bufferAvailable);
fdBufferNotifier_->setEnabled(false);
- fdEventNotifier_ = new EventNotifier(fd(), EventNotifier::Exception);
- fdEventNotifier_->activated.connect(this, &V4L2VideoDevice::eventAvailable);
- fdEventNotifier_->setEnabled(false);
-
LOG(V4L2, Debug)
<< "Opened device " << caps_.bus_info() << ": "
<< caps_.driver() << ": " << caps_.card();
@@ -658,10 +665,6 @@ int V4L2VideoDevice::open(int handle, enum v4l2_buf_type type)
fdBufferNotifier_->activated.connect(this, &V4L2VideoDevice::bufferAvailable);
fdBufferNotifier_->setEnabled(false);
- fdEventNotifier_ = new EventNotifier(fd(), EventNotifier::Exception);
- fdEventNotifier_->activated.connect(this, &V4L2VideoDevice::eventAvailable);
- fdEventNotifier_->setEnabled(false);
-
LOG(V4L2, Debug)
<< "Opened device " << caps_.bus_info() << ": "
<< caps_.driver() << ": " << caps_.card();
@@ -679,7 +682,6 @@ void V4L2VideoDevice::close()
releaseBuffers();
delete fdBufferNotifier_;
- delete fdEventNotifier_;
V4L2Device::close();
}
@@ -710,7 +712,8 @@ void V4L2VideoDevice::close()
std::string V4L2VideoDevice::logPrefix() const
{
- return deviceNode() + (V4L2_TYPE_IS_OUTPUT(bufferType_) ? "[out]" : "[cap]");
+ return deviceNode() + "[" + std::to_string(fd()) +
+ (V4L2_TYPE_IS_OUTPUT(bufferType_) ? ":out]" : ":cap]");
}
/**
@@ -860,6 +863,8 @@ int V4L2VideoDevice::trySetFormatMultiplane(V4L2DeviceFormat *format, bool set)
pix->num_planes = format->planesCount;
pix->field = V4L2_FIELD_NONE;
+ ASSERT(pix->num_planes <= std::size(pix->plane_fmt));
+
for (unsigned int i = 0; i < pix->num_planes; ++i) {
pix->plane_fmt[i].bytesperline = format->planes[i].bpl;
pix->plane_fmt[i].sizeimage = format->planes[i].size;
@@ -1252,8 +1257,7 @@ std::unique_ptr<FrameBuffer> V4L2VideoDevice::createBuffer(unsigned int index)
buf.index = index;
buf.type = bufferType_;
- buf.memory = V4L2_MEMORY_MMAP;
- buf.length = ARRAY_SIZE(v4l2Planes);
+ buf.length = std::size(v4l2Planes);
buf.m.planes = v4l2Planes;
int ret = ioctl(VIDIOC_QUERYBUF, &buf);
@@ -1388,6 +1392,16 @@ int V4L2VideoDevice::queueBuffer(FrameBuffer *buffer)
struct v4l2_buffer buf = {};
int ret;
+ /*
+ * Pipeline handlers should not requeue buffers after releasing the
+	 * buffers on the device. Any occurrence of this error should be fixed
+ * in the pipeline handler directly.
+ */
+ if (!cache_) {
+ LOG(V4L2, Fatal) << "No BufferCache available to queue.";
+ return -ENOENT;
+ }
+
ret = cache_->get(*buffer);
if (ret < 0)
return ret;
@@ -1533,74 +1547,11 @@ FrameBuffer *V4L2VideoDevice::dequeueBuffer()
}
/**
- * \brief Slot to handle V4L2 events from the V4L2 video device
- * \param[in] notifier The event notifier
- *
- * When this slot is called, a V4L2 event is available to be dequeued from the
- * device.
- */
-void V4L2VideoDevice::eventAvailable([[maybe_unused]] EventNotifier *notifier)
-{
- struct v4l2_event event{};
- int ret = ioctl(VIDIOC_DQEVENT, &event);
- if (ret < 0) {
- LOG(V4L2, Error)
- << "Failed to dequeue event, disabling event notifier";
- fdEventNotifier_->setEnabled(false);
- return;
- }
-
- if (event.type != V4L2_EVENT_FRAME_SYNC) {
- LOG(V4L2, Error)
- << "Spurious event (" << event.type
- << "), disabling event notifier";
- fdEventNotifier_->setEnabled(false);
- return;
- }
-
- frameStart.emit(event.u.frame_sync.frame_sequence);
-}
-
-/**
* \var V4L2VideoDevice::bufferReady
* \brief A Signal emitted when a framebuffer completes
*/
/**
- * \brief Enable or disable frame start event notification
- * \param[in] enable True to enable frame start events, false to disable them
- *
- * This function enables or disables generation of frame start events. Once
- * enabled, the events are signalled through the frameStart signal.
- *
- * \return 0 on success, a negative error code otherwise
- */
-int V4L2VideoDevice::setFrameStartEnabled(bool enable)
-{
- if (frameStartEnabled_ == enable)
- return 0;
-
- struct v4l2_event_subscription event{};
- event.type = V4L2_EVENT_FRAME_SYNC;
-
- unsigned long request = enable ? VIDIOC_SUBSCRIBE_EVENT
- : VIDIOC_UNSUBSCRIBE_EVENT;
- int ret = ioctl(request, &event);
- if (enable && ret)
- return ret;
-
- fdEventNotifier_->setEnabled(enable);
- frameStartEnabled_ = enable;
-
- return ret;
-}
-
-/**
- * \var V4L2VideoDevice::frameStart
- * \brief A Signal emitted when capture of a frame has started
- */
-
-/**
* \brief Start the video stream
* \return 0 on success or a negative error code otherwise
*/
@@ -1615,6 +1566,8 @@ int V4L2VideoDevice::streamOn()
return ret;
}
+ streaming_ = true;
+
return 0;
}
@@ -1626,12 +1579,18 @@ int V4L2VideoDevice::streamOn()
* and the bufferReady signal is emitted for them. The order in which those
* buffers are dequeued is not specified.
*
+ * This function is a no-op if the stream is not started and no buffers are
+ * queued.
+ *
* \return 0 on success or a negative error code otherwise
*/
int V4L2VideoDevice::streamOff()
{
int ret;
+ if (!streaming_ && queuedBuffers_.empty())
+ return 0;
+
ret = ioctl(VIDIOC_STREAMOFF, &bufferType_);
if (ret < 0) {
LOG(V4L2, Error)
@@ -1649,6 +1608,7 @@ int V4L2VideoDevice::streamOff()
queuedBuffers_.clear();
fdBufferNotifier_->setEnabled(false);
+ streaming_ = false;
return 0;
}
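
This simplifies teardown paths; a hedged sketch of the intended pattern (the
error path is hypothetical):

	if (ret) {
		video->streamOff();	/* Safe even if streamOn() never ran. */
		return ret;
	}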
@@ -1659,19 +1619,17 @@ int V4L2VideoDevice::streamOff()
* \param[in] media The media device where the entity is registered
* \param[in] entity The media entity name
*
- * Releasing memory of the newly created instance is responsibility of the
- * caller of this function.
- *
* \return A newly created V4L2VideoDevice on success, nullptr otherwise
*/
-V4L2VideoDevice *V4L2VideoDevice::fromEntityName(const MediaDevice *media,
- const std::string &entity)
+std::unique_ptr<V4L2VideoDevice>
+V4L2VideoDevice::fromEntityName(const MediaDevice *media,
+ const std::string &entity)
{
MediaEntity *mediaEntity = media->getEntityByName(entity);
if (!mediaEntity)
return nullptr;
- return new V4L2VideoDevice(mediaEntity);
+ return std::make_unique<V4L2VideoDevice>(mediaEntity);
}
/**
diff --git a/src/meson.build b/src/meson.build
index b9c7e759..e0ea9c35 100644
--- a/src/meson.build
+++ b/src/meson.build
@@ -1,25 +1,40 @@
# SPDX-License-Identifier: CC0-1.0
-subdir('android')
+# Cache system paths
+libcamera_datadir = get_option('datadir') / 'libcamera'
+libcamera_libdir = get_option('libdir') / 'libcamera'
+libcamera_sysconfdir = get_option('sysconfdir') / 'libcamera'
+
+config_h.set('LIBCAMERA_DATA_DIR', '"' + get_option('prefix') / libcamera_datadir + '"')
+config_h.set('LIBCAMERA_SYSCONF_DIR', '"' + get_option('prefix') / libcamera_sysconfdir + '"')
+
+summary({
+ 'LIBCAMERA_DATA_DIR' : config_h.get('LIBCAMERA_DATA_DIR'),
+ 'LIBCAMERA_SYSCONF_DIR' : config_h.get('LIBCAMERA_SYSCONF_DIR'),
+ }, section : 'Paths')
+# Module Signing
openssl = find_program('openssl', required : true)
if openssl.found()
ipa_priv_key = custom_target('ipa-priv-key',
- output : [ 'ipa-priv-key.pem' ],
- command : [ gen_ipa_priv_key, '@OUTPUT@' ])
+ output : ['ipa-priv-key.pem'],
+ command : [gen_ipa_priv_key, '@OUTPUT@'])
config_h.set('HAVE_IPA_PUBKEY', 1)
ipa_sign_module = true
else
ipa_sign_module = false
endif
+# libcamera must be built first, as it is a dependency of the other components.
subdir('libcamera')
+
+subdir('android')
subdir('ipa')
+
+subdir('lc-compliance')
+
subdir('cam')
subdir('qcam')
-if get_option('v4l2')
- subdir('v4l2')
-endif
-
subdir('gstreamer')
+subdir('v4l2')
diff --git a/src/qcam/assets/feathericons/feathericons.qrc b/src/qcam/assets/feathericons/feathericons.qrc
index 656f2b46..c5302040 100644
--- a/src/qcam/assets/feathericons/feathericons.qrc
+++ b/src/qcam/assets/feathericons/feathericons.qrc
@@ -1,11 +1,11 @@
<!-- SPDX-License-Identifier: GPL-2.0-or-later -->
<!DOCTYPE RCC><RCC version="1.0">
<qresource>
-<file>./aperture.svg</file>
-<file>./camera-off.svg</file>
-<file>./play-circle.svg</file>
-<file>./save.svg</file>
-<file>./stop-circle.svg</file>
-<file>./x-circle.svg</file>
+ <file>aperture.svg</file>
+ <file>camera-off.svg</file>
+ <file>play-circle.svg</file>
+ <file>save.svg</file>
+ <file>stop-circle.svg</file>
+ <file>x-circle.svg</file>
</qresource>
</RCC>
diff --git a/src/qcam/assets/shader/NV_2_planes_VU_f.glsl b/src/qcam/assets/shader/NV_2_planes_VU_f.glsl
deleted file mode 100644
index 086c5b6d..00000000
--- a/src/qcam/assets/shader/NV_2_planes_VU_f.glsl
+++ /dev/null
@@ -1,32 +0,0 @@
-/* SPDX-License-Identifier: LGPL-2.1-or-later */
-/*
- * Copyright (C) 2020, Linaro
- *
- * NV_2_planes_VU_f.glsl - Fragment shader code for NV21, NV61 and NV42 formats
- */
-
-#ifdef GL_ES
-precision mediump float;
-#endif
-
-varying vec2 textureOut;
-uniform sampler2D tex_y;
-uniform sampler2D tex_u;
-
-void main(void)
-{
- vec3 yuv;
- vec3 rgb;
- mat3 yuv2rgb_bt601_mat = mat3(
- vec3(1.164, 1.164, 1.164),
- vec3(0.000, -0.392, 2.017),
- vec3(1.596, -0.813, 0.000)
- );
-
- yuv.x = texture2D(tex_y, textureOut).r - 0.063;
- yuv.y = texture2D(tex_u, textureOut).g - 0.500;
- yuv.z = texture2D(tex_u, textureOut).r - 0.500;
-
- rgb = yuv2rgb_bt601_mat * yuv;
- gl_FragColor = vec4(rgb, 1.0);
-}
diff --git a/src/qcam/assets/shader/RGB.frag b/src/qcam/assets/shader/RGB.frag
new file mode 100644
index 00000000..4c374ac9
--- /dev/null
+++ b/src/qcam/assets/shader/RGB.frag
@@ -0,0 +1,22 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Laurent Pinchart
+ *
+ * RGB.frag - Fragment shader code for RGB formats
+ */
+
+#ifdef GL_ES
+precision mediump float;
+#endif
+
+varying vec2 textureOut;
+uniform sampler2D tex_y;
+
+void main(void)
+{
+ vec3 rgb;
+
+ rgb = texture2D(tex_y, textureOut).RGB_PATTERN;
+
+ gl_FragColor = vec4(rgb, 1.0);
+}
diff --git a/src/qcam/assets/shader/NV_2_planes_UV_f.glsl b/src/qcam/assets/shader/YUV_2_planes.frag
index 67633a11..125f1c85 100644
--- a/src/qcam/assets/shader/NV_2_planes_UV_f.glsl
+++ b/src/qcam/assets/shader/YUV_2_planes.frag
@@ -2,7 +2,7 @@
/*
* Copyright (C) 2020, Linaro
*
- * NV_2_planes_UV_f.glsl - Fragment shader code for NV12, NV16 and NV24 formats
+ * YUV_2_planes.frag - Fragment shader code for NV12, NV16 and NV24 formats
*/
#ifdef GL_ES
@@ -24,8 +24,15 @@ void main(void)
);
yuv.x = texture2D(tex_y, textureOut).r - 0.063;
+#if defined(YUV_PATTERN_UV)
yuv.y = texture2D(tex_u, textureOut).r - 0.500;
yuv.z = texture2D(tex_u, textureOut).g - 0.500;
+#elif defined(YUV_PATTERN_VU)
+ yuv.y = texture2D(tex_u, textureOut).g - 0.500;
+ yuv.z = texture2D(tex_u, textureOut).r - 0.500;
+#else
+#error Invalid pattern
+#endif
rgb = yuv2rgb_bt601_mat * yuv;
gl_FragColor = vec4(rgb, 1.0);
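
The pattern macro is expected to be injected by the application when compiling
the shader, mirroring the fragmentShaderDefines_ mechanism added to
viewfinder_gl.cpp elsewhere in this diff. A hedged sketch of how a Qt caller
might do this (names are illustrative):

	QFile file(":YUV_2_planes.frag");
	file.open(QIODevice::ReadOnly);

	QByteArray src("#define YUV_PATTERN_UV\n");
	src.append(file.readAll());

	QOpenGLShader shader(QOpenGLShader::Fragment);
	shader.compileSourceCode(src);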
diff --git a/src/qcam/assets/shader/NV_3_planes_f.glsl b/src/qcam/assets/shader/YUV_3_planes.frag
index 4bc94184..2be74b5d 100644
--- a/src/qcam/assets/shader/NV_3_planes_f.glsl
+++ b/src/qcam/assets/shader/YUV_3_planes.frag
@@ -2,7 +2,7 @@
/*
* Copyright (C) 2020, Linaro
*
- * NV_3_planes_UV_f.glsl - Fragment shader code for YUV420 format
+ * YUV_3_planes.frag - Fragment shader code for YUV420 format
*/
#ifdef GL_ES
diff --git a/src/qcam/assets/shader/YUV_packed.frag b/src/qcam/assets/shader/YUV_packed.frag
new file mode 100644
index 00000000..d6efd4ce
--- /dev/null
+++ b/src/qcam/assets/shader/YUV_packed.frag
@@ -0,0 +1,82 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+ *
+ * YUV_packed.frag - Fragment shader code for YUYV packed formats
+ */
+
+#ifdef GL_ES
+precision mediump float;
+#endif
+
+varying vec2 textureOut;
+
+uniform sampler2D tex_y;
+uniform vec2 tex_step;
+
+void main(void)
+{
+ mat3 yuv2rgb_bt601_mat = mat3(
+ vec3(1.164, 1.164, 1.164),
+ vec3(0.000, -0.392, 2.017),
+ vec3(1.596, -0.813, 0.000)
+ );
+ vec3 yuv2rgb_bt601_offset = vec3(0.063, 0.500, 0.500);
+
+ /*
+ * The sampler won't interpolate the texture correctly along the X axis,
+ * as each RGBA pixel effectively stores two pixels. We thus need to
+ * interpolate manually.
+ *
+	 * In integer texture coordinates, the Y values are laid out in the
+ * texture memory as follows:
+ *
+ * ...| Y U Y V | Y U Y V | Y U Y V |...
+ * ...| R G B A | R G B A | R G B A |...
+ * ^ ^ ^ ^ ^ ^
+ * | | | | | |
+ * n-1 n-0.5 n n+0.5 n+1 n+1.5
+ *
+ * For a texture location x in the interval [n, n+1[, sample the left
+ * and right pixels at n and n+1, and interpolate them with
+ *
+ * left.r * (1 - a) + left.b * a if fract(x) < 0.5
+ * left.b * (1 - a) + right.r * a if fract(x) >= 0.5
+ *
+ * with a = fract(x * 2) which can also be written
+ *
+ * a = fract(x) * 2 if fract(x) < 0.5
+ * a = fract(x) * 2 - 1 if fract(x) >= 0.5
+ */
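+	/*
+	 * Worked example (illustrative, not part of the original comment):
+	 * for fract(x) = 0.75 we get a = 0.5, so for YUYV the luma sample is
+	 * the average of this texel's second Y (left.b) and the next texel's
+	 * first Y (right.r).
+	 */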
+ vec2 pos = textureOut;
+ float f_x = fract(pos.x / tex_step.x);
+
+ vec4 left = texture2D(tex_y, vec2(pos.x - f_x * tex_step.x, pos.y));
+ vec4 right = texture2D(tex_y, vec2(pos.x + (1.0 - f_x) * tex_step.x , pos.y));
+
+#if defined(YUV_PATTERN_UYVY)
+ float y_left = mix(left.g, left.a, f_x * 2.0);
+ float y_right = mix(left.a, right.g, f_x * 2.0 - 1.0);
+ vec2 uv = mix(left.rb, right.rb, f_x);
+#elif defined(YUV_PATTERN_VYUY)
+ float y_left = mix(left.g, left.a, f_x * 2.0);
+ float y_right = mix(left.a, right.g, f_x * 2.0 - 1.0);
+ vec2 uv = mix(left.br, right.br, f_x);
+#elif defined(YUV_PATTERN_YUYV)
+ float y_left = mix(left.r, left.b, f_x * 2.0);
+ float y_right = mix(left.b, right.r, f_x * 2.0 - 1.0);
+ vec2 uv = mix(left.ga, right.ga, f_x);
+#elif defined(YUV_PATTERN_YVYU)
+ float y_left = mix(left.r, left.b, f_x * 2.0);
+ float y_right = mix(left.b, right.r, f_x * 2.0 - 1.0);
+ vec2 uv = mix(left.ag, right.ag, f_x);
+#else
+#error Invalid pattern
+#endif
+
+ float y = mix(y_left, y_right, step(0.5, f_x));
+
+ vec3 rgb = yuv2rgb_bt601_mat * (vec3(y, uv) - yuv2rgb_bt601_offset);
+
+ gl_FragColor = vec4(rgb, 1.0);
+}
diff --git a/src/qcam/assets/shader/bayer_1x_packed.frag b/src/qcam/assets/shader/bayer_1x_packed.frag
new file mode 100644
index 00000000..f53f5575
--- /dev/null
+++ b/src/qcam/assets/shader/bayer_1x_packed.frag
@@ -0,0 +1,216 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Based on the code from http://jgt.akpeters.com/papers/McGuire08/
+ *
+ * Efficient, High-Quality Bayer Demosaic Filtering on GPUs
+ *
+ * Morgan McGuire
+ *
+ * This paper appears in issue Volume 13, Number 4.
+ * ---------------------------------------------------------
+ * Copyright (c) 2008, Morgan McGuire. All rights reserved.
+ *
+ *
+ * Modified by Linaro Ltd for 10/12-bit packed vs 8-bit raw Bayer format,
+ * and for a simpler demosaic algorithm.
+ * Copyright (C) 2020, Linaro
+ *
+ * bayer_1x_packed.frag - Fragment shader code for raw Bayer 10-bit and 12-bit
+ * packed formats
+ */
+
+#ifdef GL_ES
+precision mediump float;
+#endif
+
+/*
+ * These constants are used to select the bytes containing the MS part of
+ * the pixel value:
+ * BPP - bytes per pixel,
+ * THRESHOLD_L = fract(BPP) * 0.5 + 0.02
+ * THRESHOLD_H = 1.0 - fract(BPP) * 1.5 + 0.02
+ * Let X be the x coordinate in the texture measured in bytes (so that the
+ * range is from 0 to (stride_-1)) aligned on the nearest pixel.
+ * E.g. for RAW10P:
+ * -------------+-------------------+-------------------+--
+ * pixel No | 0 1 2 3 | 4 5 6 7 | ...
+ * -------------+-------------------+-------------------+--
+ * byte offset | 0 1 2 3 4 | 5 6 7 8 9 | ...
+ * -------------+-------------------+-------------------+--
+ * X | 0.0 1.25 2.5 3.75 | 5.0 6.25 7.5 8.75 | ...
+ * -------------+-------------------+-------------------+--
+ * If fract(X) < THRESHOLD_L then the previous byte contains the LS
+ * bits of the pixel values and needs to be skipped.
+ * If fract(X) > THRESHOLD_H then the next byte contains the LS bits
+ * of the pixel values and needs to be skipped.
+ */
+#if defined(RAW10P)
+#define BPP 1.25
+#define THRESHOLD_L 0.14
+#define THRESHOLD_H 0.64
+#elif defined(RAW12P)
+#define BPP 1.5
+#define THRESHOLD_L 0.27
+#define THRESHOLD_H 0.27
+#else
+#error Invalid raw format
+#endif
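+
+/*
+ * Illustrative sanity check (not part of the original shader): for RAW10P,
+ * fract(BPP) = 0.25, so THRESHOLD_L = 0.25 * 0.5 + 0.02 = 0.145 and
+ * THRESHOLD_H = 1.0 - 0.25 * 1.5 + 0.02 = 0.645; the defines above round
+ * these down to 0.14 and 0.64.
+ */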
+
+
+varying vec2 textureOut;
+
+/* the texture size in pixels */
+uniform vec2 tex_size;
+uniform vec2 tex_step;
+uniform vec2 tex_bayer_first_red;
+
+uniform sampler2D tex_y;
+
+void main(void)
+{
+ vec3 rgb;
+
+ /*
+ * center_bytes holds the coordinates of the MS byte of the pixel
+ * being sampled on the [0, stride-1/height-1] range.
+ * center_pixel holds the coordinates of the pixel being sampled
+ * on the [0, width/height-1] range.
+ */
+ vec2 center_bytes;
+ vec2 center_pixel;
+
+ /*
+ * x- and y-positions of the adjacent pixels on the [0, 1] range.
+ */
+ vec2 xcoords;
+ vec2 ycoords;
+
+ /*
+ * The coordinates passed to the shader in textureOut may point
+ * to a place in between the pixels if the texture format doesn't
+ * match the image format. In particular, MIPI packed raw Bayer
+ * formats don't have a matching texture format.
+ * In this case align the coordinates to the left nearest pixel
+ * by hand.
+ */
+ center_pixel = floor(textureOut * tex_size);
+ center_bytes.y = center_pixel.y;
+
+ /*
+	 * Add a small number (a few LSBs of the mantissa) to avoid float
+	 * representation issues. Maybe paranoid.
+ */
+ center_bytes.x = BPP * center_pixel.x + 0.02;
+
+ float fract_x = fract(center_bytes.x);
+
+ /*
+ * The below floor() call ensures that center_bytes.x points
+ * at one of the bytes representing the 8 higher bits of
+ * the pixel value, not at the byte containing the LS bits
+ * of the group of the pixels.
+ */
+ center_bytes.x = floor(center_bytes.x);
+ center_bytes *= tex_step;
+
+ xcoords = center_bytes.x + vec2(-tex_step.x, tex_step.x);
+ ycoords = center_bytes.y + vec2(-tex_step.y, tex_step.y);
+
+ /*
+ * If xcoords[0] points at the byte containing the LS bits
+ * of the previous group of the pixels, move xcoords[0] one
+ * byte back.
+ */
+ xcoords[0] += (fract_x < THRESHOLD_L) ? -tex_step.x : 0.0;
+
+ /*
+ * If xcoords[1] points at the byte containing the LS bits
+ * of the current group of the pixels, move xcoords[1] one
+ * byte forward.
+ */
+ xcoords[1] += (fract_x > THRESHOLD_H) ? tex_step.x : 0.0;
+
+ vec2 alternate = mod(center_pixel.xy + tex_bayer_first_red, 2.0);
+ bool even_col = alternate.x < 1.0;
+ bool even_row = alternate.y < 1.0;
+
+ /*
+ * We need to sample the central pixel and the ones with offset
+ * of -1 to +1 pixel in both X and Y directions. Let's name these
+ * pixels as below, where C is the central pixel:
+ *
+ * +----+----+----+----+
+ * | \ x| | | |
+ * |y \ | -1 | 0 | +1 |
+ * +----+----+----+----+
+ * | +1 | D2 | A1 | D3 |
+ * +----+----+----+----+
+ * | 0 | B0 | C | B1 |
+ * +----+----+----+----+
+ * | -1 | D0 | A0 | D1 |
+ * +----+----+----+----+
+ *
+ * In the below equations (0,-1).r means "r component of the texel
+ * shifted by -tex_step.y from the center_bytes one" etc.
+ *
+ * In the even row / even column (EE) case the colour values are:
+ * R = C = (0,0).r,
+ * G = (A0 + A1 + B0 + B1) / 4.0 =
+ * ( (0,-1).r + (0,1).r + (-1,0).r + (1,0).r ) / 4.0,
+ * B = (D0 + D1 + D2 + D3) / 4.0 =
+ * ( (-1,-1).r + (1,-1).r + (-1,1).r + (1,1).r ) / 4.0
+ *
+ * For even row / odd column (EO):
+ * R = (B0 + B1) / 2.0 = ( (-1,0).r + (1,0).r ) / 2.0,
+ * G = C = (0,0).r,
+ * B = (A0 + A1) / 2.0 = ( (0,-1).r + (0,1).r ) / 2.0
+ *
+ * For odd row / even column (OE):
+ * R = (A0 + A1) / 2.0 = ( (0,-1).r + (0,1).r ) / 2.0,
+ * G = C = (0,0).r,
+ * B = (B0 + B1) / 2.0 = ( (-1,0).r + (1,0).r ) / 2.0
+ *
+ * For odd row / odd column (OO):
+ * R = (D0 + D1 + D2 + D3) / 4.0 =
+ * ( (-1,-1).r + (1,-1).r + (-1,1).r + (1,1).r ) / 4.0,
+ * G = (A0 + A1 + B0 + B1) / 4.0 =
+ * ( (0,-1).r + (0,1).r + (-1,0).r + (1,0).r ) / 4.0,
+ * B = C = (0,0).r
+ */
+
+ /*
+ * Fetch the values and precalculate the terms:
+ * patterns.x = (A0 + A1) / 2.0
+ * patterns.y = (B0 + B1) / 2.0
+ * patterns.z = (A0 + A1 + B0 + B1) / 4.0
+ * patterns.w = (D0 + D1 + D2 + D3) / 4.0
+ */
+ #define fetch(x, y) texture2D(tex_y, vec2(x, y)).r
+
+ float C = texture2D(tex_y, center_bytes).r;
+ vec4 patterns = vec4(
+ fetch(center_bytes.x, ycoords[0]), /* A0: (0,-1) */
+ fetch(xcoords[0], center_bytes.y), /* B0: (-1,0) */
+ fetch(xcoords[0], ycoords[0]), /* D0: (-1,-1) */
+ fetch(xcoords[1], ycoords[0])); /* D1: (1,-1) */
+ vec4 temp = vec4(
+ fetch(center_bytes.x, ycoords[1]), /* A1: (0,1) */
+ fetch(xcoords[1], center_bytes.y), /* B1: (1,0) */
+ fetch(xcoords[1], ycoords[1]), /* D3: (1,1) */
+ fetch(xcoords[0], ycoords[1])); /* D2: (-1,1) */
+ patterns = (patterns + temp) * 0.5;
+ /* .x = (A0 + A1) / 2.0, .y = (B0 + B1) / 2.0 */
+ /* .z = (D0 + D3) / 2.0, .w = (D1 + D2) / 2.0 */
+ patterns.w = (patterns.z + patterns.w) * 0.5;
+ patterns.z = (patterns.x + patterns.y) * 0.5;
+
+ rgb = even_col ?
+ (even_row ?
+ vec3(C, patterns.zw) :
+ vec3(patterns.x, C, patterns.y)) :
+ (even_row ?
+ vec3(patterns.y, C, patterns.x) :
+ vec3(patterns.wz, C));
+
+ gl_FragColor = vec4(rgb, 1.0);
+}
diff --git a/src/qcam/assets/shader/bayer_8.frag b/src/qcam/assets/shader/bayer_8.frag
new file mode 100644
index 00000000..4ece44ab
--- /dev/null
+++ b/src/qcam/assets/shader/bayer_8.frag
@@ -0,0 +1,104 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+From http://jgt.akpeters.com/papers/McGuire08/
+
+Efficient, High-Quality Bayer Demosaic Filtering on GPUs
+
+Morgan McGuire
+
+This paper appears in issue Volume 13, Number 4.
+---------------------------------------------------------
+Copyright (c) 2008, Morgan McGuire. All rights reserved.
+
+Modified by Linaro Ltd to integrate it into libcamera.
+Copyright (C) 2021, Linaro
+*/
+
+//Pixel Shader
+
+/** Monochrome RGBA or GL_LUMINANCE Bayer encoded texture.*/
+uniform sampler2D tex_y;
+varying vec4 center;
+varying vec4 yCoord;
+varying vec4 xCoord;
+
+void main(void) {
+ #define fetch(x, y) texture2D(tex_y, vec2(x, y)).r
+
+ float C = texture2D(tex_y, center.xy).r; // ( 0, 0)
+ const vec4 kC = vec4( 4.0, 6.0, 5.0, 5.0) / 8.0;
+
+ // Determine which of four types of pixels we are on.
+ vec2 alternate = mod(floor(center.zw), 2.0);
+
+ vec4 Dvec = vec4(
+ fetch(xCoord[1], yCoord[1]), // (-1,-1)
+ fetch(xCoord[1], yCoord[2]), // (-1, 1)
+ fetch(xCoord[2], yCoord[1]), // ( 1,-1)
+ fetch(xCoord[2], yCoord[2])); // ( 1, 1)
+
+ vec4 PATTERN = (kC.xyz * C).xyzz;
+
+ // Can also be a dot product with (1,1,1,1) on hardware where that is
+ // specially optimized.
+ // Equivalent to: D = Dvec[0] + Dvec[1] + Dvec[2] + Dvec[3];
+ Dvec.xy += Dvec.zw;
+ Dvec.x += Dvec.y;
+
+ vec4 value = vec4(
+ fetch(center.x, yCoord[0]), // ( 0,-2)
+ fetch(center.x, yCoord[1]), // ( 0,-1)
+ fetch(xCoord[0], center.y), // (-2, 0)
+ fetch(xCoord[1], center.y)); // (-1, 0)
+
+ vec4 temp = vec4(
+ fetch(center.x, yCoord[3]), // ( 0, 2)
+ fetch(center.x, yCoord[2]), // ( 0, 1)
+ fetch(xCoord[3], center.y), // ( 2, 0)
+ fetch(xCoord[2], center.y)); // ( 1, 0)
+
+ // Even the simplest compilers should be able to constant-fold these to
+ // avoid the division.
+ // Note that on scalar processors these constants force computation of some
+ // identical products twice.
+ const vec4 kA = vec4(-1.0, -1.5, 0.5, -1.0) / 8.0;
+ const vec4 kB = vec4( 2.0, 0.0, 0.0, 4.0) / 8.0;
+ const vec4 kD = vec4( 0.0, 2.0, -1.0, -1.0) / 8.0;
+
+ // Conserve constant registers and take advantage of free swizzle on load
+ #define kE (kA.xywz)
+ #define kF (kB.xywz)
+
+ value += temp;
+
+ // There are five filter patterns (identity, cross, checker,
+ // theta, phi). Precompute the terms from all of them and then
+ // use swizzles to assign to color channels.
+ //
+ // Channel Matches
+ // x cross (e.g., EE G)
+ // y checker (e.g., EE B)
+ // z theta (e.g., EO R)
+ // w phi (e.g., EO R)
+ #define A (value[0])
+ #define B (value[1])
+ #define D (Dvec.x)
+ #define E (value[2])
+ #define F (value[3])
+
+ // Avoid zero elements. On a scalar processor this saves two MADDs
+ // and it has no effect on a vector processor.
+ PATTERN.yzw += (kD.yz * D).xyy;
+
+ PATTERN += (kA.xyz * A).xyzx + (kE.xyw * E).xyxz;
+ PATTERN.xw += kB.xw * B;
+ PATTERN.xz += kF.xz * F;
+
+ gl_FragColor.rgb = (alternate.y == 0.0) ?
+ ((alternate.x == 0.0) ?
+ vec3(C, PATTERN.xy) :
+ vec3(PATTERN.z, C, PATTERN.w)) :
+ ((alternate.x == 0.0) ?
+ vec3(PATTERN.w, C, PATTERN.z) :
+ vec3(PATTERN.yx, C));
+}
diff --git a/src/qcam/assets/shader/bayer_8.vert b/src/qcam/assets/shader/bayer_8.vert
new file mode 100644
index 00000000..3695a5e9
--- /dev/null
+++ b/src/qcam/assets/shader/bayer_8.vert
@@ -0,0 +1,51 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+From http://jgt.akpeters.com/papers/McGuire08/
+
+Efficient, High-Quality Bayer Demosaic Filtering on GPUs
+
+Morgan McGuire
+
+This paper appears in issue Volume 13, Number 4.
+---------------------------------------------------------
+Copyright (c) 2008, Morgan McGuire. All rights reserved.
+
+Modified by Linaro Ltd to integrate it into libcamera.
+Copyright (C) 2021, Linaro
+*/
+
+//Vertex Shader
+
+attribute vec4 vertexIn;
+attribute vec2 textureIn;
+
+uniform vec2 tex_size; /* The texture size in pixels */
+uniform vec2 tex_step;
+
+/** Pixel position of the first red pixel in the */
+/** Bayer pattern. [{0,1}, {0, 1}]*/
+uniform vec2 tex_bayer_first_red;
+
+/** .xy = Pixel being sampled in the fragment shader on the range [0, 1]
+ .zw = ...on the range [0, sourceSize], offset by firstRed */
+varying vec4 center;
+
+/** center.x + (-2/w, -1/w, 1/w, 2/w); These are the x-positions */
+/** of the adjacent pixels.*/
+varying vec4 xCoord;
+
+/** center.y + (-2/h, -1/h, 1/h, 2/h); These are the y-positions */
+/** of the adjacent pixels.*/
+varying vec4 yCoord;
+
+void main(void) {
+ center.xy = textureIn;
+ center.zw = textureIn * tex_size + tex_bayer_first_red;
+
+ xCoord = center.x + vec4(-2.0 * tex_step.x,
+ -tex_step.x, tex_step.x, 2.0 * tex_step.x);
+ yCoord = center.y + vec4(-2.0 * tex_step.y,
+ -tex_step.y, tex_step.y, 2.0 * tex_step.y);
+
+ gl_Position = vertexIn;
+}
diff --git a/src/qcam/assets/shader/NV_vertex_shader.glsl b/src/qcam/assets/shader/identity.vert
index 12e791e3..6d6f7551 100644
--- a/src/qcam/assets/shader/NV_vertex_shader.glsl
+++ b/src/qcam/assets/shader/identity.vert
@@ -2,7 +2,7 @@
/*
* Copyright (C) 2020, Linaro
*
- * NV_vertex_shader.glsl - Vertex shader code for NV family
+ * identity.vert - Identity vertex shader for pixel format conversion
*/
attribute vec4 vertexIn;
diff --git a/src/qcam/assets/shader/shaders.qrc b/src/qcam/assets/shader/shaders.qrc
index 33eab278..96c709f9 100644
--- a/src/qcam/assets/shader/shaders.qrc
+++ b/src/qcam/assets/shader/shaders.qrc
@@ -1,9 +1,13 @@
<!-- SPDX-License-Identifier: LGPL-2.1-or-later -->
<!DOCTYPE RCC><RCC version="1.0">
<qresource>
-<file>./NV_vertex_shader.glsl</file>
-<file>./NV_2_planes_UV_f.glsl</file>
-<file>./NV_2_planes_VU_f.glsl</file>
-<file>./NV_3_planes_f.glsl</file>
+ <file>RGB.frag</file>
+ <file>YUV_2_planes.frag</file>
+ <file>YUV_3_planes.frag</file>
+ <file>YUV_packed.frag</file>
+ <file>bayer_1x_packed.frag</file>
+ <file>bayer_8.frag</file>
+ <file>bayer_8.vert</file>
+ <file>identity.vert</file>
</qresource>
</RCC>
diff --git a/src/qcam/dng_writer.cpp b/src/qcam/dng_writer.cpp
index 030d1387..34c8df5a 100644
--- a/src/qcam/dng_writer.cpp
+++ b/src/qcam/dng_writer.cpp
@@ -15,6 +15,7 @@
#include <libcamera/control_ids.h>
#include <libcamera/formats.h>
+#include <libcamera/property_ids.h>
using namespace libcamera;
@@ -353,6 +354,8 @@ int DNGWriter::write(const char *filename, const Camera *camera,
[[maybe_unused]] const FrameBuffer *buffer,
const void *data)
{
+ const ControlList &cameraProperties = camera->properties();
+
const auto it = formatInfo.find(config.pixelFormat);
if (it == formatInfo.cend()) {
std::cerr << "Unsupported pixel format" << std::endl;
@@ -387,9 +390,13 @@ int DNGWriter::write(const char *filename, const Camera *camera,
TIFFSetField(tif, TIFFTAG_DNGBACKWARDVERSION, version);
TIFFSetField(tif, TIFFTAG_FILLORDER, FILLORDER_MSB2LSB);
TIFFSetField(tif, TIFFTAG_MAKE, "libcamera");
- /* \todo Report a real model string instead of id. */
- TIFFSetField(tif, TIFFTAG_MODEL, camera->id().c_str());
- TIFFSetField(tif, TIFFTAG_UNIQUECAMERAMODEL, camera->id().c_str());
+
+ if (cameraProperties.contains(properties::Model)) {
+ std::string model = cameraProperties.get(properties::Model);
+ TIFFSetField(tif, TIFFTAG_MODEL, model.c_str());
+ /* \todo set TIFFTAG_UNIQUECAMERAMODEL. */
+ }
+
TIFFSetField(tif, TIFFTAG_SOFTWARE, "qcam");
TIFFSetField(tif, TIFFTAG_ORIENTATION, ORIENTATION_TOPLEFT);
diff --git a/src/qcam/dng_writer.h b/src/qcam/dng_writer.h
index bf44c879..20905f37 100644
--- a/src/qcam/dng_writer.h
+++ b/src/qcam/dng_writer.h
@@ -10,9 +10,9 @@
#ifdef HAVE_TIFF
#define HAVE_DNG
-#include <libcamera/buffer.h>
#include <libcamera/camera.h>
#include <libcamera/controls.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/stream.h>
using namespace libcamera;
diff --git a/src/qcam/main.cpp b/src/qcam/main.cpp
index f60d3cef..5eff90a3 100644
--- a/src/qcam/main.cpp
+++ b/src/qcam/main.cpp
@@ -2,7 +2,7 @@
/*
* Copyright (C) 2019, Google Inc.
*
- * main.cpp - cam - The libcamera swiss army knife
+ * main.cpp - qcam - The libcamera GUI test application
*/
#include <signal.h>
@@ -16,6 +16,7 @@
#include "../cam/options.h"
#include "../cam/stream_options.h"
#include "main_window.h"
+#include "message_handler.h"
void signalHandler([[maybe_unused]] int signal)
{
@@ -38,6 +39,8 @@ OptionsParser::Options parseOptions(int argc, char *argv[])
"renderer", ArgumentRequired, "renderer");
parser.addOption(OptStream, &streamKeyValue,
"Set configuration of a camera stream", "stream", true);
+ parser.addOption(OptVerbose, OptionNone,
+ "Print verbose log messages", "verbose");
OptionsParser::Options options = parser.parse(argc, argv);
if (options.isSet(OptHelp))
@@ -57,6 +60,8 @@ int main(int argc, char **argv)
if (options.isSet(OptHelp))
return 0;
+ MessageHandler msgHandler(options.isSet(OptVerbose));
+
struct sigaction sa = {};
sa.sa_handler = &signalHandler;
sigaction(SIGINT, &sa, nullptr);
diff --git a/src/qcam/main_window.cpp b/src/qcam/main_window.cpp
index ecb9dd66..39d034de 100644
--- a/src/qcam/main_window.cpp
+++ b/src/qcam/main_window.cpp
@@ -367,7 +367,6 @@ void MainWindow::toggleCapture(bool start)
int MainWindow::startCapture()
{
StreamRoles roles = StreamKeyValueParser::roles(options_[OptStream]);
- std::vector<Request *> requests;
int ret;
/* Verify roles are supported. */
@@ -486,7 +485,7 @@ int MainWindow::startCapture()
while (!freeBuffers_[vfStream_].isEmpty()) {
FrameBuffer *buffer = freeBuffers_[vfStream_].dequeue();
- Request *request = camera_->createRequest();
+ std::unique_ptr<Request> request = camera_->createRequest();
if (!request) {
qWarning() << "Can't create request";
ret = -ENOMEM;
@@ -499,7 +498,7 @@ int MainWindow::startCapture()
goto error;
}
- requests.push_back(request);
+ requests_.push_back(std::move(request));
}
/* Start the title timer and the camera. */
@@ -518,8 +517,8 @@ int MainWindow::startCapture()
camera_->requestCompleted.connect(this, &MainWindow::requestComplete);
/* Queue all requests. */
- for (Request *request : requests) {
- ret = camera_->queueRequest(request);
+ for (std::unique_ptr<Request> &request : requests_) {
+ ret = camera_->queueRequest(request.get());
if (ret < 0) {
qWarning() << "Can't queue request";
goto error_disconnect;
@@ -535,8 +534,7 @@ error_disconnect:
camera_->stop();
error:
- for (Request *request : requests)
- delete request;
+ requests_.clear();
for (auto &iter : mappedBuffers_) {
const MappedBuffer &buffer = iter.second;
@@ -580,6 +578,9 @@ void MainWindow::stopCapture()
}
mappedBuffers_.clear();
+ requests_.clear();
+ freeQueue_.clear();
+
delete allocator_;
isCapturing_ = false;
@@ -701,7 +702,7 @@ void MainWindow::requestComplete(Request *request)
*/
{
QMutexLocker locker(&mutex_);
- doneQueue_.enqueue({ request->buffers(), request->metadata() });
+ doneQueue_.enqueue(request);
}
QCoreApplication::postEvent(this, new CaptureEvent);
@@ -714,8 +715,7 @@ void MainWindow::processCapture()
* if stopCapture() has been called while a CaptureEvent was posted but
* not processed yet. Return immediately in that case.
*/
- CaptureRequest request;
-
+ Request *request;
{
QMutexLocker locker(&mutex_);
if (doneQueue_.isEmpty())
@@ -725,11 +725,15 @@ void MainWindow::processCapture()
}
/* Process buffers. */
- if (request.buffers_.count(vfStream_))
- processViewfinder(request.buffers_[vfStream_]);
+ if (request->buffers().count(vfStream_))
+ processViewfinder(request->buffers().at(vfStream_));
- if (request.buffers_.count(rawStream_))
- processRaw(request.buffers_[rawStream_], request.metadata_);
+ if (request->buffers().count(rawStream_))
+ processRaw(request->buffers().at(rawStream_), request->metadata());
+
+ request->reuse();
+ QMutexLocker locker(&mutex_);
+ freeQueue_.enqueue(request);
}
void MainWindow::processViewfinder(FrameBuffer *buffer)
@@ -742,7 +746,7 @@ void MainWindow::processViewfinder(FrameBuffer *buffer)
fps = lastBufferTime_ && fps ? 1000000000.0 / fps : 0.0;
lastBufferTime_ = metadata.timestamp;
- qInfo().noquote()
+ qDebug().noquote()
<< QString("seq: %1").arg(metadata.sequence, 6, 10, QLatin1Char('0'))
<< "bytesused:" << metadata.planes[0].bytesused
<< "timestamp:" << metadata.timestamp
@@ -754,25 +758,28 @@ void MainWindow::processViewfinder(FrameBuffer *buffer)
void MainWindow::queueRequest(FrameBuffer *buffer)
{
- Request *request = camera_->createRequest();
- if (!request) {
- qWarning() << "Can't create request";
- return;
+ Request *request;
+ {
+ QMutexLocker locker(&mutex_);
+ if (freeQueue_.isEmpty())
+ return;
+
+ request = freeQueue_.dequeue();
}
request->addBuffer(vfStream_, buffer);
if (captureRaw_) {
- FrameBuffer *buffer = nullptr;
+ FrameBuffer *rawBuffer = nullptr;
{
QMutexLocker locker(&mutex_);
if (!freeBuffers_[rawStream_].isEmpty())
- buffer = freeBuffers_[rawStream_].dequeue();
+ rawBuffer = freeBuffers_[rawStream_].dequeue();
}
- if (buffer) {
- request->addBuffer(rawStream_, buffer);
+ if (rawBuffer) {
+ request->addBuffer(rawStream_, rawBuffer);
captureRaw_ = false;
} else {
qWarning() << "No free buffer available for RAW capture";
diff --git a/src/qcam/main_window.h b/src/qcam/main_window.h
index 5c61a4df..85d56ce4 100644
--- a/src/qcam/main_window.h
+++ b/src/qcam/main_window.h
@@ -8,6 +8,7 @@
#define __QCAM_MAIN_WINDOW_H__
#include <memory>
+#include <vector>
#include <QElapsedTimer>
#include <QIcon>
@@ -17,11 +18,12 @@
#include <QQueue>
#include <QTimer>
-#include <libcamera/buffer.h>
#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>
#include <libcamera/controls.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/framebuffer_allocator.h>
+#include <libcamera/request.h>
#include <libcamera/stream.h>
#include "../cam/stream_options.h"
@@ -39,23 +41,7 @@ enum {
OptHelp = 'h',
OptRenderer = 'r',
OptStream = 's',
-};
-
-class CaptureRequest
-{
-public:
- CaptureRequest()
- {
- }
-
- CaptureRequest(const Request::BufferMap &buffers,
- const ControlList &metadata)
- : buffers_(buffers), metadata_(metadata)
- {
- }
-
- Request::BufferMap buffers_;
- ControlList metadata_;
+ OptVerbose = 'v',
};
class MainWindow : public QMainWindow
@@ -128,13 +114,16 @@ private:
Stream *vfStream_;
Stream *rawStream_;
std::map<const Stream *, QQueue<FrameBuffer *>> freeBuffers_;
- QQueue<CaptureRequest> doneQueue_;
- QMutex mutex_; /* Protects freeBuffers_ and doneQueue_ */
+ QQueue<Request *> doneQueue_;
+ QQueue<Request *> freeQueue_;
+ QMutex mutex_; /* Protects freeBuffers_, doneQueue_, and freeQueue_ */
uint64_t lastBufferTime_;
QElapsedTimer frameRateInterval_;
uint32_t previousFrames_;
uint32_t framesCaptured_;
+
+ std::vector<std::unique_ptr<Request>> requests_;
};
#endif /* __QCAM_MAIN_WINDOW__ */
diff --git a/src/qcam/meson.build b/src/qcam/meson.build
index 9bb48c0d..7d3621c9 100644
--- a/src/qcam/meson.build
+++ b/src/qcam/meson.build
@@ -1,11 +1,26 @@
# SPDX-License-Identifier: CC0-1.0
+qt5 = import('qt5')
+qt5_dep = dependency('qt5',
+ method : 'pkg-config',
+ modules : ['Core', 'Gui', 'Widgets'],
+ required : get_option('qcam'),
+ version : '>=5.4')
+
+if not qt5_dep.found()
+ qcam_enabled = false
+ subdir_done()
+endif
+
+qcam_enabled = true
+
qcam_sources = files([
'../cam/options.cpp',
'../cam/stream_options.cpp',
'format_converter.cpp',
'main.cpp',
'main_window.cpp',
+ 'message_handler.cpp',
'viewfinder_qt.cpp',
])
@@ -18,58 +33,50 @@ qcam_resources = files([
'assets/feathericons/feathericons.qrc',
])
-qt5 = import('qt5')
-qt5_dep = dependency('qt5',
- method : 'pkg-config',
- modules : ['Core', 'Gui', 'Widgets'],
- required : get_option('qcam'),
- version : '>=5.4')
+qcam_deps = [
+ libatomic,
+ libcamera_public,
+ qt5_dep,
+]
-if qt5_dep.found()
- qcam_deps = [
- libcamera_dep,
- qt5_dep,
- ]
+qt5_cpp_args = ['-DQT_NO_KEYWORDS']
- qt5_cpp_args = [ '-DQT_NO_KEYWORDS' ]
-
- tiff_dep = dependency('libtiff-4', required : false)
- if tiff_dep.found()
- qt5_cpp_args += [ '-DHAVE_TIFF' ]
- qcam_deps += [ tiff_dep ]
- qcam_sources += files([
- 'dng_writer.cpp',
- ])
- endif
+tiff_dep = dependency('libtiff-4', required : false)
+if tiff_dep.found()
+ qt5_cpp_args += ['-DHAVE_TIFF']
+ qcam_deps += [tiff_dep]
+ qcam_sources += files([
+ 'dng_writer.cpp',
+ ])
+endif
- if cxx.has_header_symbol('QOpenGLWidget', 'QOpenGLWidget',
- dependencies : qt5_dep, args : '-fPIC')
- qcam_sources += files([
- 'viewfinder_gl.cpp',
- ])
- qcam_moc_headers += files([
- 'viewfinder_gl.h',
- ])
- qcam_resources += files([
- 'assets/shader/shaders.qrc'
- ])
- endif
+if cxx.has_header_symbol('QOpenGLWidget', 'QOpenGLWidget',
+ dependencies : qt5_dep, args : '-fPIC')
+ qcam_sources += files([
+ 'viewfinder_gl.cpp',
+ ])
+ qcam_moc_headers += files([
+ 'viewfinder_gl.h',
+ ])
+ qcam_resources += files([
+ 'assets/shader/shaders.qrc'
+ ])
+endif
- # gcc 9 introduced a deprecated-copy warning that is triggered by Qt until
- # Qt 5.13. clang 10 introduced the same warning, but detects more issues
- # that are not fixed in Qt yet. Disable the warning manually in both cases.
- if ((cc.get_id() == 'gcc' and cc.version().version_compare('>=9.0') and
- qt5_dep.version().version_compare('<5.13')) or
- (cc.get_id() == 'clang' and cc.version().version_compare('>=10.0')))
- qt5_cpp_args += [ '-Wno-deprecated-copy' ]
- endif
+# gcc 9 introduced a deprecated-copy warning that is triggered by Qt until
+# Qt 5.13. clang 10 introduced the same warning, but detects more issues
+# that are not fixed in Qt yet. Disable the warning manually in both cases.
+if ((cc.get_id() == 'gcc' and cc.version().version_compare('>=9.0') and
+ qt5_dep.version().version_compare('<5.13')) or
+ (cc.get_id() == 'clang' and cc.version().version_compare('>=10.0')))
+ qt5_cpp_args += ['-Wno-deprecated-copy']
+endif
- resources = qt5.preprocess(moc_headers: qcam_moc_headers,
- qresources : qcam_resources,
- dependencies: qt5_dep)
+resources = qt5.preprocess(moc_headers: qcam_moc_headers,
+ qresources : qcam_resources,
+ dependencies: qt5_dep)
- qcam = executable('qcam', qcam_sources, resources,
- install : true,
- dependencies : qcam_deps,
- cpp_args : qt5_cpp_args)
-endif
+qcam = executable('qcam', qcam_sources, resources,
+ install : true,
+ dependencies : qcam_deps,
+ cpp_args : qt5_cpp_args)
diff --git a/src/qcam/message_handler.cpp b/src/qcam/message_handler.cpp
new file mode 100644
index 00000000..261623e1
--- /dev/null
+++ b/src/qcam/message_handler.cpp
@@ -0,0 +1,27 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2020, Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+ *
+ * message_handler.cpp - qcam - Log message handling
+ */
+
+#include "message_handler.h"
+
+QtMessageHandler MessageHandler::handler_ = nullptr;
+bool MessageHandler::verbose_ = false;
+
+MessageHandler::MessageHandler(bool verbose)
+{
+ verbose_ = verbose;
+ handler_ = qInstallMessageHandler(&MessageHandler::handleMessage);
+}
+
+void MessageHandler::handleMessage(QtMsgType type,
+ const QMessageLogContext &context,
+ const QString &msg)
+{
+ if (type == QtDebugMsg && !verbose_)
+ return;
+
+ handler_(type, context, msg);
+}
diff --git a/src/qcam/message_handler.h b/src/qcam/message_handler.h
new file mode 100644
index 00000000..4534db9d
--- /dev/null
+++ b/src/qcam/message_handler.h
@@ -0,0 +1,26 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2020, Laurent Pinchart <laurent.pinchart@ideasonboard.com>
+ *
+ * message_handler.h - qcam - Log message handling
+ */
+#ifndef __QCAM_MESSAGE_HANDLER_H__
+#define __QCAM_MESSAGE_HANDLER_H__
+
+#include <QtGlobal>
+
+class MessageHandler
+{
+public:
+ MessageHandler(bool verbose);
+
+private:
+ static void handleMessage(QtMsgType type,
+ const QMessageLogContext &context,
+ const QString &msg);
+
+ static QtMessageHandler handler_;
+ static bool verbose_;
+};
+
+#endif /* __QCAM_MESSAGE_HANDLER_H__ */
diff --git a/src/qcam/viewfinder.h b/src/qcam/viewfinder.h
index 67da1df2..46747c22 100644
--- a/src/qcam/viewfinder.h
+++ b/src/qcam/viewfinder.h
@@ -11,8 +11,8 @@
#include <QList>
#include <QSize>
-#include <libcamera/buffer.h>
#include <libcamera/formats.h>
+#include <libcamera/framebuffer.h>
struct MappedBuffer {
void *memory;
@@ -22,7 +22,7 @@ struct MappedBuffer {
class ViewFinder
{
public:
- virtual ~ViewFinder() {}
+ virtual ~ViewFinder() = default;
virtual const QList<libcamera::PixelFormat> &nativeFormats() const = 0;
diff --git a/src/qcam/viewfinder_gl.cpp b/src/qcam/viewfinder_gl.cpp
index fbe21dcf..e7c8620c 100644
--- a/src/qcam/viewfinder_gl.cpp
+++ b/src/qcam/viewfinder_gl.cpp
@@ -7,28 +7,55 @@
#include "viewfinder_gl.h"
+#include <QByteArray>
+#include <QFile>
#include <QImage>
#include <libcamera/formats.h>
static const QList<libcamera::PixelFormat> supportedFormats{
+ /* YUV - packed (single plane) */
+ libcamera::formats::UYVY,
+ libcamera::formats::VYUY,
+ libcamera::formats::YUYV,
+ libcamera::formats::YVYU,
+ /* YUV - semi planar (two planes) */
libcamera::formats::NV12,
libcamera::formats::NV21,
libcamera::formats::NV16,
libcamera::formats::NV61,
libcamera::formats::NV24,
libcamera::formats::NV42,
+ /* YUV - fully planar (three planes) */
libcamera::formats::YUV420,
libcamera::formats::YVU420,
+ /* RGB */
+ libcamera::formats::ABGR8888,
+ libcamera::formats::ARGB8888,
+ libcamera::formats::BGRA8888,
+ libcamera::formats::RGBA8888,
+ libcamera::formats::BGR888,
+ libcamera::formats::RGB888,
+ /* Raw Bayer 8-bit */
+ libcamera::formats::SBGGR8,
+ libcamera::formats::SGBRG8,
+ libcamera::formats::SGRBG8,
+ libcamera::formats::SRGGB8,
+ /* Raw Bayer 10-bit packed */
+ libcamera::formats::SBGGR10_CSI2P,
+ libcamera::formats::SGBRG10_CSI2P,
+ libcamera::formats::SGRBG10_CSI2P,
+ libcamera::formats::SRGGB10_CSI2P,
+ /* Raw Bayer 12-bit packed */
+ libcamera::formats::SBGGR12_CSI2P,
+ libcamera::formats::SGBRG12_CSI2P,
+ libcamera::formats::SGRBG12_CSI2P,
+ libcamera::formats::SRGGB12_CSI2P,
};
ViewFinderGL::ViewFinderGL(QWidget *parent)
- : QOpenGLWidget(parent), buffer_(nullptr), yuvData_(nullptr),
- fragmentShader_(nullptr), vertexShader_(nullptr),
- vertexBuffer_(QOpenGLBuffer::VertexBuffer),
- textureU_(QOpenGLTexture::Target2D),
- textureV_(QOpenGLTexture::Target2D),
- textureY_(QOpenGLTexture::Target2D)
+ : QOpenGLWidget(parent), buffer_(nullptr), data_(nullptr),
+ vertexBuffer_(QOpenGLBuffer::VertexBuffer)
{
}
@@ -45,19 +72,23 @@ const QList<libcamera::PixelFormat> &ViewFinderGL::nativeFormats() const
int ViewFinderGL::setFormat(const libcamera::PixelFormat &format,
const QSize &size)
{
- /* If the fragment is created remove it and create a new one. */
- if (fragmentShader_) {
+ if (format != format_) {
+ /*
+ * If the fragment already exists, remove it and create a new
+ * one for the new format.
+ */
if (shaderProgram_.isLinked()) {
shaderProgram_.release();
- shaderProgram_.removeShader(fragmentShader_);
- delete fragmentShader_;
+ shaderProgram_.removeShader(fragmentShader_.get());
+ fragmentShader_.reset();
}
- }
- if (!selectFormat(format))
- return -1;
+ if (!selectFormat(format))
+ return -1;
+
+ format_ = format;
+ }
- format_ = format;
size_ = size;
updateGeometry();
@@ -89,7 +120,11 @@ void ViewFinderGL::render(libcamera::FrameBuffer *buffer, MappedBuffer *map)
if (buffer_)
renderComplete(buffer_);
- yuvData_ = static_cast<unsigned char *>(map->memory);
+ data_ = static_cast<unsigned char *>(map->memory);
+ /*
+ * \todo Get the stride from the buffer instead of computing it naively
+ */
+ stride_ = buffer->metadata().planes[0].bytesused / size_.height();
update();
buffer_ = buffer;
}
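render() estimates the stride as bytesused / height, which the \todo above acknowledges is naive: it only holds for single-plane layouts without padding between lines. A sketch of the alternative the \todo points at, assuming the caller has access to the negotiated libcamera::StreamConfiguration (the helper names are illustrative, not part of this patch):

#include <libcamera/framebuffer.h>
#include <libcamera/stream.h>

/* Naive estimate used above: valid for unpadded single-plane buffers. */
unsigned int strideFromMetadata(const libcamera::FrameBuffer *buffer,
				unsigned int height)
{
	return buffer->metadata().planes[0].bytesused / height;
}

/* Preferred: read the stride negotiated at configure time. */
unsigned int strideFromConfig(const libcamera::StreamConfiguration &cfg)
{
	return cfg.stride;
}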
@@ -97,54 +132,185 @@ void ViewFinderGL::render(libcamera::FrameBuffer *buffer, MappedBuffer *map)
bool ViewFinderGL::selectFormat(const libcamera::PixelFormat &format)
{
bool ret = true;
+
+ /* Set min/mag filters to GL_LINEAR by default. */
+ textureMinMagFilters_ = GL_LINEAR;
+
+ /* Use identity.vert as the default vertex shader. */
+ vertexShaderFile_ = ":identity.vert";
+
+ fragmentShaderDefines_.clear();
+
switch (format) {
case libcamera::formats::NV12:
horzSubSample_ = 2;
vertSubSample_ = 2;
- vertexShaderSrc_ = ":NV_vertex_shader.glsl";
- fragmentShaderSrc_ = ":NV_2_planes_UV_f.glsl";
+ fragmentShaderDefines_.append("#define YUV_PATTERN_UV");
+ fragmentShaderFile_ = ":YUV_2_planes.frag";
break;
case libcamera::formats::NV21:
horzSubSample_ = 2;
vertSubSample_ = 2;
- vertexShaderSrc_ = ":NV_vertex_shader.glsl";
- fragmentShaderSrc_ = ":NV_2_planes_VU_f.glsl";
+ fragmentShaderDefines_.append("#define YUV_PATTERN_VU");
+ fragmentShaderFile_ = ":YUV_2_planes.frag";
break;
case libcamera::formats::NV16:
horzSubSample_ = 2;
vertSubSample_ = 1;
- vertexShaderSrc_ = ":NV_vertex_shader.glsl";
- fragmentShaderSrc_ = ":NV_2_planes_UV_f.glsl";
+ fragmentShaderDefines_.append("#define YUV_PATTERN_UV");
+ fragmentShaderFile_ = ":YUV_2_planes.frag";
break;
case libcamera::formats::NV61:
horzSubSample_ = 2;
vertSubSample_ = 1;
- vertexShaderSrc_ = ":NV_vertex_shader.glsl";
- fragmentShaderSrc_ = ":NV_2_planes_VU_f.glsl";
+ fragmentShaderDefines_.append("#define YUV_PATTERN_VU");
+ fragmentShaderFile_ = ":YUV_2_planes.frag";
break;
case libcamera::formats::NV24:
horzSubSample_ = 1;
vertSubSample_ = 1;
- vertexShaderSrc_ = ":NV_vertex_shader.glsl";
- fragmentShaderSrc_ = ":NV_2_planes_UV_f.glsl";
+ fragmentShaderDefines_.append("#define YUV_PATTERN_UV");
+ fragmentShaderFile_ = ":YUV_2_planes.frag";
break;
case libcamera::formats::NV42:
horzSubSample_ = 1;
vertSubSample_ = 1;
- vertexShaderSrc_ = ":NV_vertex_shader.glsl";
- fragmentShaderSrc_ = ":NV_2_planes_VU_f.glsl";
+ fragmentShaderDefines_.append("#define YUV_PATTERN_VU");
+ fragmentShaderFile_ = ":YUV_2_planes.frag";
break;
case libcamera::formats::YUV420:
horzSubSample_ = 2;
vertSubSample_ = 2;
- vertexShaderSrc_ = ":NV_vertex_shader.glsl";
- fragmentShaderSrc_ = ":NV_3_planes_f.glsl";
+ fragmentShaderFile_ = ":YUV_3_planes.frag";
break;
case libcamera::formats::YVU420:
horzSubSample_ = 2;
vertSubSample_ = 2;
- vertexShaderSrc_ = ":NV_vertex_shader.glsl";
- fragmentShaderSrc_ = ":NV_3_planes_f.glsl";
+ fragmentShaderFile_ = ":YUV_3_planes.frag";
+ break;
+ case libcamera::formats::UYVY:
+ fragmentShaderDefines_.append("#define YUV_PATTERN_UYVY");
+ fragmentShaderFile_ = ":YUV_packed.frag";
+ break;
+ case libcamera::formats::VYUY:
+ fragmentShaderDefines_.append("#define YUV_PATTERN_VYUY");
+ fragmentShaderFile_ = ":YUV_packed.frag";
+ break;
+ case libcamera::formats::YUYV:
+ fragmentShaderDefines_.append("#define YUV_PATTERN_YUYV");
+ fragmentShaderFile_ = ":YUV_packed.frag";
+ break;
+ case libcamera::formats::YVYU:
+ fragmentShaderDefines_.append("#define YUV_PATTERN_YVYU");
+ fragmentShaderFile_ = ":YUV_packed.frag";
+ break;
+ case libcamera::formats::ABGR8888:
+ fragmentShaderDefines_.append("#define RGB_PATTERN rgb");
+ fragmentShaderFile_ = ":RGB.frag";
+ break;
+ case libcamera::formats::ARGB8888:
+ fragmentShaderDefines_.append("#define RGB_PATTERN bgr");
+ fragmentShaderFile_ = ":RGB.frag";
+ break;
+ case libcamera::formats::BGRA8888:
+ fragmentShaderDefines_.append("#define RGB_PATTERN gba");
+ fragmentShaderFile_ = ":RGB.frag";
+ break;
+ case libcamera::formats::RGBA8888:
+ fragmentShaderDefines_.append("#define RGB_PATTERN abg");
+ fragmentShaderFile_ = ":RGB.frag";
+ break;
+ case libcamera::formats::BGR888:
+ fragmentShaderDefines_.append("#define RGB_PATTERN rgb");
+ fragmentShaderFile_ = ":RGB.frag";
+ break;
+ case libcamera::formats::RGB888:
+ fragmentShaderDefines_.append("#define RGB_PATTERN bgr");
+ fragmentShaderFile_ = ":RGB.frag";
+ break;
+ case libcamera::formats::SBGGR8:
+ firstRed_.setX(1.0);
+ firstRed_.setY(1.0);
+ vertexShaderFile_ = ":bayer_8.vert";
+ fragmentShaderFile_ = ":bayer_8.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SGBRG8:
+ firstRed_.setX(0.0);
+ firstRed_.setY(1.0);
+ vertexShaderFile_ = ":bayer_8.vert";
+ fragmentShaderFile_ = ":bayer_8.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SGRBG8:
+ firstRed_.setX(1.0);
+ firstRed_.setY(0.0);
+ vertexShaderFile_ = ":bayer_8.vert";
+ fragmentShaderFile_ = ":bayer_8.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SRGGB8:
+ firstRed_.setX(0.0);
+ firstRed_.setY(0.0);
+ vertexShaderFile_ = ":bayer_8.vert";
+ fragmentShaderFile_ = ":bayer_8.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SBGGR10_CSI2P:
+ firstRed_.setX(1.0);
+ firstRed_.setY(1.0);
+ fragmentShaderDefines_.append("#define RAW10P");
+ fragmentShaderFile_ = ":bayer_1x_packed.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SGBRG10_CSI2P:
+ firstRed_.setX(0.0);
+ firstRed_.setY(1.0);
+ fragmentShaderDefines_.append("#define RAW10P");
+ fragmentShaderFile_ = ":bayer_1x_packed.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SGRBG10_CSI2P:
+ firstRed_.setX(1.0);
+ firstRed_.setY(0.0);
+ fragmentShaderDefines_.append("#define RAW10P");
+ fragmentShaderFile_ = ":bayer_1x_packed.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SRGGB10_CSI2P:
+ firstRed_.setX(0.0);
+ firstRed_.setY(0.0);
+ fragmentShaderDefines_.append("#define RAW10P");
+ fragmentShaderFile_ = ":bayer_1x_packed.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SBGGR12_CSI2P:
+ firstRed_.setX(1.0);
+ firstRed_.setY(1.0);
+ fragmentShaderDefines_.append("#define RAW12P");
+ fragmentShaderFile_ = ":bayer_1x_packed.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SGBRG12_CSI2P:
+ firstRed_.setX(0.0);
+ firstRed_.setY(1.0);
+ fragmentShaderDefines_.append("#define RAW12P");
+ fragmentShaderFile_ = ":bayer_1x_packed.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SGRBG12_CSI2P:
+ firstRed_.setX(1.0);
+ firstRed_.setY(0.0);
+ fragmentShaderDefines_.append("#define RAW12P");
+ fragmentShaderFile_ = ":bayer_1x_packed.frag";
+ textureMinMagFilters_ = GL_NEAREST;
+ break;
+ case libcamera::formats::SRGGB12_CSI2P:
+ firstRed_.setX(0.0);
+ firstRed_.setY(0.0);
+ fragmentShaderDefines_.append("#define RAW12P");
+ fragmentShaderFile_ = ":bayer_1x_packed.frag";
+ textureMinMagFilters_ = GL_NEAREST;
break;
default:
ret = false;
@@ -159,15 +325,15 @@ bool ViewFinderGL::selectFormat(const libcamera::PixelFormat &format)
bool ViewFinderGL::createVertexShader()
{
/* Create Vertex Shader */
- vertexShader_ = new QOpenGLShader(QOpenGLShader::Vertex, this);
+ vertexShader_ = std::make_unique<QOpenGLShader>(QOpenGLShader::Vertex, this);
/* Compile the vertex shader */
- if (!vertexShader_->compileSourceFile(vertexShaderSrc_)) {
+ if (!vertexShader_->compileSourceFile(vertexShaderFile_)) {
qWarning() << "[ViewFinderGL]:" << vertexShader_->log();
return false;
}
- shaderProgram_.addShader(vertexShader_);
+ shaderProgram_.addShader(vertexShader_.get());
return true;
}
@@ -176,16 +342,29 @@ bool ViewFinderGL::createFragmentShader()
int attributeVertex;
int attributeTexture;
- /* Create Fragment Shader */
- fragmentShader_ = new QOpenGLShader(QOpenGLShader::Fragment, this);
+ /*
+ * Create the fragment shader, compile it, and add it to the shader
+ * program. The #define macros stored in fragmentShaderDefines_, if
+ * any, are prepended to the source code.
+ */
+ fragmentShader_ = std::make_unique<QOpenGLShader>(QOpenGLShader::Fragment, this);
- /* Compile the fragment shader */
- if (!fragmentShader_->compileSourceFile(fragmentShaderSrc_)) {
+ QFile file(fragmentShaderFile_);
+ if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
+ qWarning() << "Shader" << fragmentShaderFile_ << "not found";
+ return false;
+ }
+
+ QString defines = fragmentShaderDefines_.join('\n') + "\n";
+ QByteArray src = file.readAll();
+ src.prepend(defines.toUtf8());
+
+ if (!fragmentShader_->compileSourceCode(src)) {
qWarning() << "[ViewFinderGL]:" << fragmentShader_->log();
return false;
}
- shaderProgram_.addShader(fragmentShader_);
+ shaderProgram_.addShader(fragmentShader_.get());
/* Link shader pipeline */
if (!shaderProgram_.link()) {
@@ -219,27 +398,29 @@ bool ViewFinderGL::createFragmentShader()
textureUniformY_ = shaderProgram_.uniformLocation("tex_y");
textureUniformU_ = shaderProgram_.uniformLocation("tex_u");
textureUniformV_ = shaderProgram_.uniformLocation("tex_v");
+ textureUniformStep_ = shaderProgram_.uniformLocation("tex_step");
+ textureUniformSize_ = shaderProgram_.uniformLocation("tex_size");
+ textureUniformBayerFirstRed_ = shaderProgram_.uniformLocation("tex_bayer_first_red");
- if (!textureY_.isCreated())
- textureY_.create();
+ /* Create the textures. */
+ for (std::unique_ptr<QOpenGLTexture> &texture : textures_) {
+ if (texture)
+ continue;
- if (!textureU_.isCreated())
- textureU_.create();
-
- if (!textureV_.isCreated())
- textureV_.create();
+ texture = std::make_unique<QOpenGLTexture>(QOpenGLTexture::Target2D);
+ texture->create();
+ }
- id_y_ = textureY_.textureId();
- id_u_ = textureU_.textureId();
- id_v_ = textureV_.textureId();
return true;
}
-void ViewFinderGL::configureTexture(unsigned int id)
+void ViewFinderGL::configureTexture(QOpenGLTexture &texture)
{
- glBindTexture(GL_TEXTURE_2D, id);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glBindTexture(GL_TEXTURE_2D, texture.textureId());
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,
+ textureMinMagFilters_);
+ glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
+ textureMinMagFilters_);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
@@ -250,12 +431,6 @@ void ViewFinderGL::removeShader()
shaderProgram_.release();
shaderProgram_.removeAllShaders();
}
-
- if (fragmentShader_)
- delete fragmentShader_;
-
- if (vertexShader_)
- delete vertexShader_;
}
void ViewFinderGL::initializeGL()
@@ -303,7 +478,7 @@ void ViewFinderGL::doRender()
case libcamera::formats::NV42:
/* Activate texture Y */
glActiveTexture(GL_TEXTURE0);
- configureTexture(id_y_);
+ configureTexture(*textures_[0]);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RED,
@@ -312,12 +487,12 @@ void ViewFinderGL::doRender()
0,
GL_RED,
GL_UNSIGNED_BYTE,
- yuvData_);
+ data_);
shaderProgram_.setUniformValue(textureUniformY_, 0);
/* Activate texture UV/VU */
glActiveTexture(GL_TEXTURE1);
- configureTexture(id_u_);
+ configureTexture(*textures_[1]);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RG,
@@ -326,14 +501,14 @@ void ViewFinderGL::doRender()
0,
GL_RG,
GL_UNSIGNED_BYTE,
- (char *)yuvData_ + size_.width() * size_.height());
+ data_ + size_.width() * size_.height());
shaderProgram_.setUniformValue(textureUniformU_, 1);
break;
case libcamera::formats::YUV420:
/* Activate texture Y */
glActiveTexture(GL_TEXTURE0);
- configureTexture(id_y_);
+ configureTexture(*textures_[0]);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RED,
@@ -342,12 +517,12 @@ void ViewFinderGL::doRender()
0,
GL_RED,
GL_UNSIGNED_BYTE,
- yuvData_);
+ data_);
shaderProgram_.setUniformValue(textureUniformY_, 0);
/* Activate texture U */
glActiveTexture(GL_TEXTURE1);
- configureTexture(id_u_);
+ configureTexture(*textures_[1]);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RED,
@@ -356,12 +531,12 @@ void ViewFinderGL::doRender()
0,
GL_RED,
GL_UNSIGNED_BYTE,
- (char *)yuvData_ + size_.width() * size_.height());
+ data_ + size_.width() * size_.height());
shaderProgram_.setUniformValue(textureUniformU_, 1);
/* Activate texture V */
glActiveTexture(GL_TEXTURE2);
- configureTexture(id_v_);
+ configureTexture(*textures_[2]);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RED,
@@ -370,14 +545,14 @@ void ViewFinderGL::doRender()
0,
GL_RED,
GL_UNSIGNED_BYTE,
- (char *)yuvData_ + size_.width() * size_.height() * 5 / 4);
+ data_ + size_.width() * size_.height() * 5 / 4);
shaderProgram_.setUniformValue(textureUniformV_, 2);
break;
case libcamera::formats::YVU420:
/* Activate texture Y */
glActiveTexture(GL_TEXTURE0);
- configureTexture(id_y_);
+ configureTexture(*textures_[0]);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RED,
@@ -386,12 +561,12 @@ void ViewFinderGL::doRender()
0,
GL_RED,
GL_UNSIGNED_BYTE,
- yuvData_);
+ data_);
shaderProgram_.setUniformValue(textureUniformY_, 0);
/* Activate texture V */
glActiveTexture(GL_TEXTURE2);
- configureTexture(id_v_);
+ configureTexture(*textures_[2]);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RED,
@@ -400,12 +575,12 @@ void ViewFinderGL::doRender()
0,
GL_RED,
GL_UNSIGNED_BYTE,
- (char *)yuvData_ + size_.width() * size_.height());
+ data_ + size_.width() * size_.height());
shaderProgram_.setUniformValue(textureUniformV_, 2);
/* Activate texture U */
glActiveTexture(GL_TEXTURE1);
- configureTexture(id_u_);
+ configureTexture(*textures_[1]);
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RED,
@@ -414,10 +589,116 @@ void ViewFinderGL::doRender()
0,
GL_RED,
GL_UNSIGNED_BYTE,
- (char *)yuvData_ + size_.width() * size_.height() * 5 / 4);
+ data_ + size_.width() * size_.height() * 5 / 4);
shaderProgram_.setUniformValue(textureUniformU_, 1);
break;
+ case libcamera::formats::UYVY:
+ case libcamera::formats::VYUY:
+ case libcamera::formats::YUYV:
+ case libcamera::formats::YVYU:
+ /*
+ * Packed YUV formats are stored in an RGBA texture to match the
+ * OpenGL texel size with the 4-byte repeating pattern in YUV.
+ * The texture width is thus half of the image width.
+ */
+ glActiveTexture(GL_TEXTURE0);
+ configureTexture(*textures_[0]);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ GL_RGBA,
+ size_.width() / 2,
+ size_.height(),
+ 0,
+ GL_RGBA,
+ GL_UNSIGNED_BYTE,
+ data_);
+ shaderProgram_.setUniformValue(textureUniformY_, 0);
+
+ /*
+ * The shader needs the step between two texture pixels in the
+ * horizontal direction, expressed in texture coordinate units
+ * ([0, 1]). There are exactly width - 1 steps between the
+ * leftmost and rightmost texels.
+ */
+ shaderProgram_.setUniformValue(textureUniformStep_,
+ 1.0f / (size_.width() / 2 - 1),
+ 1.0f /* not used */);
+ break;
+
+ case libcamera::formats::ABGR8888:
+ case libcamera::formats::ARGB8888:
+ case libcamera::formats::BGRA8888:
+ case libcamera::formats::RGBA8888:
+ glActiveTexture(GL_TEXTURE0);
+ configureTexture(*textures_[0]);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ GL_RGBA,
+ size_.width(),
+ size_.height(),
+ 0,
+ GL_RGBA,
+ GL_UNSIGNED_BYTE,
+ data_);
+ shaderProgram_.setUniformValue(textureUniformY_, 0);
+ break;
+
+ case libcamera::formats::BGR888:
+ case libcamera::formats::RGB888:
+ glActiveTexture(GL_TEXTURE0);
+ configureTexture(*textures_[0]);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ GL_RGB,
+ size_.width(),
+ size_.height(),
+ 0,
+ GL_RGB,
+ GL_UNSIGNED_BYTE,
+ data_);
+ shaderProgram_.setUniformValue(textureUniformY_, 0);
+ break;
+
+ case libcamera::formats::SBGGR8:
+ case libcamera::formats::SGBRG8:
+ case libcamera::formats::SGRBG8:
+ case libcamera::formats::SRGGB8:
+ case libcamera::formats::SBGGR10_CSI2P:
+ case libcamera::formats::SGBRG10_CSI2P:
+ case libcamera::formats::SGRBG10_CSI2P:
+ case libcamera::formats::SRGGB10_CSI2P:
+ case libcamera::formats::SBGGR12_CSI2P:
+ case libcamera::formats::SGBRG12_CSI2P:
+ case libcamera::formats::SGRBG12_CSI2P:
+ case libcamera::formats::SRGGB12_CSI2P:
+ /*
+ * Raw Bayer 8-bit and packed raw Bayer 10-bit/12-bit formats
+ * are stored in a GL_RED texture.
+ * The texture width is equal to the stride.
+ */
+ glActiveTexture(GL_TEXTURE0);
+ configureTexture(*textures_[0]);
+ glTexImage2D(GL_TEXTURE_2D,
+ 0,
+ GL_RED,
+ stride_,
+ size_.height(),
+ 0,
+ GL_RED,
+ GL_UNSIGNED_BYTE,
+ data_);
+ shaderProgram_.setUniformValue(textureUniformY_, 0);
+ shaderProgram_.setUniformValue(textureUniformBayerFirstRed_,
+ firstRed_);
+ shaderProgram_.setUniformValue(textureUniformSize_,
+ size_.width(), /* in pixels */
+ size_.height());
+ shaderProgram_.setUniformValue(textureUniformStep_,
+ 1.0f / (stride_ - 1),
+ 1.0f / (size_.height() - 1));
+ break;
+
default:
break;
};
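As a worked instance of the tex_step computation above, with illustrative numbers: a 1280x720 YUYV frame yields an RGBA texture 1280 / 2 = 640 texels wide, so there are 639 steps across the [0, 1] texture coordinate range.

#include <cstdio>

int main()
{
	const unsigned int width = 1280;	 /* illustrative YUYV width */
	const unsigned int texWidth = width / 2; /* one RGBA texel per 4 bytes */

	/* texWidth - 1 steps span the [0, 1] texture coordinate range. */
	printf("tex_step.x = %f\n", 1.0f / (texWidth - 1)); /* ~0.001565 */
	return 0;
}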
@@ -431,7 +712,7 @@ void ViewFinderGL::paintGL()
<< "create fragment shader failed.";
}
- if (yuvData_) {
+ if (data_) {
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
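Taken out of context, the specialisation pattern introduced in createFragmentShader() is: one GLSL source file, compiled per format with a different set of #define macros prepended. A self-contained sketch using the Qt calls from the patch (the function name is illustrative; the resource names are those listed in selectFormat() above):

#include <QByteArray>
#include <QFile>
#include <QOpenGLShader>
#include <QStringList>

/* Compile one shared fragment shader source with per-format defines. */
bool compileWithDefines(QOpenGLShader &shader, const QString &resource,
			const QStringList &defines)
{
	QFile file(resource);
	if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
		return false;

	QByteArray src = file.readAll();
	src.prepend((defines.join('\n') + "\n").toUtf8());

	return shader.compileSourceCode(src);
}

For example, compileWithDefines(shader, ":YUV_2_planes.frag", { "#define YUV_PATTERN_UV" }) selects the NV12 sampling order without a dedicated shader file per format.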
diff --git a/src/qcam/viewfinder_gl.h b/src/qcam/viewfinder_gl.h
index 69502b7a..4a0f8ca5 100644
--- a/src/qcam/viewfinder_gl.h
+++ b/src/qcam/viewfinder_gl.h
@@ -8,6 +8,9 @@
#ifndef __VIEWFINDER_GL_H__
#define __VIEWFINDER_GL_H__
+#include <array>
+#include <memory>
+
#include <QImage>
#include <QMutex>
#include <QOpenGLBuffer>
@@ -18,8 +21,8 @@
#include <QOpenGLWidget>
#include <QSize>
-#include <libcamera/buffer.h>
#include <libcamera/formats.h>
+#include <libcamera/framebuffer.h>
#include "viewfinder.h"
@@ -53,7 +56,7 @@ protected:
private:
bool selectFormat(const libcamera::PixelFormat &format);
- void configureTexture(unsigned int id);
+ void configureTexture(QOpenGLTexture &texture);
bool createFragmentShader();
bool createVertexShader();
void removeShader();
@@ -63,33 +66,39 @@ private:
libcamera::FrameBuffer *buffer_;
libcamera::PixelFormat format_;
QSize size_;
- unsigned char *yuvData_;
+ unsigned int stride_;
+ unsigned char *data_;
- /* OpenGL components for rendering */
- QOpenGLShader *fragmentShader_;
- QOpenGLShader *vertexShader_;
+ /* Shaders */
QOpenGLShaderProgram shaderProgram_;
+ std::unique_ptr<QOpenGLShader> vertexShader_;
+ std::unique_ptr<QOpenGLShader> fragmentShader_;
+ QString vertexShaderFile_;
+ QString fragmentShaderFile_;
+ QStringList fragmentShaderDefines_;
/* Vertex buffer */
QOpenGLBuffer vertexBuffer_;
- /* Fragment and Vertex shader file name */
- QString fragmentShaderSrc_;
- QString vertexShaderSrc_;
+ /* Textures */
+ std::array<std::unique_ptr<QOpenGLTexture>, 3> textures_;
+
+ /* Common texture parameters */
+ GLuint textureMinMagFilters_;
- /* YUV texture planars and parameters */
- GLuint id_u_;
- GLuint id_v_;
- GLuint id_y_;
+ /* YUV texture parameters */
GLuint textureUniformU_;
GLuint textureUniformV_;
GLuint textureUniformY_;
- QOpenGLTexture textureU_;
- QOpenGLTexture textureV_;
- QOpenGLTexture textureY_;
+ GLuint textureUniformStep_;
unsigned int horzSubSample_;
unsigned int vertSubSample_;
+ /* Raw Bayer texture parameters */
+ GLuint textureUniformSize_;
+ GLuint textureUniformBayerFirstRed_;
+ QPointF firstRed_;
+
QMutex mutex_; /* Prevent concurrent access to image_ */
};
diff --git a/src/qcam/viewfinder_qt.h b/src/qcam/viewfinder_qt.h
index d7554288..501c72a7 100644
--- a/src/qcam/viewfinder_qt.h
+++ b/src/qcam/viewfinder_qt.h
@@ -14,8 +14,8 @@
#include <QSize>
#include <QWidget>
-#include <libcamera/buffer.h>
#include <libcamera/formats.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/pixel_format.h>
#include "format_converter.h"
diff --git a/src/v4l2/meson.build b/src/v4l2/meson.build
index e3838f0b..f78497b6 100644
--- a/src/v4l2/meson.build
+++ b/src/v4l2/meson.build
@@ -1,5 +1,12 @@
# SPDX-License-Identifier: CC0-1.0
+if not get_option('v4l2')
+ v4l2_enabled = false
+ subdir_done()
+endif
+
+v4l2_enabled = true
+
v4l2_compat_sources = files([
'v4l2_camera.cpp',
'v4l2_camera_file.cpp',
@@ -24,5 +31,5 @@ v4l2_compat = shared_library('v4l2-compat',
v4l2_compat_sources,
name_prefix : '',
install : true,
- dependencies : [ libcamera_dep, libdl ],
+ dependencies : [libcamera_private, libdl],
cpp_args : v4l2_compat_cpp_args)
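With this change the V4L2 compatibility layer is built only when the v4l2 meson option is enabled, for example by configuring the build with -Dv4l2=true; otherwise subdir_done() skips the rest of this file and v4l2_enabled stays false for any caller that tests it.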
diff --git a/src/v4l2/v4l2_camera.cpp b/src/v4l2/v4l2_camera.cpp
index 3565f369..157ab94e 100644
--- a/src/v4l2/v4l2_camera.cpp
+++ b/src/v4l2/v4l2_camera.cpp
@@ -10,11 +10,11 @@
#include <errno.h>
#include <unistd.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
using namespace libcamera;
-LOG_DECLARE_CATEGORY(V4L2Compat);
+LOG_DECLARE_CATEGORY(V4L2Compat)
V4L2Camera::V4L2Camera(std::shared_ptr<Camera> camera)
: camera_(camera), isRunning_(false), bufferAllocator_(nullptr),
@@ -49,6 +49,8 @@ int V4L2Camera::open(StreamConfiguration *streamConfig)
void V4L2Camera::close()
{
+ requestPool_.clear();
+
delete bufferAllocator_;
bufferAllocator_ = nullptr;
@@ -96,6 +98,7 @@ void V4L2Camera::requestComplete(Request *request)
if (ret != sizeof(data))
LOG(V4L2Compat, Error) << "Failed to signal eventfd POLLIN";
+ request->reuse();
{
MutexLocker locker(bufferMutex_);
bufferAvailableCount_++;
@@ -154,16 +157,30 @@ int V4L2Camera::validateConfiguration(const PixelFormat &pixelFormat,
return 0;
}
-int V4L2Camera::allocBuffers([[maybe_unused]] unsigned int count)
+int V4L2Camera::allocBuffers(unsigned int count)
{
Stream *stream = config_->at(0).stream();
- return bufferAllocator_->allocate(stream);
+ int ret = bufferAllocator_->allocate(stream);
+ if (ret < 0)
+ return ret;
+
+ for (unsigned int i = 0; i < count; i++) {
+ std::unique_ptr<Request> request = camera_->createRequest(i);
+ if (!request) {
+ requestPool_.clear();
+ return -ENOMEM;
+ }
+ requestPool_.push_back(std::move(request));
+ }
+
+ return ret;
}
void V4L2Camera::freeBuffers()
{
pendingRequests_.clear();
+ requestPool_.clear();
Stream *stream = config_->at(0).stream();
bufferAllocator_->free(stream);
@@ -192,9 +209,9 @@ int V4L2Camera::streamOn()
isRunning_ = true;
- for (std::unique_ptr<Request> &req : pendingRequests_) {
+ for (Request *req : pendingRequests_) {
/* \todo What should we do if this returns -EINVAL? */
- ret = camera_->queueRequest(req.release());
+ ret = camera_->queueRequest(req);
if (ret < 0)
return ret == -EACCES ? -EBUSY : ret;
}
@@ -206,8 +223,12 @@ int V4L2Camera::streamOn()
int V4L2Camera::streamOff()
{
- if (!isRunning_)
+ if (!isRunning_) {
+ for (std::unique_ptr<Request> &req : requestPool_)
+ req->reuse();
+
return 0;
+ }
pendingRequests_.clear();
@@ -226,12 +247,11 @@ int V4L2Camera::streamOff()
int V4L2Camera::qbuf(unsigned int index)
{
- std::unique_ptr<Request> request =
- std::unique_ptr<Request>(camera_->createRequest(index));
- if (!request) {
- LOG(V4L2Compat, Error) << "Can't create request";
- return -ENOMEM;
+ if (index >= requestPool_.size()) {
+ LOG(V4L2Compat, Error) << "Invalid index";
+ return -EINVAL;
}
+ Request *request = requestPool_[index].get();
Stream *stream = config_->at(0).stream();
FrameBuffer *buffer = bufferAllocator_->buffers(stream)[index].get();
@@ -242,11 +262,11 @@ int V4L2Camera::qbuf(unsigned int index)
}
if (!isRunning_) {
- pendingRequests_.push_back(std::move(request));
+ pendingRequests_.push_back(request);
return 0;
}
- ret = camera_->queueRequest(request.release());
+ ret = camera_->queueRequest(request);
if (ret < 0) {
LOG(V4L2Compat, Error) << "Can't queue request";
return ret == -EACCES ? -EBUSY : ret;
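The pattern introduced across allocBuffers(), qbuf() and requestComplete() is a fixed pool of requests, created once per buffer index and recycled rather than reallocated on every queue. A reduced sketch of that lifecycle, using only the libcamera calls that appear in the patch (the surrounding function names and globals are illustrative):

#include <errno.h>

#include <memory>
#include <vector>

#include <libcamera/camera.h>
#include <libcamera/request.h>

using namespace libcamera;

std::vector<std::unique_ptr<Request>> requestPool;

/* Create one request per buffer index, once, at allocation time. */
int createPool(Camera *camera, unsigned int count)
{
	for (unsigned int i = 0; i < count; i++) {
		std::unique_ptr<Request> request = camera->createRequest(i);
		if (!request) {
			requestPool.clear();
			return -ENOMEM;
		}
		requestPool.push_back(std::move(request));
	}
	return 0;
}

/* On completion, make the same request queueable again. */
void onComplete(Request *request)
{
	request->reuse();
}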
diff --git a/src/v4l2/v4l2_camera.h b/src/v4l2/v4l2_camera.h
index 1fc5ebef..a095f4e2 100644
--- a/src/v4l2/v4l2_camera.h
+++ b/src/v4l2/v4l2_camera.h
@@ -12,13 +12,13 @@
#include <mutex>
#include <utility>
-#include <libcamera/buffer.h>
+#include <libcamera/base/semaphore.h>
+
#include <libcamera/camera.h>
#include <libcamera/file_descriptor.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/framebuffer_allocator.h>
-#include "libcamera/internal/semaphore.h"
-
using namespace libcamera;
class V4L2Camera
@@ -26,12 +26,12 @@ class V4L2Camera
public:
struct Buffer {
Buffer(unsigned int index, const FrameMetadata &data)
- : index(index), data(data)
+ : index_(index), data_(data)
{
}
- unsigned int index;
- FrameMetadata data;
+ unsigned int index_;
+ FrameMetadata data_;
};
V4L2Camera(std::shared_ptr<Camera> camera);
@@ -76,7 +76,9 @@ private:
std::mutex bufferLock_;
FrameBufferAllocator *bufferAllocator_;
- std::deque<std::unique_ptr<Request>> pendingRequests_;
+ std::vector<std::unique_ptr<Request>> requestPool_;
+
+ std::deque<Request *> pendingRequests_;
std::deque<std::unique_ptr<Buffer>> completedBuffers_;
int efd_;
diff --git a/src/v4l2/v4l2_camera_proxy.cpp b/src/v4l2/v4l2_camera_proxy.cpp
index 8ff990f6..7682c4bd 100644
--- a/src/v4l2/v4l2_camera_proxy.cpp
+++ b/src/v4l2/v4l2_camera_proxy.cpp
@@ -18,11 +18,12 @@
#include <libcamera/camera.h>
#include <libcamera/formats.h>
-#include <libcamera/object.h>
+
+#include <libcamera/base/log.h>
+#include <libcamera/base/object.h>
+#include <libcamera/base/utils.h>
#include "libcamera/internal/formats.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/utils.h"
#include "v4l2_camera.h"
#include "v4l2_camera_file.h"
@@ -32,7 +33,7 @@
using namespace libcamera;
-LOG_DECLARE_CATEGORY(V4L2Compat);
+LOG_DECLARE_CATEGORY(V4L2Compat)
V4L2CameraProxy::V4L2CameraProxy(unsigned int index,
std::shared_ptr<Camera> camera)
@@ -206,8 +207,8 @@ void V4L2CameraProxy::updateBuffers()
{
std::vector<V4L2Camera::Buffer> completedBuffers = vcam_->completedBuffers();
for (const V4L2Camera::Buffer &buffer : completedBuffers) {
- const FrameMetadata &fmd = buffer.data;
- struct v4l2_buffer &buf = buffers_[buffer.index];
+ const FrameMetadata &fmd = buffer.data_;
+ struct v4l2_buffer &buf = buffers_[buffer.index_];
switch (fmd.status) {
case FrameMetadata::FrameSuccess:
diff --git a/src/v4l2/v4l2_compat_manager.cpp b/src/v4l2/v4l2_compat_manager.cpp
index 90c0f012..e566125a 100644
--- a/src/v4l2/v4l2_compat_manager.cpp
+++ b/src/v4l2/v4l2_compat_manager.cpp
@@ -19,11 +19,12 @@
#include <sys/types.h>
#include <unistd.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>
-#include "libcamera/internal/log.h"
-
#include "v4l2_camera_file.h"
using namespace libcamera;
@@ -81,11 +82,10 @@ int V4L2CompatManager::start()
* For each Camera registered in the system, a V4L2CameraProxy gets
* created here to wrap a camera device.
*/
- unsigned int index = 0;
- for (auto &camera : cm_->cameras()) {
+ auto cameras = cm_->cameras();
+ for (auto [index, camera] : utils::enumerate(cameras)) {
V4L2CameraProxy *proxy = new V4L2CameraProxy(index, camera);
proxies_.emplace_back(proxy);
- ++index;
}
return 0;
@@ -117,11 +117,10 @@ int V4L2CompatManager::getCameraIndex(int fd)
if (!target)
return -1;
- unsigned int index = 0;
- for (auto &camera : cm_->cameras()) {
+ auto cameras = cm_->cameras();
+ for (auto [index, camera] : utils::enumerate(cameras)) {
if (camera == target)
return index;
- ++index;
}
return -1;
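Both loops above replace a manual counter with utils::enumerate(), which yields (index, element) pairs over a range; the range is stored in a local first because enumerate() operates on a reference to it. A minimal sketch of the idiom:

#include <iostream>
#include <string>
#include <vector>

#include <libcamera/base/utils.h>

int main()
{
	std::vector<std::string> cameras{ "front", "rear" };

	for (auto [index, camera] : libcamera::utils::enumerate(cameras))
		std::cout << index << ": " << camera << std::endl;

	return 0;
}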