summaryrefslogtreecommitdiff
path: root/src/ipa/raspberrypi
diff options
context:
space:
mode:
Diffstat (limited to 'src/ipa/raspberrypi')
-rw-r--r--src/ipa/raspberrypi/cam_helper.cpp119
-rw-r--r--src/ipa/raspberrypi/cam_helper.hpp51
-rw-r--r--src/ipa/raspberrypi/cam_helper_imx219.cpp134
-rw-r--r--src/ipa/raspberrypi/cam_helper_imx290.cpp67
-rw-r--r--src/ipa/raspberrypi/cam_helper_imx477.cpp214
-rw-r--r--src/ipa/raspberrypi/cam_helper_ov5647.cpp27
-rw-r--r--src/ipa/raspberrypi/cam_helper_ov9281.cpp65
-rw-r--r--src/ipa/raspberrypi/controller/agc_algorithm.hpp8
-rw-r--r--src/ipa/raspberrypi/controller/agc_status.h12
-rw-r--r--src/ipa/raspberrypi/controller/algorithm.hpp6
-rw-r--r--src/ipa/raspberrypi/controller/awb_algorithm.hpp1
-rw-r--r--src/ipa/raspberrypi/controller/camera_mode.h8
-rw-r--r--src/ipa/raspberrypi/controller/controller.cpp19
-rw-r--r--src/ipa/raspberrypi/controller/denoise_algorithm.hpp23
-rw-r--r--src/ipa/raspberrypi/controller/denoise_status.h24
-rw-r--r--src/ipa/raspberrypi/controller/device_status.cpp21
-rw-r--r--src/ipa/raspberrypi/controller/device_status.h39
-rw-r--r--src/ipa/raspberrypi/controller/logging.hpp30
-rw-r--r--src/ipa/raspberrypi/controller/metadata.hpp70
-rw-r--r--src/ipa/raspberrypi/controller/pwl.cpp30
-rw-r--r--src/ipa/raspberrypi/controller/pwl.hpp3
-rw-r--r--src/ipa/raspberrypi/controller/rpi/agc.cpp526
-rw-r--r--src/ipa/raspberrypi/controller/rpi/agc.hpp50
-rw-r--r--src/ipa/raspberrypi/controller/rpi/alsc.cpp78
-rw-r--r--src/ipa/raspberrypi/controller/rpi/awb.cpp213
-rw-r--r--src/ipa/raspberrypi/controller/rpi/awb.hpp17
-rw-r--r--src/ipa/raspberrypi/controller/rpi/black_level.cpp11
-rw-r--r--src/ipa/raspberrypi/controller/rpi/ccm.cpp26
-rw-r--r--src/ipa/raspberrypi/controller/rpi/ccm.hpp3
-rw-r--r--src/ipa/raspberrypi/controller/rpi/contrast.cpp29
-rw-r--r--src/ipa/raspberrypi/controller/rpi/contrast.hpp5
-rw-r--r--src/ipa/raspberrypi/controller/rpi/dpc.cpp8
-rw-r--r--src/ipa/raspberrypi/controller/rpi/focus.cpp2
-rw-r--r--src/ipa/raspberrypi/controller/rpi/geq.cpp22
-rw-r--r--src/ipa/raspberrypi/controller/rpi/lux.cpp30
-rw-r--r--src/ipa/raspberrypi/controller/rpi/lux.hpp7
-rw-r--r--src/ipa/raspberrypi/controller/rpi/noise.cpp14
-rw-r--r--src/ipa/raspberrypi/controller/rpi/noise.hpp2
-rw-r--r--src/ipa/raspberrypi/controller/rpi/sdn.cpp38
-rw-r--r--src/ipa/raspberrypi/controller/rpi/sdn.hpp5
-rw-r--r--src/ipa/raspberrypi/controller/rpi/sharpen.cpp11
-rw-r--r--src/ipa/raspberrypi/controller/sdn_status.h23
-rw-r--r--src/ipa/raspberrypi/data/imx219.json17
-rw-r--r--src/ipa/raspberrypi/data/imx290.json165
-rw-r--r--src/ipa/raspberrypi/data/imx477.json17
-rw-r--r--src/ipa/raspberrypi/data/meson.build5
-rw-r--r--src/ipa/raspberrypi/data/ov5647.json17
-rw-r--r--src/ipa/raspberrypi/data/ov9281.json92
-rw-r--r--src/ipa/raspberrypi/data/se327m12.json341
-rw-r--r--src/ipa/raspberrypi/md_parser.cpp101
-rw-r--r--src/ipa/raspberrypi/md_parser.hpp178
-rw-r--r--src/ipa/raspberrypi/md_parser_rpi.cpp37
-rw-r--r--src/ipa/raspberrypi/md_parser_rpi.hpp32
-rw-r--r--src/ipa/raspberrypi/md_parser_smia.cpp149
-rw-r--r--src/ipa/raspberrypi/meson.build12
-rw-r--r--src/ipa/raspberrypi/raspberrypi.cpp986
56 files changed, 2835 insertions, 1405 deletions
diff --git a/src/ipa/raspberrypi/cam_helper.cpp b/src/ipa/raspberrypi/cam_helper.cpp
index c8ac3232..3c6afce7 100644
--- a/src/ipa/raspberrypi/cam_helper.cpp
+++ b/src/ipa/raspberrypi/cam_helper.cpp
@@ -17,6 +17,12 @@
#include "md_parser.hpp"
using namespace RPiController;
+using namespace libcamera;
+using libcamera::utils::Duration;
+
+namespace libcamera {
+LOG_DECLARE_CATEGORY(IPARPI)
+}
static std::map<std::string, CamHelperCreateFunc> cam_helpers;
@@ -34,37 +40,80 @@ CamHelper *CamHelper::Create(std::string const &cam_name)
return nullptr;
}
-CamHelper::CamHelper(MdParser *parser)
- : parser_(parser), initialized_(false)
+CamHelper::CamHelper(std::unique_ptr<MdParser> parser, unsigned int frameIntegrationDiff)
+ : parser_(std::move(parser)), initialized_(false),
+ frameIntegrationDiff_(frameIntegrationDiff)
{
}
CamHelper::~CamHelper()
{
- delete parser_;
}
-uint32_t CamHelper::ExposureLines(double exposure_us) const
+void CamHelper::Prepare(Span<const uint8_t> buffer,
+ Metadata &metadata)
+{
+ parseEmbeddedData(buffer, metadata);
+}
+
+void CamHelper::Process([[maybe_unused]] StatisticsPtr &stats,
+ [[maybe_unused]] Metadata &metadata)
+{
+}
+
+uint32_t CamHelper::ExposureLines(const Duration exposure) const
+{
+ assert(initialized_);
+ return exposure / mode_.line_length;
+}
+
+Duration CamHelper::Exposure(uint32_t exposure_lines) const
{
assert(initialized_);
- return exposure_us * 1000.0 / mode_.line_length;
+ return exposure_lines * mode_.line_length;
}
-double CamHelper::Exposure(uint32_t exposure_lines) const
+uint32_t CamHelper::GetVBlanking(Duration &exposure,
+ Duration minFrameDuration,
+ Duration maxFrameDuration) const
{
+ uint32_t frameLengthMin, frameLengthMax, vblank;
+ uint32_t exposureLines = ExposureLines(exposure);
+
assert(initialized_);
- return exposure_lines * mode_.line_length / 1000.0;
+
+ /*
+ * minFrameDuration and maxFrameDuration are clamped by the caller
+ * based on the limits for the active sensor mode.
+ */
+ frameLengthMin = minFrameDuration / mode_.line_length;
+ frameLengthMax = maxFrameDuration / mode_.line_length;
+
+ /*
+ * Limit the exposure to the maximum frame duration requested, and
+ * re-calculate if it has been clipped.
+ */
+ exposureLines = std::min(frameLengthMax - frameIntegrationDiff_, exposureLines);
+ exposure = Exposure(exposureLines);
+
+ /* Limit the vblank to the range allowed by the frame length limits. */
+ vblank = std::clamp(exposureLines + frameIntegrationDiff_,
+ frameLengthMin, frameLengthMax) - mode_.height;
+ return vblank;
}
void CamHelper::SetCameraMode(const CameraMode &mode)
{
mode_ = mode;
- parser_->SetBitsPerPixel(mode.bitdepth);
- parser_->SetLineLengthBytes(0); /* We use SetBufferSize. */
+ if (parser_) {
+ parser_->SetBitsPerPixel(mode.bitdepth);
+ parser_->SetLineLengthBytes(0); /* We use SetBufferSize. */
+ }
initialized_ = true;
}
-void CamHelper::GetDelays(int &exposure_delay, int &gain_delay) const
+void CamHelper::GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const
{
/*
* These values are correct for many sensors. Other sensors will
@@ -72,6 +121,7 @@ void CamHelper::GetDelays(int &exposure_delay, int &gain_delay) const
*/
exposure_delay = 2;
gain_delay = 1;
+ vblank_delay = 2;
}
bool CamHelper::SensorEmbeddedDataPresent() const
@@ -82,10 +132,10 @@ bool CamHelper::SensorEmbeddedDataPresent() const
unsigned int CamHelper::HideFramesStartup() const
{
/*
- * By default, hide 6 frames completely at start-up while AGC etc. sort
- * themselves out (converge).
+ * The number of frames when a camera first starts that shouldn't be
+ * displayed as they are invalid in some way.
*/
- return 6;
+ return 0;
}
unsigned int CamHelper::HideFramesModeSwitch() const
@@ -106,6 +156,49 @@ unsigned int CamHelper::MistrustFramesModeSwitch() const
return 0;
}
+void CamHelper::parseEmbeddedData(Span<const uint8_t> buffer,
+ Metadata &metadata)
+{
+ MdParser::RegisterMap registers;
+ Metadata parsedMetadata;
+
+ if (buffer.empty())
+ return;
+
+ if (parser_->Parse(buffer, registers) != MdParser::Status::OK) {
+ LOG(IPARPI, Error) << "Embedded data buffer parsing failed";
+ return;
+ }
+
+ PopulateMetadata(registers, parsedMetadata);
+ metadata.Merge(parsedMetadata);
+
+ /*
+ * Overwrite the exposure/gain values in the existing DeviceStatus with
+ * values from the parsed embedded buffer. Fetch it first in case any
+ * other fields were set meaningfully.
+ */
+ DeviceStatus deviceStatus, parsedDeviceStatus;
+ if (metadata.Get("device.status", deviceStatus) ||
+ parsedMetadata.Get("device.status", parsedDeviceStatus)) {
+ LOG(IPARPI, Error) << "DeviceStatus not found";
+ return;
+ }
+
+ deviceStatus.shutter_speed = parsedDeviceStatus.shutter_speed;
+ deviceStatus.analogue_gain = parsedDeviceStatus.analogue_gain;
+ deviceStatus.frame_length = parsedDeviceStatus.frame_length;
+
+ LOG(IPARPI, Debug) << "Metadata updated - " << deviceStatus;
+
+ metadata.Set("device.status", deviceStatus);
+}
+
+void CamHelper::PopulateMetadata([[maybe_unused]] const MdParser::RegisterMap &registers,
+ [[maybe_unused]] Metadata &metadata) const
+{
+}
+
RegisterCamHelper::RegisterCamHelper(char const *cam_name,
CamHelperCreateFunc create_func)
{
diff --git a/src/ipa/raspberrypi/cam_helper.hpp b/src/ipa/raspberrypi/cam_helper.hpp
index 044c2866..200cc83f 100644
--- a/src/ipa/raspberrypi/cam_helper.hpp
+++ b/src/ipa/raspberrypi/cam_helper.hpp
@@ -6,9 +6,15 @@
*/
#pragma once
+#include <memory>
#include <string>
+#include <libcamera/base/span.h>
+#include <libcamera/base/utils.h>
+
#include "camera_mode.h"
+#include "controller/controller.hpp"
+#include "controller/metadata.hpp"
#include "md_parser.hpp"
#include "libcamera/internal/v4l2_videodevice.h"
@@ -16,8 +22,8 @@
namespace RPiController {
// The CamHelper class provides a number of facilities that anyone trying
-// trying to drive a camera will need to know, but which are not provided by
-// by the standard driver framework. Specifically, it provides:
+// to drive a camera will need to know, but which are not provided by the
+// standard driver framework. Specifically, it provides:
//
// A "CameraMode" structure to describe extra information about the chosen
// mode of the driver. For example, how it is cropped from the full sensor
@@ -28,14 +34,14 @@ namespace RPiController {
// exposure time, and to convert between the sensor's gain codes and actual
// gains.
//
-// A method to return the number of frames of delay between updating exposure
-// and analogue gain and the changes taking effect. For many sensors these
-// take the values 2 and 1 respectively, but sensors that are different will
-// need to over-ride the default method provided.
+// A method to return the number of frames of delay between updating exposure,
+// analogue gain and vblanking, and for the changes to take effect. For many
+// sensors these take the values 2, 1 and 2 respectively, but sensors that are
+// different will need to over-ride the default method provided.
//
// A method to query if the sensor outputs embedded data that can be parsed.
//
-// A parser to parse the metadata buffers provided by some sensors (for
+// A parser to parse the embedded data buffers provided by some sensors (for
// example, the imx219 does; the ov5647 doesn't). This allows us to know for
// sure the exposure and gain of the frame we're looking at. CamHelper
// provides methods for converting analogue gains to and from the sensor's
@@ -62,24 +68,43 @@ class CamHelper
{
public:
static CamHelper *Create(std::string const &cam_name);
- CamHelper(MdParser *parser);
+ CamHelper(std::unique_ptr<MdParser> parser, unsigned int frameIntegrationDiff);
virtual ~CamHelper();
void SetCameraMode(const CameraMode &mode);
- MdParser &Parser() const { return *parser_; }
- uint32_t ExposureLines(double exposure_us) const;
- double Exposure(uint32_t exposure_lines) const; // in us
+ virtual void Prepare(libcamera::Span<const uint8_t> buffer,
+ Metadata &metadata);
+ virtual void Process(StatisticsPtr &stats, Metadata &metadata);
+ uint32_t ExposureLines(libcamera::utils::Duration exposure) const;
+ libcamera::utils::Duration Exposure(uint32_t exposure_lines) const;
+ virtual uint32_t GetVBlanking(libcamera::utils::Duration &exposure,
+ libcamera::utils::Duration minFrameDuration,
+ libcamera::utils::Duration maxFrameDuration) const;
virtual uint32_t GainCode(double gain) const = 0;
virtual double Gain(uint32_t gain_code) const = 0;
- virtual void GetDelays(int &exposure_delay, int &gain_delay) const;
+ virtual void GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const;
virtual bool SensorEmbeddedDataPresent() const;
virtual unsigned int HideFramesStartup() const;
virtual unsigned int HideFramesModeSwitch() const;
virtual unsigned int MistrustFramesStartup() const;
virtual unsigned int MistrustFramesModeSwitch() const;
+
protected:
- MdParser *parser_;
+ void parseEmbeddedData(libcamera::Span<const uint8_t> buffer,
+ Metadata &metadata);
+ virtual void PopulateMetadata(const MdParser::RegisterMap &registers,
+ Metadata &metadata) const;
+
+ std::unique_ptr<MdParser> parser_;
CameraMode mode_;
+
+private:
bool initialized_;
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ unsigned int frameIntegrationDiff_;
};
// This is for registering camera helpers with the system, so that the
diff --git a/src/ipa/raspberrypi/cam_helper_imx219.cpp b/src/ipa/raspberrypi/cam_helper_imx219.cpp
index db8ab879..a3caab71 100644
--- a/src/ipa/raspberrypi/cam_helper_imx219.cpp
+++ b/src/ipa/raspberrypi/cam_helper_imx219.cpp
@@ -11,35 +11,29 @@
#include <stdlib.h>
/*
- * We have observed the imx219 embedded data stream randomly return junk
- * reister values. Do not rely on embedded data until this has been resolved.
+ * We have observed that the imx219 embedded data stream randomly returns junk
+ * register values. Do not rely on embedded data until this has been resolved.
*/
#define ENABLE_EMBEDDED_DATA 0
#include "cam_helper.hpp"
#if ENABLE_EMBEDDED_DATA
#include "md_parser.hpp"
-#else
-#include "md_parser_rpi.hpp"
#endif
using namespace RPiController;
-/* Metadata parser implementation specific to Sony IMX219 sensors. */
-
-class MdParserImx219 : public MdParserSmia
-{
-public:
- MdParserImx219();
- Status Parse(void *data) override;
- Status GetExposureLines(unsigned int &lines) override;
- Status GetGainCode(unsigned int &gain_code) override;
-private:
- /* Offset of the register's value in the metadata block. */
- int reg_offsets_[3];
- /* Value of the register, once read from the metadata block. */
- int reg_values_[3];
-};
+/*
+ * We care about one gain register and a pair of exposure registers. Their I2C
+ * addresses from the Sony IMX219 datasheet:
+ */
+constexpr uint32_t gainReg = 0x157;
+constexpr uint32_t expHiReg = 0x15a;
+constexpr uint32_t expLoReg = 0x15b;
+constexpr uint32_t frameLengthHiReg = 0x160;
+constexpr uint32_t frameLengthLoReg = 0x161;
+constexpr std::initializer_list<uint32_t> registerList [[maybe_unused]]
+ = { expHiReg, expLoReg, gainReg, frameLengthHiReg, frameLengthLoReg };
class CamHelperImx219 : public CamHelper
{
@@ -49,13 +43,23 @@ public:
double Gain(uint32_t gain_code) const override;
unsigned int MistrustFramesModeSwitch() const override;
bool SensorEmbeddedDataPresent() const override;
+
+private:
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ static constexpr int frameIntegrationDiff = 4;
+
+ void PopulateMetadata(const MdParser::RegisterMap &registers,
+ Metadata &metadata) const override;
};
CamHelperImx219::CamHelperImx219()
#if ENABLE_EMBEDDED_DATA
- : CamHelper(new MdParserImx219())
+ : CamHelper(std::make_unique<MdParserSmia>(registerList), frameIntegrationDiff)
#else
- : CamHelper(new MdParserRPi())
+ : CamHelper({}, frameIntegrationDiff)
#endif
{
}
@@ -85,89 +89,21 @@ bool CamHelperImx219::SensorEmbeddedDataPresent() const
return ENABLE_EMBEDDED_DATA;
}
-static CamHelper *Create()
+void CamHelperImx219::PopulateMetadata(const MdParser::RegisterMap &registers,
+ Metadata &metadata) const
{
- return new CamHelperImx219();
-}
-
-static RegisterCamHelper reg("imx219", &Create);
+ DeviceStatus deviceStatus;
-/*
- * We care about one gain register and a pair of exposure registers. Their I2C
- * addresses from the Sony IMX219 datasheet:
- */
-#define GAIN_REG 0x157
-#define EXPHI_REG 0x15A
-#define EXPLO_REG 0x15B
-
-/*
- * Index of each into the reg_offsets and reg_values arrays. Must be in
- * register address order.
- */
-#define GAIN_INDEX 0
-#define EXPHI_INDEX 1
-#define EXPLO_INDEX 2
-
-MdParserImx219::MdParserImx219()
-{
- reg_offsets_[0] = reg_offsets_[1] = reg_offsets_[2] = -1;
-}
+ deviceStatus.shutter_speed = Exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
+ deviceStatus.analogue_gain = Gain(registers.at(gainReg));
+ deviceStatus.frame_length = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
-MdParser::Status MdParserImx219::Parse(void *data)
-{
- bool try_again = false;
-
- if (reset_) {
- /*
- * Search again through the metadata for the gain and exposure
- * registers.
- */
- assert(bits_per_pixel_);
- assert(num_lines_ || buffer_size_bytes_);
- /* Need to be ordered */
- uint32_t regs[3] = { GAIN_REG, EXPHI_REG, EXPLO_REG };
- reg_offsets_[0] = reg_offsets_[1] = reg_offsets_[2] = -1;
- int ret = static_cast<int>(findRegs(static_cast<uint8_t *>(data),
- regs, reg_offsets_, 3));
- /*
- * > 0 means "worked partially but parse again next time",
- * < 0 means "hard error".
- */
- if (ret > 0)
- try_again = true;
- else if (ret < 0)
- return ERROR;
- }
-
- for (int i = 0; i < 3; i++) {
- if (reg_offsets_[i] == -1)
- continue;
-
- reg_values_[i] = static_cast<uint8_t *>(data)[reg_offsets_[i]];
- }
-
- /* Re-parse next time if we were unhappy in some way. */
- reset_ = try_again;
-
- return OK;
+ metadata.Set("device.status", deviceStatus);
}
-MdParser::Status MdParserImx219::GetExposureLines(unsigned int &lines)
+static CamHelper *Create()
{
- if (reg_offsets_[EXPHI_INDEX] == -1 || reg_offsets_[EXPLO_INDEX] == -1)
- return NOTFOUND;
-
- lines = reg_values_[EXPHI_INDEX] * 256 + reg_values_[EXPLO_INDEX];
-
- return OK;
+ return new CamHelperImx219();
}
-MdParser::Status MdParserImx219::GetGainCode(unsigned int &gain_code)
-{
- if (reg_offsets_[GAIN_INDEX] == -1)
- return NOTFOUND;
-
- gain_code = reg_values_[GAIN_INDEX];
-
- return OK;
-}
+static RegisterCamHelper reg("imx219", &Create);
diff --git a/src/ipa/raspberrypi/cam_helper_imx290.cpp b/src/ipa/raspberrypi/cam_helper_imx290.cpp
new file mode 100644
index 00000000..871c1f8e
--- /dev/null
+++ b/src/ipa/raspberrypi/cam_helper_imx290.cpp
@@ -0,0 +1,67 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2021, Raspberry Pi (Trading) Limited
+ *
+ * cam_helper_imx290.cpp - camera helper for imx290 sensor
+ */
+
+#include <math.h>
+
+#include "cam_helper.hpp"
+
+using namespace RPiController;
+
+class CamHelperImx290 : public CamHelper
+{
+public:
+ CamHelperImx290();
+ uint32_t GainCode(double gain) const override;
+ double Gain(uint32_t gain_code) const override;
+ void GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const override;
+ unsigned int HideFramesModeSwitch() const override;
+
+private:
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ static constexpr int frameIntegrationDiff = 2;
+};
+
+CamHelperImx290::CamHelperImx290()
+ : CamHelper({}, frameIntegrationDiff)
+{
+}
+
+uint32_t CamHelperImx290::GainCode(double gain) const
+{
+ int code = 66.6667 * log10(gain);
+ return std::max(0, std::min(code, 0xf0));
+}
+
+double CamHelperImx290::Gain(uint32_t gain_code) const
+{
+ return pow(10, 0.015 * gain_code);
+}
+
+void CamHelperImx290::GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const
+{
+ exposure_delay = 2;
+ gain_delay = 2;
+ vblank_delay = 2;
+}
+
+unsigned int CamHelperImx290::HideFramesModeSwitch() const
+{
+ /* After a mode switch, we seem to get 1 bad frame. */
+ return 1;
+}
+
+static CamHelper *Create()
+{
+ return new CamHelperImx290();
+}
+
+static RegisterCamHelper reg("imx290", &Create);
diff --git a/src/ipa/raspberrypi/cam_helper_imx477.cpp b/src/ipa/raspberrypi/cam_helper_imx477.cpp
index 0e896ac7..338fdc0c 100644
--- a/src/ipa/raspberrypi/cam_helper_imx477.cpp
+++ b/src/ipa/raspberrypi/cam_helper_imx477.cpp
@@ -6,30 +6,36 @@
*/
#include <assert.h>
+#include <cmath>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
+#include <libcamera/base/log.h>
+
#include "cam_helper.hpp"
#include "md_parser.hpp"
using namespace RPiController;
+using namespace libcamera;
+using libcamera::utils::Duration;
-/* Metadata parser implementation specific to Sony IMX477 sensors. */
+namespace libcamera {
+LOG_DECLARE_CATEGORY(IPARPI)
+}
-class MdParserImx477 : public MdParserSmia
-{
-public:
- MdParserImx477();
- Status Parse(void *data) override;
- Status GetExposureLines(unsigned int &lines) override;
- Status GetGainCode(unsigned int &gain_code) override;
-private:
- /* Offset of the register's value in the metadata block. */
- int reg_offsets_[4];
- /* Value of the register, once read from the metadata block. */
- int reg_values_[4];
-};
+/*
+ * We care about two gain registers and a pair of exposure registers. Their
+ * I2C addresses from the Sony IMX477 datasheet:
+ */
+constexpr uint32_t expHiReg = 0x0202;
+constexpr uint32_t expLoReg = 0x0203;
+constexpr uint32_t gainHiReg = 0x0204;
+constexpr uint32_t gainLoReg = 0x0205;
+constexpr uint32_t frameLengthHiReg = 0x0340;
+constexpr uint32_t frameLengthLoReg = 0x0341;
+constexpr std::initializer_list<uint32_t> registerList =
+ { expHiReg, expLoReg, gainHiReg, gainLoReg, frameLengthHiReg, frameLengthLoReg };
class CamHelperImx477 : public CamHelper
{
@@ -37,11 +43,30 @@ public:
CamHelperImx477();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
+ void Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
+ uint32_t GetVBlanking(Duration &exposure, Duration minFrameDuration,
+ Duration maxFrameDuration) const override;
+ void GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const override;
bool SensorEmbeddedDataPresent() const override;
+
+private:
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ static constexpr int frameIntegrationDiff = 22;
+ /* Maximum frame length allowable for long exposure calculations. */
+ static constexpr int frameLengthMax = 0xffdc;
+ /* Largest long exposure scale factor given as a left shift on the frame length. */
+ static constexpr int longExposureShiftMax = 7;
+
+ void PopulateMetadata(const MdParser::RegisterMap &registers,
+ Metadata &metadata) const override;
};
CamHelperImx477::CamHelperImx477()
- : CamHelper(new MdParserImx477())
+ : CamHelper(std::make_unique<MdParserSmia>(registerList), frameIntegrationDiff)
{
}
@@ -55,101 +80,104 @@ double CamHelperImx477::Gain(uint32_t gain_code) const
return 1024.0 / (1024 - gain_code);
}
-bool CamHelperImx477::SensorEmbeddedDataPresent() const
-{
- return true;
-}
-
-static CamHelper *Create()
+void CamHelperImx477::Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
{
- return new CamHelperImx477();
-}
-
-static RegisterCamHelper reg("imx477", &Create);
-
-/*
- * We care about two gain registers and a pair of exposure registers. Their
- * I2C addresses from the Sony IMX477 datasheet:
- */
-#define EXPHI_REG 0x0202
-#define EXPLO_REG 0x0203
-#define GAINHI_REG 0x0204
-#define GAINLO_REG 0x0205
+ MdParser::RegisterMap registers;
+ DeviceStatus deviceStatus;
-/*
- * Index of each into the reg_offsets and reg_values arrays. Must be in register
- * address order.
- */
-#define EXPHI_INDEX 0
-#define EXPLO_INDEX 1
-#define GAINHI_INDEX 2
-#define GAINLO_INDEX 3
+ if (metadata.Get("device.status", deviceStatus)) {
+ LOG(IPARPI, Error) << "DeviceStatus not found from DelayedControls";
+ return;
+ }
-MdParserImx477::MdParserImx477()
-{
- reg_offsets_[0] = reg_offsets_[1] = reg_offsets_[2] = reg_offsets_[3] = -1;
+ parseEmbeddedData(buffer, metadata);
+
+ /*
+ * The DeviceStatus struct is first populated with values obtained from
+ * DelayedControls. If this reports frame length is > frameLengthMax,
+ * it means we are using a long exposure mode. Since the long exposure
+ * scale factor is not returned back through embedded data, we must rely
+ * on the existing exposure lines and frame length values returned by
+ * DelayedControls.
+ *
+ * Otherwise, all values are updated with what is reported in the
+ * embedded data.
+ */
+ if (deviceStatus.frame_length > frameLengthMax) {
+ DeviceStatus parsedDeviceStatus;
+
+ metadata.Get("device.status", parsedDeviceStatus);
+ parsedDeviceStatus.shutter_speed = deviceStatus.shutter_speed;
+ parsedDeviceStatus.frame_length = deviceStatus.frame_length;
+ metadata.Set("device.status", parsedDeviceStatus);
+
+ LOG(IPARPI, Debug) << "Metadata updated for long exposure: "
+ << parsedDeviceStatus;
+ }
}
-MdParser::Status MdParserImx477::Parse(void *data)
+uint32_t CamHelperImx477::GetVBlanking(Duration &exposure,
+ Duration minFrameDuration,
+ Duration maxFrameDuration) const
{
- bool try_again = false;
-
- if (reset_) {
- /*
- * Search again through the metadata for the gain and exposure
- * registers.
- */
- assert(bits_per_pixel_);
- assert(num_lines_ || buffer_size_bytes_);
- /* Need to be ordered */
- uint32_t regs[4] = {
- EXPHI_REG,
- EXPLO_REG,
- GAINHI_REG,
- GAINLO_REG
- };
- reg_offsets_[0] = reg_offsets_[1] = reg_offsets_[2] = reg_offsets_[3] = -1;
- int ret = static_cast<int>(findRegs(static_cast<uint8_t *>(data),
- regs, reg_offsets_, 4));
- /*
- * > 0 means "worked partially but parse again next time",
- * < 0 means "hard error".
- */
- if (ret > 0)
- try_again = true;
- else if (ret < 0)
- return ERROR;
+ uint32_t frameLength, exposureLines;
+ unsigned int shift = 0;
+
+ frameLength = mode_.height + CamHelper::GetVBlanking(exposure, minFrameDuration,
+ maxFrameDuration);
+ /*
+ * Check if the frame length calculated needs to be setup for long
+ * exposure mode. This will require us to use a long exposure scale
+ * factor provided by a shift operation in the sensor.
+ */
+ while (frameLength > frameLengthMax) {
+ if (++shift > longExposureShiftMax) {
+ shift = longExposureShiftMax;
+ frameLength = frameLengthMax;
+ break;
+ }
+ frameLength >>= 1;
}
- for (int i = 0; i < 4; i++) {
- if (reg_offsets_[i] == -1)
- continue;
-
- reg_values_[i] = static_cast<uint8_t *>(data)[reg_offsets_[i]];
+ if (shift) {
+ /* Account for any rounding in the scaled frame length value. */
+ frameLength <<= shift;
+ exposureLines = ExposureLines(exposure);
+ exposureLines = std::min(exposureLines, frameLength - frameIntegrationDiff);
+ exposure = Exposure(exposureLines);
}
- /* Re-parse next time if we were unhappy in some way. */
- reset_ = try_again;
-
- return OK;
+ return frameLength - mode_.height;
}
-MdParser::Status MdParserImx477::GetExposureLines(unsigned int &lines)
+void CamHelperImx477::GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const
{
- if (reg_offsets_[EXPHI_INDEX] == -1 || reg_offsets_[EXPLO_INDEX] == -1)
- return NOTFOUND;
-
- lines = reg_values_[EXPHI_INDEX] * 256 + reg_values_[EXPLO_INDEX];
+ exposure_delay = 2;
+ gain_delay = 2;
+ vblank_delay = 3;
+}
- return OK;
+bool CamHelperImx477::SensorEmbeddedDataPresent() const
+{
+ return true;
}
-MdParser::Status MdParserImx477::GetGainCode(unsigned int &gain_code)
+void CamHelperImx477::PopulateMetadata(const MdParser::RegisterMap &registers,
+ Metadata &metadata) const
{
- if (reg_offsets_[GAINHI_INDEX] == -1 || reg_offsets_[GAINLO_INDEX] == -1)
- return NOTFOUND;
+ DeviceStatus deviceStatus;
- gain_code = reg_values_[GAINHI_INDEX] * 256 + reg_values_[GAINLO_INDEX];
+ deviceStatus.shutter_speed = Exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
+ deviceStatus.analogue_gain = Gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
+ deviceStatus.frame_length = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
- return OK;
+ metadata.Set("device.status", deviceStatus);
}
+
+static CamHelper *Create()
+{
+ return new CamHelperImx477();
+}
+
+static RegisterCamHelper reg("imx477", &Create);
diff --git a/src/ipa/raspberrypi/cam_helper_ov5647.cpp b/src/ipa/raspberrypi/cam_helper_ov5647.cpp
index dc5d8275..702c2d07 100644
--- a/src/ipa/raspberrypi/cam_helper_ov5647.cpp
+++ b/src/ipa/raspberrypi/cam_helper_ov5647.cpp
@@ -8,7 +8,6 @@
#include <assert.h>
#include "cam_helper.hpp"
-#include "md_parser_rpi.hpp"
using namespace RPiController;
@@ -18,10 +17,19 @@ public:
CamHelperOv5647();
uint32_t GainCode(double gain) const override;
double Gain(uint32_t gain_code) const override;
- void GetDelays(int &exposure_delay, int &gain_delay) const override;
+ void GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const override;
+ unsigned int HideFramesStartup() const override;
unsigned int HideFramesModeSwitch() const override;
unsigned int MistrustFramesStartup() const override;
unsigned int MistrustFramesModeSwitch() const override;
+
+private:
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ static constexpr int frameIntegrationDiff = 4;
};
/*
@@ -30,7 +38,7 @@ public:
*/
CamHelperOv5647::CamHelperOv5647()
- : CamHelper(new MdParserRPi())
+ : CamHelper({}, frameIntegrationDiff)
{
}
@@ -44,7 +52,8 @@ double CamHelperOv5647::Gain(uint32_t gain_code) const
return static_cast<double>(gain_code) / 16.0;
}
-void CamHelperOv5647::GetDelays(int &exposure_delay, int &gain_delay) const
+void CamHelperOv5647::GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const
{
/*
* We run this sensor in a mode where the gain delay is bumped up to
@@ -52,6 +61,16 @@ void CamHelperOv5647::GetDelays(int &exposure_delay, int &gain_delay) const
*/
exposure_delay = 2;
gain_delay = 2;
+ vblank_delay = 2;
+}
+
+unsigned int CamHelperOv5647::HideFramesStartup() const
+{
+ /*
+ * On startup, we get a couple of under-exposed frames which
+ * we don't want shown.
+ */
+ return 2;
}
unsigned int CamHelperOv5647::HideFramesModeSwitch() const
diff --git a/src/ipa/raspberrypi/cam_helper_ov9281.cpp b/src/ipa/raspberrypi/cam_helper_ov9281.cpp
new file mode 100644
index 00000000..9de868c3
--- /dev/null
+++ b/src/ipa/raspberrypi/cam_helper_ov9281.cpp
@@ -0,0 +1,65 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2021, Raspberry Pi (Trading) Limited
+ *
+ * cam_helper_ov9281.cpp - camera information for ov9281 sensor
+ */
+
+#include <assert.h>
+
+#include "cam_helper.hpp"
+
+using namespace RPiController;
+
+class CamHelperOv9281 : public CamHelper
+{
+public:
+ CamHelperOv9281();
+ uint32_t GainCode(double gain) const override;
+ double Gain(uint32_t gain_code) const override;
+ void GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const override;
+
+private:
+ /*
+ * Smallest difference between the frame length and integration time,
+ * in units of lines.
+ */
+ static constexpr int frameIntegrationDiff = 4;
+};
+
+/*
+ * OV9281 doesn't output metadata, so we have to use the "unicam parser" which
+ * works by counting frames.
+ */
+
+CamHelperOv9281::CamHelperOv9281()
+ : CamHelper({}, frameIntegrationDiff)
+{
+}
+
+uint32_t CamHelperOv9281::GainCode(double gain) const
+{
+ return static_cast<uint32_t>(gain * 16.0);
+}
+
+double CamHelperOv9281::Gain(uint32_t gain_code) const
+{
+ return static_cast<double>(gain_code) / 16.0;
+}
+
+void CamHelperOv9281::GetDelays(int &exposure_delay, int &gain_delay,
+ int &vblank_delay) const
+{
+ /* The driver appears to behave as follows: */
+ exposure_delay = 2;
+ gain_delay = 2;
+ vblank_delay = 2;
+}
+
+static CamHelper *Create()
+{
+ return new CamHelperOv9281();
+}
+
+static RegisterCamHelper reg("ov9281", &Create);
diff --git a/src/ipa/raspberrypi/controller/agc_algorithm.hpp b/src/ipa/raspberrypi/controller/agc_algorithm.hpp
index b4ea54fb..61595ea2 100644
--- a/src/ipa/raspberrypi/controller/agc_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/agc_algorithm.hpp
@@ -6,6 +6,8 @@
*/
#pragma once
+#include <libcamera/base/utils.h>
+
#include "algorithm.hpp"
namespace RPiController {
@@ -15,9 +17,11 @@ class AgcAlgorithm : public Algorithm
public:
AgcAlgorithm(Controller *controller) : Algorithm(controller) {}
// An AGC algorithm must provide the following:
+ virtual unsigned int GetConvergenceFrames() const = 0;
virtual void SetEv(double ev) = 0;
- virtual void SetFlickerPeriod(double flicker_period) = 0;
- virtual void SetFixedShutter(double fixed_shutter) = 0; // microseconds
+ virtual void SetFlickerPeriod(libcamera::utils::Duration flicker_period) = 0;
+ virtual void SetFixedShutter(libcamera::utils::Duration fixed_shutter) = 0;
+ virtual void SetMaxShutter(libcamera::utils::Duration max_shutter) = 0;
virtual void SetFixedAnalogueGain(double fixed_analogue_gain) = 0;
virtual void SetMeteringMode(std::string const &metering_mode_name) = 0;
virtual void SetExposureMode(std::string const &exposure_mode_name) = 0;
diff --git a/src/ipa/raspberrypi/controller/agc_status.h b/src/ipa/raspberrypi/controller/agc_status.h
index 10381c90..20cb1b62 100644
--- a/src/ipa/raspberrypi/controller/agc_status.h
+++ b/src/ipa/raspberrypi/controller/agc_status.h
@@ -6,6 +6,8 @@
*/
#pragma once
+#include <libcamera/base/utils.h>
+
// The AGC algorithm should post the following structure into the image's
// "agc.status" metadata.
@@ -18,17 +20,17 @@ extern "C" {
// ignored until then.
struct AgcStatus {
- double total_exposure_value; // value for all exposure and gain for this image
- double target_exposure_value; // (unfiltered) target total exposure AGC is aiming for
- double shutter_time;
+ libcamera::utils::Duration total_exposure_value; // value for all exposure and gain for this image
+ libcamera::utils::Duration target_exposure_value; // (unfiltered) target total exposure AGC is aiming for
+ libcamera::utils::Duration shutter_time;
double analogue_gain;
char exposure_mode[32];
char constraint_mode[32];
char metering_mode[32];
double ev;
- double flicker_period;
+ libcamera::utils::Duration flicker_period;
int floating_region_enable;
- double fixed_shutter;
+ libcamera::utils::Duration fixed_shutter;
double fixed_analogue_gain;
double digital_gain;
int locked;
diff --git a/src/ipa/raspberrypi/controller/algorithm.hpp b/src/ipa/raspberrypi/controller/algorithm.hpp
index 6196b2f9..5123c87b 100644
--- a/src/ipa/raspberrypi/controller/algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/algorithm.hpp
@@ -12,9 +12,7 @@
#include <string>
#include <memory>
#include <map>
-#include <atomic>
-#include "logging.hpp"
#include "controller.hpp"
#include <boost/property_tree/ptree.hpp>
@@ -30,7 +28,7 @@ public:
: controller_(controller), paused_(false)
{
}
- virtual ~Algorithm() {}
+ virtual ~Algorithm() = default;
virtual char const *Name() const = 0;
virtual bool IsPaused() const { return paused_; }
virtual void Pause() { paused_ = true; }
@@ -47,7 +45,7 @@ public:
private:
Controller *controller_;
- std::atomic<bool> paused_;
+ bool paused_;
};
// This code is for automatic registration of Front End algorithms with the
diff --git a/src/ipa/raspberrypi/controller/awb_algorithm.hpp b/src/ipa/raspberrypi/controller/awb_algorithm.hpp
index 5be0c9f4..96f88afc 100644
--- a/src/ipa/raspberrypi/controller/awb_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/awb_algorithm.hpp
@@ -15,6 +15,7 @@ class AwbAlgorithm : public Algorithm
public:
AwbAlgorithm(Controller *controller) : Algorithm(controller) {}
// An AWB algorithm must provide the following:
+ virtual unsigned int GetConvergenceFrames() const = 0;
virtual void SetMode(std::string const &mode_name) = 0;
virtual void SetManualGains(double manual_r, double manual_b) = 0;
};
diff --git a/src/ipa/raspberrypi/controller/camera_mode.h b/src/ipa/raspberrypi/controller/camera_mode.h
index 920f11be..65888230 100644
--- a/src/ipa/raspberrypi/controller/camera_mode.h
+++ b/src/ipa/raspberrypi/controller/camera_mode.h
@@ -8,6 +8,8 @@
#include <libcamera/transform.h>
+#include <libcamera/base/utils.h>
+
// Description of a "camera mode", holding enough information for control
// algorithms to adapt their behaviour to the different modes of the camera,
// including binning, scaling, cropping etc.
@@ -33,10 +35,12 @@ struct CameraMode {
double scale_x, scale_y;
// scaling of the noise compared to the native sensor mode
double noise_factor;
- // line time in nanoseconds
- double line_length;
+ // line time
+ libcamera::utils::Duration line_length;
// any camera transform *not* reflected already in the camera tuning
libcamera::Transform transform;
+	// minimum and maximum frame lengths in units of lines
+ uint32_t min_frame_length, max_frame_length;
};
#ifdef __cplusplus
diff --git a/src/ipa/raspberrypi/controller/controller.cpp b/src/ipa/raspberrypi/controller/controller.cpp
index 22461cc4..d3433ad2 100644
--- a/src/ipa/raspberrypi/controller/controller.cpp
+++ b/src/ipa/raspberrypi/controller/controller.cpp
@@ -5,6 +5,8 @@
* controller.cpp - ISP controller
*/
+#include <libcamera/base/log.h>
+
#include "algorithm.hpp"
#include "controller.hpp"
@@ -12,6 +14,9 @@
#include <boost/property_tree/ptree.hpp>
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiController)
Controller::Controller()
: switch_mode_called_(false) {}
@@ -27,7 +32,6 @@ Controller::~Controller() {}
void Controller::Read(char const *filename)
{
- RPI_LOG("Controller starting");
boost::property_tree::ptree root;
boost::property_tree::read_json(filename, root);
for (auto const &key_and_value : root) {
@@ -36,10 +40,9 @@ void Controller::Read(char const *filename)
algo->Read(key_and_value.second);
algorithms_.push_back(AlgorithmPtr(algo));
} else
- RPI_LOG("WARNING: No algorithm found for \""
- << key_and_value.first << "\"");
+ LOG(RPiController, Warning)
+ << "No algorithm found for \"" << key_and_value.first << "\"";
}
- RPI_LOG("Controller finished");
}
Algorithm *Controller::CreateAlgorithm(char const *name)
@@ -50,39 +53,31 @@ Algorithm *Controller::CreateAlgorithm(char const *name)
void Controller::Initialise()
{
- RPI_LOG("Controller starting");
for (auto &algo : algorithms_)
algo->Initialise();
- RPI_LOG("Controller finished");
}
void Controller::SwitchMode(CameraMode const &camera_mode, Metadata *metadata)
{
- RPI_LOG("Controller starting");
for (auto &algo : algorithms_)
algo->SwitchMode(camera_mode, metadata);
switch_mode_called_ = true;
- RPI_LOG("Controller finished");
}
void Controller::Prepare(Metadata *image_metadata)
{
- RPI_LOG("Controller::Prepare starting");
assert(switch_mode_called_);
for (auto &algo : algorithms_)
if (!algo->IsPaused())
algo->Prepare(image_metadata);
- RPI_LOG("Controller::Prepare finished");
}
void Controller::Process(StatisticsPtr stats, Metadata *image_metadata)
{
- RPI_LOG("Controller::Process starting");
assert(switch_mode_called_);
for (auto &algo : algorithms_)
if (!algo->IsPaused())
algo->Process(stats, image_metadata);
- RPI_LOG("Controller::Process finished");
}
Metadata &Controller::GetGlobalMetadata()
diff --git a/src/ipa/raspberrypi/controller/denoise_algorithm.hpp b/src/ipa/raspberrypi/controller/denoise_algorithm.hpp
new file mode 100644
index 00000000..39fcd7e9
--- /dev/null
+++ b/src/ipa/raspberrypi/controller/denoise_algorithm.hpp
@@ -0,0 +1,23 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2021, Raspberry Pi (Trading) Limited
+ *
+ * denoise_algorithm.hpp - Denoise control algorithm interface
+ */
+#pragma once
+
+#include "algorithm.hpp"
+
+namespace RPiController {
+
+enum class DenoiseMode { Off, ColourOff, ColourFast, ColourHighQuality };
+
+class DenoiseAlgorithm : public Algorithm
+{
+public:
+ DenoiseAlgorithm(Controller *controller) : Algorithm(controller) {}
+ // A Denoise algorithm must provide the following:
+ virtual void SetMode(DenoiseMode mode) = 0;
+};
+
+} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/denoise_status.h b/src/ipa/raspberrypi/controller/denoise_status.h
new file mode 100644
index 00000000..67a3c361
--- /dev/null
+++ b/src/ipa/raspberrypi/controller/denoise_status.h
@@ -0,0 +1,24 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
+ *
+ * denoise_status.h - Denoise control algorithm status
+ */
+#pragma once
+
+// This stores the parameters required for Denoise.
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+struct DenoiseStatus {
+ double noise_constant;
+ double noise_slope;
+ double strength;
+ unsigned int mode;
+};
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/src/ipa/raspberrypi/controller/device_status.cpp b/src/ipa/raspberrypi/controller/device_status.cpp
new file mode 100644
index 00000000..f052ea8b
--- /dev/null
+++ b/src/ipa/raspberrypi/controller/device_status.cpp
@@ -0,0 +1,21 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2021, Raspberry Pi (Trading) Limited
+ *
+ * device_status.cpp - device (image sensor) status
+ */
+#include "device_status.h"
+
+using namespace libcamera; /* for the Duration operator<< overload */
+
+std::ostream &operator<<(std::ostream &out, const DeviceStatus &d)
+{
+ out << "Exposure: " << d.shutter_speed
+ << " Frame length: " << d.frame_length
+ << " Gain: " << d.analogue_gain
+ << " Aperture: " << d.aperture
+ << " Lens: " << d.lens_position
+ << " Flash: " << d.flash_intensity;
+
+ return out;
+}
diff --git a/src/ipa/raspberrypi/controller/device_status.h b/src/ipa/raspberrypi/controller/device_status.h
index aa08608b..c4a5d9c8 100644
--- a/src/ipa/raspberrypi/controller/device_status.h
+++ b/src/ipa/raspberrypi/controller/device_status.h
@@ -1,30 +1,39 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
*
* device_status.h - device (image sensor) status
*/
#pragma once
-// Definition of "device metadata" which stores things like shutter time and
-// analogue gain that downstream control algorithms will want to know.
+#include <iostream>
-#ifdef __cplusplus
-extern "C" {
-#endif
+#include <libcamera/base/utils.h>
+
+/*
+ * Definition of "device metadata" which stores things like shutter time and
+ * analogue gain that downstream control algorithms will want to know.
+ */
struct DeviceStatus {
- // time shutter is open, in microseconds
- double shutter_speed;
+ DeviceStatus()
+ : shutter_speed(std::chrono::seconds(0)), frame_length(0),
+ analogue_gain(0.0), lens_position(0.0), aperture(0.0),
+ flash_intensity(0.0)
+ {
+ }
+
+ friend std::ostream &operator<<(std::ostream &out, const DeviceStatus &d);
+
+ /* time shutter is open */
+ libcamera::utils::Duration shutter_speed;
+ /* frame length given in number of lines */
+ uint32_t frame_length;
double analogue_gain;
- // 1.0/distance-in-metres, or 0 if unknown
+ /* 1.0/distance-in-metres, or 0 if unknown */
double lens_position;
- // 1/f so that brightness quadruples when this doubles, or 0 if unknown
+ /* 1/f so that brightness quadruples when this doubles, or 0 if unknown */
double aperture;
- // proportional to brightness with 0 = no flash, 1 = maximum flash
+ /* proportional to brightness with 0 = no flash, 1 = maximum flash */
double flash_intensity;
};
-
-#ifdef __cplusplus
-}
-#endif
diff --git a/src/ipa/raspberrypi/controller/logging.hpp b/src/ipa/raspberrypi/controller/logging.hpp
deleted file mode 100644
index f0d306b6..00000000
--- a/src/ipa/raspberrypi/controller/logging.hpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Limited
- *
- * logging.hpp - logging macros
- */
-#pragma once
-
-#include <iostream>
-
-#ifndef RPI_LOGGING_ENABLE
-#define RPI_LOGGING_ENABLE 0
-#endif
-
-#ifndef RPI_WARNING_ENABLE
-#define RPI_WARNING_ENABLE 1
-#endif
-
-#define RPI_LOG(stuff) \
- do { \
- if (RPI_LOGGING_ENABLE) \
- std::cout << __FUNCTION__ << ": " << stuff << "\n"; \
- } while (0)
-
-#define RPI_WARN(stuff) \
- do { \
- if (RPI_WARNING_ENABLE) \
- std::cout << __FUNCTION__ << " ***WARNING*** " \
- << stuff << "\n"; \
- } while (0)
diff --git a/src/ipa/raspberrypi/controller/metadata.hpp b/src/ipa/raspberrypi/controller/metadata.hpp
index f3a8dfab..fd6aac88 100644
--- a/src/ipa/raspberrypi/controller/metadata.hpp
+++ b/src/ipa/raspberrypi/controller/metadata.hpp
@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
*
* metadata.hpp - general metadata class
*/
@@ -8,68 +8,104 @@
// A simple class for carrying arbitrary metadata, for example about an image.
-#include <string>
-#include <mutex>
+#include <any>
#include <map>
#include <memory>
-
-#include <boost/any.hpp>
+#include <mutex>
+#include <string>
namespace RPiController {
class Metadata
{
public:
- template<typename T> void Set(std::string const &tag, T const &value)
+ Metadata() = default;
+
+ Metadata(Metadata const &other)
{
- std::lock_guard<std::mutex> lock(mutex_);
+ std::scoped_lock other_lock(other.mutex_);
+ data_ = other.data_;
+ }
+
+ Metadata(Metadata &&other)
+ {
+ std::scoped_lock other_lock(other.mutex_);
+ data_ = std::move(other.data_);
+ other.data_.clear();
+ }
+
+ template<typename T>
+ void Set(std::string const &tag, T const &value)
+ {
+ std::scoped_lock lock(mutex_);
data_[tag] = value;
}
- template<typename T> int Get(std::string const &tag, T &value) const
+
+ template<typename T>
+ int Get(std::string const &tag, T &value) const
{
- std::lock_guard<std::mutex> lock(mutex_);
+ std::scoped_lock lock(mutex_);
auto it = data_.find(tag);
if (it == data_.end())
return -1;
- value = boost::any_cast<T>(it->second);
+ value = std::any_cast<T>(it->second);
return 0;
}
+
void Clear()
{
- std::lock_guard<std::mutex> lock(mutex_);
+ std::scoped_lock lock(mutex_);
data_.clear();
}
+
Metadata &operator=(Metadata const &other)
{
- std::lock_guard<std::mutex> lock(mutex_);
- std::lock_guard<std::mutex> other_lock(other.mutex_);
+ std::scoped_lock lock(mutex_, other.mutex_);
data_ = other.data_;
return *this;
}
- template<typename T> T *GetLocked(std::string const &tag)
+
+ Metadata &operator=(Metadata &&other)
+ {
+ std::scoped_lock lock(mutex_, other.mutex_);
+ data_ = std::move(other.data_);
+ other.data_.clear();
+ return *this;
+ }
+
+ void Merge(Metadata &other)
+ {
+ std::scoped_lock lock(mutex_, other.mutex_);
+ data_.merge(other.data_);
+ }
+
+ template<typename T>
+ T *GetLocked(std::string const &tag)
{
// This allows in-place access to the Metadata contents,
// for which you should be holding the lock.
auto it = data_.find(tag);
if (it == data_.end())
return nullptr;
- return boost::any_cast<T>(&it->second);
+ return std::any_cast<T>(&it->second);
}
+
template<typename T>
void SetLocked(std::string const &tag, T const &value)
{
// Use this only if you're holding the lock yourself.
data_[tag] = value;
}
+
// Note: use of (lowercase) lock and unlock means you can create scoped
// locks with the standard lock classes.
- // e.g. std::lock_guard<PisP::Metadata> lock(metadata)
+ // e.g. std::lock_guard<RPiController::Metadata> lock(metadata)
void lock() { mutex_.lock(); }
void unlock() { mutex_.unlock(); }
private:
mutable std::mutex mutex_;
- std::map<std::string, boost::any> data_;
+ std::map<std::string, std::any> data_;
};
typedef std::shared_ptr<Metadata> MetadataPtr;
diff --git a/src/ipa/raspberrypi/controller/pwl.cpp b/src/ipa/raspberrypi/controller/pwl.cpp
index aa134a1f..130c820b 100644
--- a/src/ipa/raspberrypi/controller/pwl.cpp
+++ b/src/ipa/raspberrypi/controller/pwl.cpp
@@ -114,6 +114,36 @@ Pwl::PerpType Pwl::Invert(Point const &xy, Point &perp, int &span,
return PerpType::None;
}
+Pwl Pwl::Inverse(bool *true_inverse, const double eps) const
+{
+ bool appended = false, prepended = false, neither = false;
+ Pwl inverse;
+
+ for (Point const &p : points_) {
+ if (inverse.Empty())
+ inverse.Append(p.y, p.x, eps);
+ else if (std::abs(inverse.points_.back().x - p.y) <= eps ||
+ std::abs(inverse.points_.front().x - p.y) <= eps)
+ /* do nothing */;
+ else if (p.y > inverse.points_.back().x) {
+ inverse.Append(p.y, p.x, eps);
+ appended = true;
+ } else if (p.y < inverse.points_.front().x) {
+ inverse.Prepend(p.y, p.x, eps);
+ prepended = true;
+ } else
+ neither = true;
+ }
+
+ // This is not a proper inverse if we found ourselves putting points
+ // onto both ends of the inverse, or if there were points that couldn't
+ // go on either.
+ if (true_inverse)
+ *true_inverse = !(neither || (appended && prepended));
+
+ return inverse;
+}
+
Pwl Pwl::Compose(Pwl const &other, const double eps) const
{
double this_x = points_[0].x, this_y = points_[0].y;
diff --git a/src/ipa/raspberrypi/controller/pwl.hpp b/src/ipa/raspberrypi/controller/pwl.hpp
index 4f168551..484672f6 100644
--- a/src/ipa/raspberrypi/controller/pwl.hpp
+++ b/src/ipa/raspberrypi/controller/pwl.hpp
@@ -80,6 +80,9 @@ public:
};
PerpType Invert(Point const &xy, Point &perp, int &span,
const double eps = 1e-6) const;
+ // Compute the inverse function. Indicate if it is a proper (true)
+ // inverse, or only a best effort (e.g. input was non-monotonic).
+ Pwl Inverse(bool *true_inverse = nullptr, const double eps = 1e-6) const;
// Compose two Pwls together, doing "this" first and "other" after.
Pwl Compose(Pwl const &other, const double eps = 1e-6) const;
// Apply function to (x,y) values at every control point.
diff --git a/src/ipa/raspberrypi/controller/rpi/agc.cpp b/src/ipa/raspberrypi/controller/rpi/agc.cpp
index df4d3647..f57783f8 100644
--- a/src/ipa/raspberrypi/controller/rpi/agc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/agc.cpp
@@ -9,16 +9,21 @@
#include "linux/bcm2835-isp.h"
+#include <libcamera/base/log.h>
+
#include "../awb_status.h"
#include "../device_status.h"
#include "../histogram.hpp"
-#include "../logging.hpp"
#include "../lux_status.h"
#include "../metadata.hpp"
#include "agc.hpp"
using namespace RPiController;
+using namespace libcamera;
+using libcamera::utils::Duration;
+
+LOG_DEFINE_CATEGORY(RPiAgc)
#define NAME "rpi.agc"
@@ -51,19 +56,26 @@ read_metering_modes(std::map<std::string, AgcMeteringMode> &metering_modes,
return first;
}
-static int read_double_list(std::vector<double> &list,
- boost::property_tree::ptree const &params)
+static int read_list(std::vector<double> &list,
+ boost::property_tree::ptree const &params)
{
for (auto &p : params)
list.push_back(p.second.get_value<double>());
return list.size();
}
+static int read_list(std::vector<Duration> &list,
+ boost::property_tree::ptree const &params)
+{
+ for (auto &p : params)
+ list.push_back(p.second.get_value<double>() * 1us);
+ return list.size();
+}
+
void AgcExposureMode::Read(boost::property_tree::ptree const &params)
{
- int num_shutters =
- read_double_list(shutter, params.get_child("shutter"));
- int num_ags = read_double_list(gain, params.get_child("gain"));
+ int num_shutters = read_list(shutter, params.get_child("shutter"));
+ int num_ags = read_list(gain, params.get_child("gain"));
if (num_shutters < 2 || num_ags < 2)
throw std::runtime_error(
"AgcConfig: must have at least two entries in exposure profile");
@@ -128,7 +140,7 @@ static std::string read_constraint_modes(
void AgcConfig::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("AgcConfig");
+ LOG(RPiAgc, Debug) << "AgcConfig";
default_metering_mode = read_metering_modes(
metering_modes, params.get_child("metering_modes"));
default_exposure_mode = read_exposure_modes(
@@ -138,25 +150,28 @@ void AgcConfig::Read(boost::property_tree::ptree const &params)
Y_target.Read(params.get_child("y_target"));
speed = params.get<double>("speed", 0.2);
startup_frames = params.get<uint16_t>("startup_frames", 10);
+ convergence_frames = params.get<unsigned int>("convergence_frames", 6);
fast_reduce_threshold =
params.get<double>("fast_reduce_threshold", 0.4);
base_ev = params.get<double>("base_ev", 1.0);
+ // Start with quite a low value as ramping up is easier than ramping down.
+ default_exposure_time = params.get<double>("default_exposure_time", 1000) * 1us;
+ default_analogue_gain = params.get<double>("default_analogue_gain", 1.0);
}
Agc::Agc(Controller *controller)
: AgcAlgorithm(controller), metering_mode_(nullptr),
exposure_mode_(nullptr), constraint_mode_(nullptr),
- frame_count_(0), lock_count_(0)
+ frame_count_(0), lock_count_(0),
+ last_target_exposure_(0s),
+ ev_(1.0), flicker_period_(0s),
+ max_shutter_(0s), fixed_shutter_(0s), fixed_analogue_gain_(0.0)
{
- ev_ = status_.ev = 1.0;
- flicker_period_ = status_.flicker_period = 0.0;
- fixed_shutter_ = status_.fixed_shutter = 0;
- fixed_analogue_gain_ = status_.fixed_analogue_gain = 0.0;
- // set to zero initially, so we can tell it's not been calculated
- status_.total_exposure_value = 0.0;
- status_.target_exposure_value = 0.0;
- status_.locked = false;
- output_status_ = status_;
+ memset(&awb_, 0, sizeof(awb_));
+	// Setting status_.total_exposure_value to zero initially tells us
+ // it's not been calculated yet (i.e. Process hasn't yet run).
+ memset(&status_, 0, sizeof(status_));
+ status_.ev = ev_;
}
char const *Agc::Name() const
@@ -166,7 +181,7 @@ char const *Agc::Name() const
void Agc::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("Agc");
+ LOG(RPiAgc, Debug) << "Agc";
config_.Read(params);
// Set the config's defaults (which are the first ones it read) as our
// current modes, until someone changes them. (they're all known to
@@ -177,122 +192,155 @@ void Agc::Read(boost::property_tree::ptree const &params)
exposure_mode_ = &config_.exposure_modes[exposure_mode_name_];
constraint_mode_name_ = config_.default_constraint_mode;
constraint_mode_ = &config_.constraint_modes[constraint_mode_name_];
+ // Set up the "last shutter/gain" values, in case AGC starts "disabled".
+ status_.shutter_time = config_.default_exposure_time;
+ status_.analogue_gain = config_.default_analogue_gain;
+}
+
+bool Agc::IsPaused() const
+{
+ return false;
+}
+
+void Agc::Pause()
+{
+ fixed_shutter_ = status_.shutter_time;
+ fixed_analogue_gain_ = status_.analogue_gain;
+}
+
+void Agc::Resume()
+{
+ fixed_shutter_ = 0s;
+ fixed_analogue_gain_ = 0;
+}
+
+unsigned int Agc::GetConvergenceFrames() const
+{
+ // If shutter and gain have been explicitly set, there is no
+ // convergence to happen, so no need to drop any frames - return zero.
+ if (fixed_shutter_ && fixed_analogue_gain_)
+ return 0;
+ else
+ return config_.convergence_frames;
}
void Agc::SetEv(double ev)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
ev_ = ev;
}
-void Agc::SetFlickerPeriod(double flicker_period)
+void Agc::SetFlickerPeriod(Duration flicker_period)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
flicker_period_ = flicker_period;
}
-void Agc::SetFixedShutter(double fixed_shutter)
+void Agc::SetMaxShutter(Duration max_shutter)
+{
+ max_shutter_ = max_shutter;
+}
+
+void Agc::SetFixedShutter(Duration fixed_shutter)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
fixed_shutter_ = fixed_shutter;
+ // Set this in case someone calls Pause() straight after.
+ status_.shutter_time = clipShutter(fixed_shutter_);
}
void Agc::SetFixedAnalogueGain(double fixed_analogue_gain)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
fixed_analogue_gain_ = fixed_analogue_gain;
+ // Set this in case someone calls Pause() straight after.
+ status_.analogue_gain = fixed_analogue_gain;
}
void Agc::SetMeteringMode(std::string const &metering_mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
metering_mode_name_ = metering_mode_name;
}
void Agc::SetExposureMode(std::string const &exposure_mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
exposure_mode_name_ = exposure_mode_name;
}
void Agc::SetConstraintMode(std::string const &constraint_mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
constraint_mode_name_ = constraint_mode_name;
}
void Agc::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
Metadata *metadata)
{
- // On a mode switch, it's possible the exposure profile could change,
- // so we run through the dividing up of exposure/gain again and
- // write the results into the metadata we've been given.
- if (status_.total_exposure_value) {
- housekeepConfig();
- divvyupExposure();
- writeAndFinish(metadata, false);
+ housekeepConfig();
+
+ Duration fixed_shutter = clipShutter(fixed_shutter_);
+ if (fixed_shutter && fixed_analogue_gain_) {
+ // We're going to reset the algorithm here with these fixed values.
+
+ fetchAwbStatus(metadata);
+ double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
+ ASSERT(min_colour_gain != 0.0);
+
+ // This is the equivalent of computeTargetExposure and applyDigitalGain.
+ target_.total_exposure_no_dg = fixed_shutter * fixed_analogue_gain_;
+ target_.total_exposure = target_.total_exposure_no_dg / min_colour_gain;
+
+ // Equivalent of filterExposure. This resets any "history".
+ filtered_ = target_;
+
+ // Equivalent of divideUpExposure.
+ filtered_.shutter = fixed_shutter;
+ filtered_.analogue_gain = fixed_analogue_gain_;
+ } else if (status_.total_exposure_value) {
+ // On a mode switch, it's possible the exposure profile could change,
+ // or a fixed exposure/gain might be set so we divide up the exposure/
+ // gain again, but we don't change any target values.
+ divideUpExposure();
+ } else {
+ // We come through here on startup, when at least one of the shutter
+ // or gain has not been fixed. We must still write those values out so
+ // that they will be applied immediately. We supply some arbitrary defaults
+ // for any that weren't set.
+
+ // Equivalent of divideUpExposure.
+ filtered_.shutter = fixed_shutter ? fixed_shutter : config_.default_exposure_time;
+ filtered_.analogue_gain = fixed_analogue_gain_ ? fixed_analogue_gain_ : config_.default_analogue_gain;
}
+
+ writeAndFinish(metadata, false);
}
void Agc::Prepare(Metadata *image_metadata)
{
- AgcStatus status;
- {
- std::unique_lock<std::mutex> lock(output_mutex_);
- status = output_status_;
- }
- int lock_count = lock_count_;
- lock_count_ = 0;
- status.digital_gain = 1.0;
+ status_.digital_gain = 1.0;
+ fetchAwbStatus(image_metadata); // always fetch it so that Process knows it's been done
+
if (status_.total_exposure_value) {
// Process has run, so we have meaningful values.
DeviceStatus device_status;
if (image_metadata->Get("device.status", device_status) == 0) {
- double actual_exposure = device_status.shutter_speed *
- device_status.analogue_gain;
+ Duration actual_exposure = device_status.shutter_speed *
+ device_status.analogue_gain;
if (actual_exposure) {
- status.digital_gain =
+ status_.digital_gain =
status_.total_exposure_value /
actual_exposure;
- RPI_LOG("Want total exposure " << status_.total_exposure_value);
+ LOG(RPiAgc, Debug) << "Want total exposure " << status_.total_exposure_value;
// Never ask for a gain < 1.0, and also impose
// some upper limit. Make it customisable?
- status.digital_gain = std::max(
+ status_.digital_gain = std::max(
1.0,
- std::min(status.digital_gain, 4.0));
- RPI_LOG("Actual exposure " << actual_exposure);
- RPI_LOG("Use digital_gain " << status.digital_gain);
- RPI_LOG("Effective exposure " << actual_exposure * status.digital_gain);
+ std::min(status_.digital_gain, 4.0));
+ LOG(RPiAgc, Debug) << "Actual exposure " << actual_exposure;
+ LOG(RPiAgc, Debug) << "Use digital_gain " << status_.digital_gain;
+ LOG(RPiAgc, Debug) << "Effective exposure "
+ << actual_exposure * status_.digital_gain;
// Decide whether AEC/AGC has converged.
- // Insist AGC is steady for MAX_LOCK_COUNT
- // frames before we say we are "locked".
- // (The hard-coded constants may need to
- // become customisable.)
- if (status.target_exposure_value) {
-#define MAX_LOCK_COUNT 3
- double err = 0.10 * status.target_exposure_value + 200;
- if (actual_exposure <
- status.target_exposure_value + err
- && actual_exposure >
- status.target_exposure_value - err)
- lock_count_ =
- std::min(lock_count + 1,
- MAX_LOCK_COUNT);
- else if (actual_exposure <
- status.target_exposure_value
- + 1.5 * err &&
- actual_exposure >
- status.target_exposure_value
- - 1.5 * err)
- lock_count_ = lock_count;
- RPI_LOG("Lock count: " << lock_count_);
- }
+ updateLockStatus(device_status);
}
} else
- RPI_LOG(Name() << ": no device metadata");
- status.locked = lock_count_ >= MAX_LOCK_COUNT;
- //printf("%s\n", status.locked ? "+++++++++" : "-");
- image_metadata->Set("agc.status", status);
+ LOG(RPiAgc, Warning) << Name() << ": no device metadata";
+ image_metadata->Set("agc.status", status_);
}
}
@@ -312,16 +360,53 @@ void Agc::Process(StatisticsPtr &stats, Metadata *image_metadata)
// Some of the exposure has to be applied as digital gain, so work out
// what that is. This function also tells us whether it's decided to
// "desaturate" the image more quickly.
- bool desaturate = applyDigitalGain(image_metadata, gain, target_Y);
+ bool desaturate = applyDigitalGain(gain, target_Y);
// The results have to be filtered so as not to change too rapidly.
filterExposure(desaturate);
- // The last thing is to divvy up the exposure value into a shutter time
+ // The last thing is to divide up the exposure value into a shutter time
// and analogue_gain, according to the current exposure mode.
- divvyupExposure();
+ divideUpExposure();
// Finally advertise what we've done.
writeAndFinish(image_metadata, desaturate);
}
+void Agc::updateLockStatus(DeviceStatus const &device_status)
+{
+ const double ERROR_FACTOR = 0.10; // make these customisable?
+ const int MAX_LOCK_COUNT = 5;
+ // Reset "lock count" when we exceed this multiple of ERROR_FACTOR
+ const double RESET_MARGIN = 1.5;
+
+ // Add 200us to the exposure time error to allow for line quantisation.
+ Duration exposure_error = last_device_status_.shutter_speed * ERROR_FACTOR + 200us;
+ double gain_error = last_device_status_.analogue_gain * ERROR_FACTOR;
+ Duration target_error = last_target_exposure_ * ERROR_FACTOR;
+
+ // Note that we don't know the exposure/gain limits of the sensor, so
+ // the values we keep requesting may be unachievable. For this reason
+ // we only insist that we're close to values in the past few frames.
+ if (device_status.shutter_speed > last_device_status_.shutter_speed - exposure_error &&
+ device_status.shutter_speed < last_device_status_.shutter_speed + exposure_error &&
+ device_status.analogue_gain > last_device_status_.analogue_gain - gain_error &&
+ device_status.analogue_gain < last_device_status_.analogue_gain + gain_error &&
+ status_.target_exposure_value > last_target_exposure_ - target_error &&
+ status_.target_exposure_value < last_target_exposure_ + target_error)
+ lock_count_ = std::min(lock_count_ + 1, MAX_LOCK_COUNT);
+ else if (device_status.shutter_speed < last_device_status_.shutter_speed - RESET_MARGIN * exposure_error ||
+ device_status.shutter_speed > last_device_status_.shutter_speed + RESET_MARGIN * exposure_error ||
+ device_status.analogue_gain < last_device_status_.analogue_gain - RESET_MARGIN * gain_error ||
+ device_status.analogue_gain > last_device_status_.analogue_gain + RESET_MARGIN * gain_error ||
+ status_.target_exposure_value < last_target_exposure_ - RESET_MARGIN * target_error ||
+ status_.target_exposure_value > last_target_exposure_ + RESET_MARGIN * target_error)
+ lock_count_ = 0;
+
+ last_device_status_ = device_status;
+ last_target_exposure_ = status_.target_exposure_value;
+
+ LOG(RPiAgc, Debug) << "Lock count updated to " << lock_count_;
+ status_.locked = lock_count_ == MAX_LOCK_COUNT;
+}
+
static void copy_string(std::string const &s, char *d, size_t size)
{
size_t length = s.copy(d, size - 1);
@@ -331,55 +416,47 @@ static void copy_string(std::string const &s, char *d, size_t size)
void Agc::housekeepConfig()
{
// First fetch all the up-to-date settings, so no one else has to do it.
- std::string new_exposure_mode_name, new_constraint_mode_name,
- new_metering_mode_name;
- {
- std::unique_lock<std::mutex> lock(settings_mutex_);
- new_metering_mode_name = metering_mode_name_;
- new_exposure_mode_name = exposure_mode_name_;
- new_constraint_mode_name = constraint_mode_name_;
- status_.ev = ev_;
- status_.fixed_shutter = fixed_shutter_;
- status_.fixed_analogue_gain = fixed_analogue_gain_;
- status_.flicker_period = flicker_period_;
- }
- RPI_LOG("ev " << status_.ev << " fixed_shutter "
- << status_.fixed_shutter << " fixed_analogue_gain "
- << status_.fixed_analogue_gain);
+ status_.ev = ev_;
+ status_.fixed_shutter = clipShutter(fixed_shutter_);
+ status_.fixed_analogue_gain = fixed_analogue_gain_;
+ status_.flicker_period = flicker_period_;
+ LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixed_shutter "
+ << status_.fixed_shutter << " fixed_analogue_gain "
+ << status_.fixed_analogue_gain;
// Make sure the "mode" pointers point to the up-to-date things, if
// they've changed.
- if (strcmp(new_metering_mode_name.c_str(), status_.metering_mode)) {
- auto it = config_.metering_modes.find(new_metering_mode_name);
+ if (strcmp(metering_mode_name_.c_str(), status_.metering_mode)) {
+ auto it = config_.metering_modes.find(metering_mode_name_);
if (it == config_.metering_modes.end())
throw std::runtime_error("Agc: no metering mode " +
- new_metering_mode_name);
+ metering_mode_name_);
metering_mode_ = &it->second;
- copy_string(new_metering_mode_name, status_.metering_mode,
+ copy_string(metering_mode_name_, status_.metering_mode,
sizeof(status_.metering_mode));
}
- if (strcmp(new_exposure_mode_name.c_str(), status_.exposure_mode)) {
- auto it = config_.exposure_modes.find(new_exposure_mode_name);
+ if (strcmp(exposure_mode_name_.c_str(), status_.exposure_mode)) {
+ auto it = config_.exposure_modes.find(exposure_mode_name_);
if (it == config_.exposure_modes.end())
throw std::runtime_error("Agc: no exposure profile " +
- new_exposure_mode_name);
+ exposure_mode_name_);
exposure_mode_ = &it->second;
- copy_string(new_exposure_mode_name, status_.exposure_mode,
+ copy_string(exposure_mode_name_, status_.exposure_mode,
sizeof(status_.exposure_mode));
}
- if (strcmp(new_constraint_mode_name.c_str(), status_.constraint_mode)) {
+ if (strcmp(constraint_mode_name_.c_str(), status_.constraint_mode)) {
auto it =
- config_.constraint_modes.find(new_constraint_mode_name);
+ config_.constraint_modes.find(constraint_mode_name_);
if (it == config_.constraint_modes.end())
throw std::runtime_error("Agc: no constraint list " +
- new_constraint_mode_name);
+ constraint_mode_name_);
constraint_mode_ = &it->second;
- copy_string(new_constraint_mode_name, status_.constraint_mode,
+ copy_string(constraint_mode_name_, status_.constraint_mode,
sizeof(status_.constraint_mode));
}
- RPI_LOG("exposure_mode "
- << new_exposure_mode_name << " constraint_mode "
- << new_constraint_mode_name << " metering_mode "
- << new_metering_mode_name);
+ LOG(RPiAgc, Debug) << "exposure_mode "
+ << exposure_mode_name_ << " constraint_mode "
+ << constraint_mode_name_ << " metering_mode "
+ << metering_mode_name_;
}
void Agc::fetchCurrentExposure(Metadata *image_metadata)
@@ -393,30 +470,44 @@ void Agc::fetchCurrentExposure(Metadata *image_metadata)
current_.analogue_gain = device_status->analogue_gain;
AgcStatus *agc_status =
image_metadata->GetLocked<AgcStatus>("agc.status");
- current_.total_exposure = agc_status ? agc_status->total_exposure_value : 0;
+ current_.total_exposure = agc_status ? agc_status->total_exposure_value : 0s;
current_.total_exposure_no_dg = current_.shutter * current_.analogue_gain;
}
-static double compute_initial_Y(bcm2835_isp_stats *stats, Metadata *image_metadata,
- double weights[])
+void Agc::fetchAwbStatus(Metadata *image_metadata)
+{
+ awb_.gain_r = 1.0; // in case not found in metadata
+ awb_.gain_g = 1.0;
+ awb_.gain_b = 1.0;
+ if (image_metadata->Get("awb.status", awb_) != 0)
+ LOG(RPiAgc, Debug) << "Agc: no AWB status found";
+}
+
+static double compute_initial_Y(bcm2835_isp_stats *stats, AwbStatus const &awb,
+ double weights[], double gain)
{
bcm2835_isp_stats_region *regions = stats->agc_stats;
- struct AwbStatus awb;
- awb.gain_r = awb.gain_g = awb.gain_b = 1.0; // in case no metadata
- if (image_metadata->Get("awb.status", awb) != 0)
- RPI_WARN("Agc: no AWB status found");
- double Y_sum = 0, weight_sum = 0;
+ // Note how the calculation below means that equal weights give you
+ // "average" metering (i.e. all pixels equally important).
+ double R_sum = 0, G_sum = 0, B_sum = 0, pixel_sum = 0;
for (int i = 0; i < AGC_STATS_SIZE; i++) {
- if (regions[i].counted == 0)
- continue;
- weight_sum += weights[i];
- double Y = regions[i].r_sum * awb.gain_r * .299 +
- regions[i].g_sum * awb.gain_g * .587 +
- regions[i].b_sum * awb.gain_b * .114;
- Y /= regions[i].counted;
- Y_sum += Y * weights[i];
+ double counted = regions[i].counted;
+ double r_sum = std::min(regions[i].r_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ double g_sum = std::min(regions[i].g_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ double b_sum = std::min(regions[i].b_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ R_sum += r_sum * weights[i];
+ G_sum += g_sum * weights[i];
+ B_sum += b_sum * weights[i];
+ pixel_sum += counted * weights[i];
}
- return Y_sum / weight_sum / (1 << PIPELINE_BITS);
+ if (pixel_sum == 0.0) {
+ LOG(RPiAgc, Warning) << "compute_initial_Y: pixel_sum is zero";
+ return 0;
+ }
+ double Y_sum = R_sum * awb.gain_r * .299 +
+ G_sum * awb.gain_g * .587 +
+ B_sum * awb.gain_b * .114;
+ return Y_sum / pixel_sum / (1 << PIPELINE_BITS);
}
// We handle extra gain through EV by adjusting our Y targets. However, you
@@ -443,7 +534,7 @@ void Agc::computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
struct LuxStatus lux = {};
lux.lux = 400; // default lux level to 400 in case no metadata found
if (image_metadata->Get("lux.status", lux) != 0)
- RPI_WARN("Agc: no lux level found");
+ LOG(RPiAgc, Warning) << "Agc: no lux level found";
Histogram h(statistics->hist[0].g_hist, NUM_HISTOGRAM_BINS);
double ev_gain = status_.ev * config_.base_ev;
// The initial gain and target_Y come from some of the regions. After
@@ -451,67 +542,84 @@ void Agc::computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
target_Y =
config_.Y_target.Eval(config_.Y_target.Domain().Clip(lux.lux));
target_Y = std::min(EV_GAIN_Y_TARGET_LIMIT, target_Y * ev_gain);
- double initial_Y = compute_initial_Y(statistics, image_metadata,
- metering_mode_->weights);
- gain = std::min(10.0, target_Y / (initial_Y + .001));
- RPI_LOG("Initially Y " << initial_Y << " target " << target_Y
- << " gives gain " << gain);
+
+ // Do this calculation a few times as brightness increase can be
+ // non-linear when there are saturated regions.
+ gain = 1.0;
+ for (int i = 0; i < 8; i++) {
+ double initial_Y = compute_initial_Y(statistics, awb_,
+ metering_mode_->weights, gain);
+ double extra_gain = std::min(10.0, target_Y / (initial_Y + .001));
+ gain *= extra_gain;
+ LOG(RPiAgc, Debug) << "Initial Y " << initial_Y << " target " << target_Y
+ << " gives gain " << gain;
+ if (extra_gain < 1.01) // close enough
+ break;
+ }
+
for (auto &c : *constraint_mode_) {
double new_target_Y;
double new_gain =
constraint_compute_gain(c, h, lux.lux, ev_gain,
new_target_Y);
- RPI_LOG("Constraint has target_Y "
- << new_target_Y << " giving gain " << new_gain);
+ LOG(RPiAgc, Debug) << "Constraint has target_Y "
+ << new_target_Y << " giving gain " << new_gain;
if (c.bound == AgcConstraint::Bound::LOWER &&
new_gain > gain) {
- RPI_LOG("Lower bound constraint adopted");
+ LOG(RPiAgc, Debug) << "Lower bound constraint adopted";
gain = new_gain, target_Y = new_target_Y;
} else if (c.bound == AgcConstraint::Bound::UPPER &&
new_gain < gain) {
- RPI_LOG("Upper bound constraint adopted");
+ LOG(RPiAgc, Debug) << "Upper bound constraint adopted";
gain = new_gain, target_Y = new_target_Y;
}
}
- RPI_LOG("Final gain " << gain << " (target_Y " << target_Y << " ev "
- << status_.ev << " base_ev " << config_.base_ev
- << ")");
+ LOG(RPiAgc, Debug) << "Final gain " << gain << " (target_Y " << target_Y << " ev "
+ << status_.ev << " base_ev " << config_.base_ev
+ << ")";
}
void Agc::computeTargetExposure(double gain)
{
- // The statistics reflect the image without digital gain, so the final
- // total exposure we're aiming for is:
- target_.total_exposure = current_.total_exposure_no_dg * gain;
- // The final target exposure is also limited to what the exposure
- // mode allows.
- double max_total_exposure =
- (status_.fixed_shutter != 0.0
- ? status_.fixed_shutter
- : exposure_mode_->shutter.back()) *
- (status_.fixed_analogue_gain != 0.0
- ? status_.fixed_analogue_gain
- : exposure_mode_->gain.back());
- target_.total_exposure = std::min(target_.total_exposure,
- max_total_exposure);
- RPI_LOG("Target total_exposure " << target_.total_exposure);
-}
-
-bool Agc::applyDigitalGain(Metadata *image_metadata, double gain,
- double target_Y)
-{
- double dg = 1.0;
+ if (status_.fixed_shutter && status_.fixed_analogue_gain) {
+ // When ag and shutter are both fixed, we need to drive the
+ // total exposure so that we end up with a digital gain of at least
+ // 1/min_colour_gain. Otherwise we'd desaturate channels causing
+ // white to go cyan or magenta.
+ double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
+ ASSERT(min_colour_gain != 0.0);
+ target_.total_exposure =
+ status_.fixed_shutter * status_.fixed_analogue_gain / min_colour_gain;
+ } else {
+ // The statistics reflect the image without digital gain, so the final
+ // total exposure we're aiming for is:
+ target_.total_exposure = current_.total_exposure_no_dg * gain;
+ // The final target exposure is also limited to what the exposure
+ // mode allows.
+ Duration max_shutter = status_.fixed_shutter
+ ? status_.fixed_shutter
+ : exposure_mode_->shutter.back();
+ max_shutter = clipShutter(max_shutter);
+ Duration max_total_exposure =
+ max_shutter *
+ (status_.fixed_analogue_gain != 0.0
+ ? status_.fixed_analogue_gain
+ : exposure_mode_->gain.back());
+ target_.total_exposure = std::min(target_.total_exposure,
+ max_total_exposure);
+ }
+ LOG(RPiAgc, Debug) << "Target total_exposure " << target_.total_exposure;
+}
+
+bool Agc::applyDigitalGain(double gain, double target_Y)
+{
+ double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
+ ASSERT(min_colour_gain != 0.0);
+ double dg = 1.0 / min_colour_gain;
// I think this pipeline subtracts black level and rescales before we
// get the stats, so no need to worry about it.
- struct AwbStatus awb;
- if (image_metadata->Get("awb.status", awb) == 0) {
- double min_gain = std::min(awb.gain_r,
- std::min(awb.gain_g, awb.gain_b));
- dg *= std::max(1.0, 1.0 / min_gain);
- } else
- RPI_WARN("Agc: no AWB status found");
- RPI_LOG("after AWB, target dg " << dg << " gain " << gain
- << " target_Y " << target_Y);
+ LOG(RPiAgc, Debug) << "after AWB, target dg " << dg << " gain " << gain
+ << " target_Y " << target_Y;
// Finally, if we're trying to reduce exposure but the target_Y is
// "close" to 1.0, then the gain computed for that constraint will be
// only slightly less than one, because the measured Y can never be
@@ -523,16 +631,21 @@ bool Agc::applyDigitalGain(Metadata *image_metadata, double gain,
gain < sqrt(target_Y);
if (desaturate)
dg /= config_.fast_reduce_threshold;
- RPI_LOG("Digital gain " << dg << " desaturate? " << desaturate);
+ LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? " << desaturate;
target_.total_exposure_no_dg = target_.total_exposure / dg;
- RPI_LOG("Target total_exposure_no_dg " << target_.total_exposure_no_dg);
+ LOG(RPiAgc, Debug) << "Target total_exposure_no_dg " << target_.total_exposure_no_dg;
return desaturate;
}
void Agc::filterExposure(bool desaturate)
{
- double speed = frame_count_ <= config_.startup_frames ? 1.0 : config_.speed;
- if (filtered_.total_exposure == 0.0) {
+ double speed = config_.speed;
+ // AGC adapts instantly if both shutter and gain are directly specified
+ // or we're in the startup phase.
+ if ((status_.fixed_shutter && status_.fixed_analogue_gain) ||
+ frame_count_ <= config_.startup_frames)
+ speed = 1.0;
+ if (!filtered_.total_exposure) {
filtered_.total_exposure = target_.total_exposure;
filtered_.total_exposure_no_dg = target_.total_exposure_no_dg;
} else {
@@ -560,35 +673,38 @@ void Agc::filterExposure(bool desaturate)
filtered_.total_exposure * config_.fast_reduce_threshold)
filtered_.total_exposure_no_dg = filtered_.total_exposure *
config_.fast_reduce_threshold;
- RPI_LOG("After filtering, total_exposure " << filtered_.total_exposure <<
- " no dg " << filtered_.total_exposure_no_dg);
+ LOG(RPiAgc, Debug) << "After filtering, total_exposure " << filtered_.total_exposure
+ << " no dg " << filtered_.total_exposure_no_dg;
}
-void Agc::divvyupExposure()
+void Agc::divideUpExposure()
{
// Sending the fixed shutter/gain cases through the same code may seem
	// unnecessary, but it will make more sense when we extend this to cover
// variable aperture.
- double exposure_value = filtered_.total_exposure_no_dg;
- double shutter_time, analogue_gain;
- shutter_time = status_.fixed_shutter != 0.0
+ Duration exposure_value = filtered_.total_exposure_no_dg;
+ Duration shutter_time;
+ double analogue_gain;
+ shutter_time = status_.fixed_shutter
? status_.fixed_shutter
: exposure_mode_->shutter[0];
+ shutter_time = clipShutter(shutter_time);
analogue_gain = status_.fixed_analogue_gain != 0.0
? status_.fixed_analogue_gain
: exposure_mode_->gain[0];
if (shutter_time * analogue_gain < exposure_value) {
for (unsigned int stage = 1;
stage < exposure_mode_->gain.size(); stage++) {
- if (status_.fixed_shutter == 0.0) {
- if (exposure_mode_->shutter[stage] *
- analogue_gain >=
+ if (!status_.fixed_shutter) {
+ Duration stage_shutter =
+ clipShutter(exposure_mode_->shutter[stage]);
+ if (stage_shutter * analogue_gain >=
exposure_value) {
shutter_time =
exposure_value / analogue_gain;
break;
}
- shutter_time = exposure_mode_->shutter[stage];
+ shutter_time = stage_shutter;
}
if (status_.fixed_analogue_gain == 0.0) {
if (exposure_mode_->gain[stage] *
@@ -602,16 +718,15 @@ void Agc::divvyupExposure()
}
}
}
- RPI_LOG("Divided up shutter and gain are " << shutter_time << " and "
- << analogue_gain);
+ LOG(RPiAgc, Debug) << "Divided up shutter and gain are " << shutter_time << " and "
+ << analogue_gain;
// Finally adjust shutter time for flicker avoidance (require both
// shutter and gain not to be fixed).
- if (status_.fixed_shutter == 0.0 &&
- status_.fixed_analogue_gain == 0.0 &&
- status_.flicker_period != 0.0) {
+ if (!status_.fixed_shutter && !status_.fixed_analogue_gain &&
+ status_.flicker_period) {
int flicker_periods = shutter_time / status_.flicker_period;
- if (flicker_periods > 0) {
- double new_shutter_time = flicker_periods * status_.flicker_period;
+ if (flicker_periods) {
+ Duration new_shutter_time = flicker_periods * status_.flicker_period;
analogue_gain *= shutter_time / new_shutter_time;
// We should still not allow the ag to go over the
// largest value in the exposure mode. Note that this
@@ -621,8 +736,8 @@ void Agc::divvyupExposure()
exposure_mode_->gain.back());
shutter_time = new_shutter_time;
}
- RPI_LOG("After flicker avoidance, shutter "
- << shutter_time << " gain " << analogue_gain);
+ LOG(RPiAgc, Debug) << "After flicker avoidance, shutter "
+ << shutter_time << " gain " << analogue_gain;
}
filtered_.shutter = shutter_time;
filtered_.analogue_gain = analogue_gain;
@@ -631,20 +746,23 @@ void Agc::divvyupExposure()
void Agc::writeAndFinish(Metadata *image_metadata, bool desaturate)
{
status_.total_exposure_value = filtered_.total_exposure;
- status_.target_exposure_value = desaturate ? 0 : target_.total_exposure_no_dg;
+ status_.target_exposure_value = desaturate ? 0s : target_.total_exposure_no_dg;
status_.shutter_time = filtered_.shutter;
status_.analogue_gain = filtered_.analogue_gain;
- {
- std::unique_lock<std::mutex> lock(output_mutex_);
- output_status_ = status_;
- }
// Write to metadata as well, in case anyone wants to update the camera
// immediately.
image_metadata->Set("agc.status", status_);
- RPI_LOG("Output written, total exposure requested is "
- << filtered_.total_exposure);
- RPI_LOG("Camera exposure update: shutter time " << filtered_.shutter <<
- " analogue gain " << filtered_.analogue_gain);
+ LOG(RPiAgc, Debug) << "Output written, total exposure requested is "
+ << filtered_.total_exposure;
+ LOG(RPiAgc, Debug) << "Camera exposure update: shutter time " << filtered_.shutter
+ << " analogue gain " << filtered_.analogue_gain;
+}
+
+Duration Agc::clipShutter(Duration shutter)
+{
+ if (max_shutter_)
+ shutter = std::min(shutter, max_shutter_);
+ return shutter;
}
// Register algorithm with the system.
diff --git a/src/ipa/raspberrypi/controller/rpi/agc.hpp b/src/ipa/raspberrypi/controller/rpi/agc.hpp
index ba7ae092..85067dc6 100644
--- a/src/ipa/raspberrypi/controller/rpi/agc.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/agc.hpp
@@ -9,6 +9,8 @@
#include <vector>
#include <mutex>
+#include <libcamera/base/utils.h>
+
#include "../agc_algorithm.hpp"
#include "../agc_status.h"
#include "../pwl.hpp"
@@ -22,13 +24,15 @@
namespace RPiController {
+using namespace std::literals::chrono_literals;
+
struct AgcMeteringMode {
double weights[AGC_STATS_SIZE];
void Read(boost::property_tree::ptree const &params);
};
struct AgcExposureMode {
- std::vector<double> shutter;
+ std::vector<libcamera::utils::Duration> shutter;
std::vector<double> gain;
void Read(boost::property_tree::ptree const &params);
};
@@ -52,6 +56,7 @@ struct AgcConfig {
Pwl Y_target;
double speed;
uint16_t startup_frames;
+ unsigned int convergence_frames;
double max_change;
double min_change;
double fast_reduce_threshold;
@@ -60,6 +65,8 @@ struct AgcConfig {
std::string default_exposure_mode;
std::string default_constraint_mode;
double base_ev;
+ libcamera::utils::Duration default_exposure_time;
+ double default_analogue_gain;
};
class Agc : public AgcAlgorithm
@@ -68,9 +75,15 @@ public:
Agc(Controller *controller);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
+ // AGC handles "pausing" for itself.
+ bool IsPaused() const override;
+ void Pause() override;
+ void Resume() override;
+ unsigned int GetConvergenceFrames() const override;
void SetEv(double ev) override;
- void SetFlickerPeriod(double flicker_period) override;
- void SetFixedShutter(double fixed_shutter) override; // microseconds
+ void SetFlickerPeriod(libcamera::utils::Duration flicker_period) override;
+ void SetMaxShutter(libcamera::utils::Duration max_shutter) override;
+ void SetFixedShutter(libcamera::utils::Duration fixed_shutter) override;
void SetFixedAnalogueGain(double fixed_analogue_gain) override;
void SetMeteringMode(std::string const &metering_mode_name) override;
void SetExposureMode(std::string const &exposure_mode_name) override;
@@ -80,44 +93,47 @@ public:
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
private:
+ void updateLockStatus(DeviceStatus const &device_status);
AgcConfig config_;
void housekeepConfig();
void fetchCurrentExposure(Metadata *image_metadata);
+ void fetchAwbStatus(Metadata *image_metadata);
void computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
double &gain, double &target_Y);
void computeTargetExposure(double gain);
- bool applyDigitalGain(Metadata *image_metadata, double gain,
- double target_Y);
+ bool applyDigitalGain(double gain, double target_Y);
void filterExposure(bool desaturate);
- void divvyupExposure();
+ void divideUpExposure();
void writeAndFinish(Metadata *image_metadata, bool desaturate);
+ libcamera::utils::Duration clipShutter(libcamera::utils::Duration shutter);
AgcMeteringMode *metering_mode_;
AgcExposureMode *exposure_mode_;
AgcConstraintMode *constraint_mode_;
uint64_t frame_count_;
+ AwbStatus awb_;
struct ExposureValues {
- ExposureValues() : shutter(0), analogue_gain(0),
- total_exposure(0), total_exposure_no_dg(0) {}
- double shutter;
+ ExposureValues() : shutter(0s), analogue_gain(0),
+ total_exposure(0s), total_exposure_no_dg(0s) {}
+ libcamera::utils::Duration shutter;
double analogue_gain;
- double total_exposure;
- double total_exposure_no_dg; // without digital gain
+ libcamera::utils::Duration total_exposure;
+ libcamera::utils::Duration total_exposure_no_dg; // without digital gain
};
ExposureValues current_; // values for the current frame
ExposureValues target_; // calculate the values we want here
ExposureValues filtered_; // these values are filtered towards target
- AgcStatus status_; // to "latch" settings so they can't change
- AgcStatus output_status_; // the status we will write out
- std::mutex output_mutex_;
+ AgcStatus status_;
int lock_count_;
+ DeviceStatus last_device_status_;
+ libcamera::utils::Duration last_target_exposure_;
// Below here the "settings" that applications can change.
- std::mutex settings_mutex_;
std::string metering_mode_name_;
std::string exposure_mode_name_;
std::string constraint_mode_name_;
double ev_;
- double flicker_period_;
- double fixed_shutter_;
+ libcamera::utils::Duration flicker_period_;
+ libcamera::utils::Duration max_shutter_;
+ libcamera::utils::Duration fixed_shutter_;
double fixed_analogue_gain_;
};
diff --git a/src/ipa/raspberrypi/controller/rpi/alsc.cpp b/src/ipa/raspberrypi/controller/rpi/alsc.cpp
index 42fbc8a4..be3d1ae4 100644
--- a/src/ipa/raspberrypi/controller/rpi/alsc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/alsc.cpp
@@ -6,12 +6,17 @@
*/
#include <math.h>
+#include <libcamera/base/log.h>
+
#include "../awb_status.h"
#include "alsc.hpp"
// Raspberry Pi ALSC (Auto Lens Shading Correction) algorithm.
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiAlsc)
#define NAME "rpi.alsc"
@@ -110,15 +115,14 @@ static void read_calibrations(std::vector<AlscCalibration> &calibrations,
"Alsc: too few values for ct " +
std::to_string(ct) + " in " + name);
calibrations.push_back(calibration);
- RPI_LOG("Read " << name << " calibration for ct "
- << ct);
+ LOG(RPiAlsc, Debug)
+ << "Read " << name << " calibration for ct " << ct;
}
}
}
void Alsc::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("Alsc");
config_.frame_period = params.get<uint16_t>("frame_period", 12);
config_.startup_frames = params.get<uint16_t>("startup_frames", 10);
config_.speed = params.get<double>("speed", 0.05);
@@ -139,13 +143,15 @@ void Alsc::Read(boost::property_tree::ptree const &params)
read_lut(config_.luminance_lut,
params.get_child("luminance_lut"));
else
- RPI_WARN("Alsc: no luminance table - assume unity everywhere");
+ LOG(RPiAlsc, Warning)
+ << "no luminance table - assume unity everywhere";
read_calibrations(config_.calibrations_Cr, params, "calibrations_Cr");
read_calibrations(config_.calibrations_Cb, params, "calibrations_Cb");
config_.default_ct = params.get<double>("default_ct", 4500.0);
config_.threshold = params.get<double>("threshold", 1e-3);
}
+static double get_ct(Metadata *metadata, double default_ct);
static void get_cal_table(double ct,
std::vector<AlscCalibration> const &calibrations,
double cal_table[XY]);
@@ -163,7 +169,6 @@ static void add_luminance_to_tables(double results[3][Y][X],
void Alsc::Initialise()
{
- RPI_LOG("Alsc");
frame_count2_ = frame_count_ = frame_phase_ = 0;
first_time_ = true;
ct_ = config_.default_ct;
@@ -210,6 +215,9 @@ void Alsc::SwitchMode(CameraMode const &camera_mode,
// change.
bool reset_tables = first_time_ || compare_modes(camera_mode_, camera_mode);
+ // Believe the colour temperature from the AWB, if there is one.
+ ct_ = get_ct(metadata, ct_);
+
// Ensure the other thread isn't running while we do this.
waitForAysncThread();
@@ -248,22 +256,22 @@ void Alsc::SwitchMode(CameraMode const &camera_mode,
void Alsc::fetchAsyncResults()
{
- RPI_LOG("Fetch ALSC results");
+ LOG(RPiAlsc, Debug) << "Fetch ALSC results";
async_finished_ = false;
async_started_ = false;
memcpy(sync_results_, async_results_, sizeof(sync_results_));
}
-static double get_ct(Metadata *metadata, double default_ct)
+double get_ct(Metadata *metadata, double default_ct)
{
AwbStatus awb_status;
awb_status.temperature_K = default_ct; // in case nothing found
if (metadata->Get("awb.status", awb_status) != 0)
- RPI_WARN("Alsc: no AWB results found, using "
- << awb_status.temperature_K);
+ LOG(RPiAlsc, Debug) << "no AWB results found, using "
+ << awb_status.temperature_K;
else
- RPI_LOG("Alsc: AWB results found, using "
- << awb_status.temperature_K);
+ LOG(RPiAlsc, Debug) << "AWB results found, using "
+ << awb_status.temperature_K;
return awb_status.temperature_K;
}
@@ -285,7 +293,7 @@ static void copy_stats(bcm2835_isp_stats_region regions[XY], StatisticsPtr &stat
void Alsc::restartAsync(StatisticsPtr &stats, Metadata *image_metadata)
{
- RPI_LOG("Starting ALSC thread");
+ LOG(RPiAlsc, Debug) << "Starting ALSC calculation";
// Get the current colour temperature. It's all we need from the
// metadata. Default to the last CT value (which could be the default).
ct_ = get_ct(image_metadata, ct_);
@@ -293,7 +301,8 @@ void Alsc::restartAsync(StatisticsPtr &stats, Metadata *image_metadata)
// the LSC table that the pipeline applied to them.
AlscStatus alsc_status;
if (image_metadata->Get("alsc.status", alsc_status) != 0) {
- RPI_WARN("No ALSC status found for applied gains!");
+ LOG(RPiAlsc, Warning)
+ << "No ALSC status found for applied gains!";
for (int y = 0; y < Y; y++)
for (int x = 0; x < X; x++) {
alsc_status.r[y][x] = 1.0;
@@ -320,13 +329,12 @@ void Alsc::Prepare(Metadata *image_metadata)
double speed = frame_count_ < (int)config_.startup_frames
? 1.0
: config_.speed;
- RPI_LOG("Alsc: frame_count " << frame_count_ << " speed " << speed);
+ LOG(RPiAlsc, Debug)
+ << "frame_count " << frame_count_ << " speed " << speed;
{
std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ && async_finished_) {
- RPI_LOG("ALSC thread finished");
+ if (async_started_ && async_finished_)
fetchAsyncResults();
- }
}
// Apply IIR filter to results and program into the pipeline.
double *ptr = (double *)sync_results_,
@@ -350,13 +358,11 @@ void Alsc::Process(StatisticsPtr &stats, Metadata *image_metadata)
frame_phase_++;
if (frame_count2_ < (int)config_.startup_frames)
frame_count2_++;
- RPI_LOG("Alsc: frame_phase " << frame_phase_);
+ LOG(RPiAlsc, Debug) << "frame_phase " << frame_phase_;
if (frame_phase_ >= (int)config_.frame_period ||
frame_count2_ < (int)config_.startup_frames) {
- if (async_started_ == false) {
- RPI_LOG("ALSC thread starting");
+ if (async_started_ == false)
restartAsync(stats, image_metadata);
- }
}
}
@@ -387,25 +393,26 @@ void get_cal_table(double ct, std::vector<AlscCalibration> const &calibrations,
if (calibrations.empty()) {
for (int i = 0; i < XY; i++)
cal_table[i] = 1.0;
- RPI_LOG("Alsc: no calibrations found");
+ LOG(RPiAlsc, Debug) << "no calibrations found";
} else if (ct <= calibrations.front().ct) {
memcpy(cal_table, calibrations.front().table,
XY * sizeof(double));
- RPI_LOG("Alsc: using calibration for "
- << calibrations.front().ct);
+ LOG(RPiAlsc, Debug) << "using calibration for "
+ << calibrations.front().ct;
} else if (ct >= calibrations.back().ct) {
memcpy(cal_table, calibrations.back().table,
XY * sizeof(double));
- RPI_LOG("Alsc: using calibration for "
- << calibrations.front().ct);
+ LOG(RPiAlsc, Debug) << "using calibration for "
+ << calibrations.back().ct;
} else {
int idx = 0;
while (ct > calibrations[idx + 1].ct)
idx++;
double ct0 = calibrations[idx].ct,
ct1 = calibrations[idx + 1].ct;
- RPI_LOG("Alsc: ct is " << ct << ", interpolating between "
- << ct0 << " and " << ct1);
+ LOG(RPiAlsc, Debug)
+ << "ct is " << ct << ", interpolating between "
+ << ct0 << " and " << ct1;
for (int i = 0; i < XY; i++)
cal_table[i] =
(calibrations[idx].table[i] * (ct1 - ct) +
@@ -606,9 +613,9 @@ static double gauss_seidel2_SOR(double const M[XY][4], double omega,
double lambda[XY])
{
double old_lambda[XY];
- for (int i = 0; i < XY; i++)
- old_lambda[i] = lambda[i];
int i;
+ for (i = 0; i < XY; i++)
+ old_lambda[i] = lambda[i];
lambda[0] = compute_lambda_bottom_start(0, M, lambda);
for (i = 1; i < X; i++)
lambda[i] = compute_lambda_bottom(i, M, lambda);
@@ -628,7 +635,7 @@ static double gauss_seidel2_SOR(double const M[XY][4], double omega,
lambda[i] = compute_lambda_bottom(i, M, lambda);
lambda[0] = compute_lambda_bottom_start(0, M, lambda);
double max_diff = 0;
- for (int i = 0; i < XY; i++) {
+ for (i = 0; i < XY; i++) {
lambda[i] = old_lambda[i] + (lambda[i] - old_lambda[i]) * omega;
if (fabs(lambda[i] - old_lambda[i]) > fabs(max_diff))
max_diff = lambda[i] - old_lambda[i];
@@ -656,15 +663,16 @@ static void run_matrix_iterations(double const C[XY], double lambda[XY],
for (int i = 0; i < n_iter; i++) {
double max_diff = fabs(gauss_seidel2_SOR(M, omega, lambda));
if (max_diff < threshold) {
- RPI_LOG("Stop after " << i + 1 << " iterations");
+ LOG(RPiAlsc, Debug)
+ << "Stop after " << i + 1 << " iterations";
break;
}
// this happens very occasionally (so make a note), though
// doesn't seem to matter
if (max_diff > last_max_diff)
- RPI_LOG("Iteration " << i << ": max_diff gone up "
- << last_max_diff << " to "
- << max_diff);
+ LOG(RPiAlsc, Debug)
+ << "Iteration " << i << ": max_diff gone up "
+ << last_max_diff << " to " << max_diff;
last_max_diff = max_diff;
}
// We're going to normalise the lambdas so the smallest is 1. Not sure
diff --git a/src/ipa/raspberrypi/controller/rpi/awb.cpp b/src/ipa/raspberrypi/controller/rpi/awb.cpp
index a5536e47..5cfd33a3 100644
--- a/src/ipa/raspberrypi/controller/rpi/awb.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/awb.cpp
@@ -5,19 +5,24 @@
* awb.cpp - AWB control algorithm
*/
-#include "../logging.hpp"
+#include <libcamera/base/log.h>
+
#include "../lux_status.h"
#include "awb.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiAwb)
#define NAME "rpi.awb"
#define AWB_STATS_SIZE_X DEFAULT_AWB_REGIONS_X
#define AWB_STATS_SIZE_Y DEFAULT_AWB_REGIONS_Y
-const double Awb::RGB::INVALID = -1.0;
+// todo - the locking in this algorithm needs some tidying up as has been done
+// elsewhere (ALSC and AGC).
void AwbMode::Read(boost::property_tree::ptree const &params)
{
@@ -55,10 +60,10 @@ static void read_ct_curve(Pwl &ct_r, Pwl &ct_b,
void AwbConfig::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("AwbConfig");
bayes = params.get<int>("bayes", 1);
frame_period = params.get<uint16_t>("frame_period", 10);
startup_frames = params.get<uint16_t>("startup_frames", 10);
+ convergence_frames = params.get<unsigned int>("convergence_frames", 3);
speed = params.get<double>("speed", 0.05);
if (params.get_child_optional("ct_curve"))
read_ct_curve(ct_r, ct_b, params.get_child("ct_curve"));
@@ -100,8 +105,8 @@ void AwbConfig::Read(boost::property_tree::ptree const &params)
if (bayes) {
if (ct_r.Empty() || ct_b.Empty() || priors.empty() ||
default_mode == nullptr) {
- RPI_WARN(
- "Bayesian AWB mis-configured - switch to Grey method");
+ LOG(RPiAwb, Warning)
+ << "Bayesian AWB mis-configured - switch to Grey method";
bayes = false;
}
}
@@ -120,6 +125,7 @@ Awb::Awb(Controller *controller)
async_abort_ = async_start_ = async_started_ = async_finished_ = false;
mode_ = nullptr;
manual_r_ = manual_b_ = 0.0;
+ first_switch_mode_ = true;
async_thread_ = std::thread(std::bind(&Awb::asyncFunc, this));
}
@@ -128,8 +134,8 @@ Awb::~Awb()
{
std::lock_guard<std::mutex> lock(mutex_);
async_abort_ = true;
- async_signal_.notify_one();
}
+ async_signal_.notify_one();
async_thread_.join();
}
@@ -145,7 +151,7 @@ void Awb::Read(boost::property_tree::ptree const &params)
void Awb::Initialise()
{
- frame_count2_ = frame_count_ = frame_phase_ = 0;
+ frame_count_ = frame_phase_ = 0;
// Put something sane into the status that we are filtering towards,
// just in case the first few frames don't have anything meaningful in
// them.
@@ -163,48 +169,92 @@ void Awb::Initialise()
sync_results_.gain_b = 1.0;
}
prev_sync_results_ = sync_results_;
+ async_results_ = sync_results_;
+}
+
+unsigned int Awb::GetConvergenceFrames() const
+{
+ // If not in auto mode, there is no convergence
+ // to happen, so no need to drop any frames - return zero.
+ if (!isAutoEnabled())
+ return 0;
+ else
+ return config_.convergence_frames;
}
void Awb::SetMode(std::string const &mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
mode_name_ = mode_name;
}
void Awb::SetManualGains(double manual_r, double manual_b)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
	// If any of these are 0.0, we switch back to auto.
manual_r_ = manual_r;
manual_b_ = manual_b;
+ // If not in auto mode, set these values into the sync_results which
+ // means that Prepare() will adopt them immediately.
+ if (!isAutoEnabled()) {
+ sync_results_.gain_r = prev_sync_results_.gain_r = manual_r_;
+ sync_results_.gain_g = prev_sync_results_.gain_g = 1.0;
+ sync_results_.gain_b = prev_sync_results_.gain_b = manual_b_;
+ }
+}
+
+void Awb::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
+ Metadata *metadata)
+{
+ // On the first mode switch we'll have no meaningful colour
+ // temperature, so try to dead reckon one if in manual mode.
+ if (!isAutoEnabled() && first_switch_mode_ && config_.bayes) {
+ Pwl ct_r_inverse = config_.ct_r.Inverse();
+ Pwl ct_b_inverse = config_.ct_b.Inverse();
+ double ct_r = ct_r_inverse.Eval(ct_r_inverse.Domain().Clip(1 / manual_r_));
+ double ct_b = ct_b_inverse.Eval(ct_b_inverse.Domain().Clip(1 / manual_b_));
+ prev_sync_results_.temperature_K = (ct_r + ct_b) / 2;
+ sync_results_.temperature_K = prev_sync_results_.temperature_K;
+ }
+ // Let other algorithms know the current white balance values.
+ metadata->Set("awb.status", prev_sync_results_);
+ first_switch_mode_ = false;
+}
+
+bool Awb::isAutoEnabled() const
+{
+ return manual_r_ == 0.0 || manual_b_ == 0.0;
}
void Awb::fetchAsyncResults()
{
- RPI_LOG("Fetch AWB results");
+ LOG(RPiAwb, Debug) << "Fetch AWB results";
async_finished_ = false;
async_started_ = false;
- sync_results_ = async_results_;
+ // It's possible manual gains could be set even while the async
+ // thread was running, so only copy the results if still in auto mode.
+ if (isAutoEnabled())
+ sync_results_ = async_results_;
}
-void Awb::restartAsync(StatisticsPtr &stats, std::string const &mode_name,
- double lux)
+void Awb::restartAsync(StatisticsPtr &stats, double lux)
{
- RPI_LOG("Starting AWB thread");
+ LOG(RPiAwb, Debug) << "Starting AWB calculation";
// this makes a new reference which belongs to the asynchronous thread
statistics_ = stats;
// store the mode as it could technically change
- auto m = config_.modes.find(mode_name);
+ auto m = config_.modes.find(mode_name_);
mode_ = m != config_.modes.end()
? &m->second
: (mode_ == nullptr ? config_.default_mode : mode_);
lux_ = lux;
frame_phase_ = 0;
- async_start_ = true;
async_started_ = true;
- size_t len = mode_name.copy(async_results_.mode,
- sizeof(async_results_.mode) - 1);
+ size_t len = mode_name_.copy(async_results_.mode,
+ sizeof(async_results_.mode) - 1);
async_results_.mode[len] = '\0';
+ {
+ std::lock_guard<std::mutex> lock(mutex_);
+ async_start_ = true;
+ }
async_signal_.notify_one();
}
@@ -215,13 +265,12 @@ void Awb::Prepare(Metadata *image_metadata)
double speed = frame_count_ < (int)config_.startup_frames
? 1.0
: config_.speed;
- RPI_LOG("Awb: frame_count " << frame_count_ << " speed " << speed);
+ LOG(RPiAwb, Debug)
+ << "frame_count " << frame_count_ << " speed " << speed;
{
std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ && async_finished_) {
- RPI_LOG("AWB thread finished");
+ if (async_started_ && async_finished_)
fetchAsyncResults();
- }
}
// Finally apply IIR filter to results and put into metadata.
memcpy(prev_sync_results_.mode, sync_results_.mode,
@@ -236,9 +285,10 @@ void Awb::Prepare(Metadata *image_metadata)
prev_sync_results_.gain_b = speed * sync_results_.gain_b +
(1.0 - speed) * prev_sync_results_.gain_b;
image_metadata->Set("awb.status", prev_sync_results_);
- RPI_LOG("Using AWB gains r " << prev_sync_results_.gain_r << " g "
- << prev_sync_results_.gain_g << " b "
- << prev_sync_results_.gain_b);
+ LOG(RPiAwb, Debug)
+ << "Using AWB gains r " << prev_sync_results_.gain_r << " g "
+ << prev_sync_results_.gain_g << " b "
+ << prev_sync_results_.gain_b;
}
void Awb::Process(StatisticsPtr &stats, Metadata *image_metadata)
@@ -246,28 +296,20 @@ void Awb::Process(StatisticsPtr &stats, Metadata *image_metadata)
// Count frames since we last poked the async thread.
if (frame_phase_ < (int)config_.frame_period)
frame_phase_++;
- if (frame_count2_ < (int)config_.startup_frames)
- frame_count2_++;
- RPI_LOG("Awb: frame_phase " << frame_phase_);
- if (frame_phase_ >= (int)config_.frame_period ||
- frame_count2_ < (int)config_.startup_frames) {
+ LOG(RPiAwb, Debug) << "frame_phase " << frame_phase_;
+ // We do not restart the async thread if we're not in auto mode.
+ if (isAutoEnabled() &&
+ (frame_phase_ >= (int)config_.frame_period ||
+ frame_count_ < (int)config_.startup_frames)) {
// Update any settings and any image metadata that we need.
- std::string mode_name;
- {
- std::unique_lock<std::mutex> lock(settings_mutex_);
- mode_name = mode_name_;
- }
struct LuxStatus lux_status = {};
lux_status.lux = 400; // in case no metadata
if (image_metadata->Get("lux.status", lux_status) != 0)
- RPI_LOG("No lux metadata found");
- RPI_LOG("Awb lux value is " << lux_status.lux);
+ LOG(RPiAwb, Debug) << "No lux metadata found";
+ LOG(RPiAwb, Debug) << "Awb lux value is " << lux_status.lux;
- std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ == false) {
- RPI_LOG("AWB thread starting");
- restartAsync(stats, mode_name, lux_status.lux);
- }
+ if (async_started_ == false)
+ restartAsync(stats, lux_status.lux);
}
}
@@ -287,8 +329,8 @@ void Awb::asyncFunc()
{
std::lock_guard<std::mutex> lock(mutex_);
async_finished_ = true;
- sync_signal_.notify_one();
}
+ sync_signal_.notify_one();
}
}
@@ -297,16 +339,16 @@ static void generate_stats(std::vector<Awb::RGB> &zones,
double min_G)
{
for (int i = 0; i < AWB_STATS_SIZE_X * AWB_STATS_SIZE_Y; i++) {
- Awb::RGB zone; // this is "invalid", unless R gets overwritten later
+ Awb::RGB zone;
double counted = stats[i].counted;
if (counted >= min_pixels) {
zone.G = stats[i].g_sum / counted;
if (zone.G >= min_G) {
zone.R = stats[i].r_sum / counted;
zone.B = stats[i].b_sum / counted;
+ zones.push_back(zone);
}
}
- zones.push_back(zone);
}
}
@@ -336,7 +378,7 @@ double Awb::computeDelta2Sum(double gain_r, double gain_b)
double delta_r = gain_r * z.R - 1 - config_.whitepoint_r;
double delta_b = gain_b * z.B - 1 - config_.whitepoint_b;
double delta2 = delta_r * delta_r + delta_b * delta_b;
- //RPI_LOG("delta_r " << delta_r << " delta_b " << delta_b << " delta2 " << delta2);
+ //LOG(RPiAwb, Debug) << "delta_r " << delta_r << " delta_b " << delta_b << " delta2 " << delta2;
delta2 = std::min(delta2, config_.delta_limit);
delta2_sum += delta2;
}
@@ -399,10 +441,11 @@ double Awb::coarseSearch(Pwl const &prior)
double prior_log_likelihood =
prior.Eval(prior.Domain().Clip(t));
double final_log_likelihood = delta2_sum - prior_log_likelihood;
- RPI_LOG("t: " << t << " gain_r " << gain_r << " gain_b "
- << gain_b << " delta2_sum " << delta2_sum
- << " prior " << prior_log_likelihood << " final "
- << final_log_likelihood);
+ LOG(RPiAwb, Debug)
+ << "t: " << t << " gain_r " << gain_r << " gain_b "
+ << gain_b << " delta2_sum " << delta2_sum
+ << " prior " << prior_log_likelihood << " final "
+ << final_log_likelihood;
points_.push_back(Pwl::Point(t, final_log_likelihood));
if (points_.back().y < points_[best_point].y)
best_point = points_.size() - 1;
@@ -413,7 +456,7 @@ double Awb::coarseSearch(Pwl const &prior)
mode_->ct_hi);
}
t = points_[best_point].x;
- RPI_LOG("Coarse search found CT " << t);
+ LOG(RPiAwb, Debug) << "Coarse search found CT " << t;
// We have the best point of the search, but refine it with a quadratic
// interpolation around its neighbours.
if (points_.size() > 2) {
@@ -422,15 +465,16 @@ double Awb::coarseSearch(Pwl const &prior)
t = interpolate_quadatric(points_[best_point - 1],
points_[best_point],
points_[best_point + 1]);
- RPI_LOG("After quadratic refinement, coarse search has CT "
- << t);
+ LOG(RPiAwb, Debug)
+ << "After quadratic refinement, coarse search has CT "
+ << t;
}
return t;
}
void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
{
- int span_r, span_b;
+ int span_r = -1, span_b = -1;
config_.ct_r.Eval(t, &span_r);
config_.ct_b.Eval(t, &span_b);
double step = t / 10 * config_.coarse_step * 0.1;
@@ -475,8 +519,9 @@ void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
double gain_r = 1 / r_test, gain_b = 1 / b_test;
double delta2_sum = computeDelta2Sum(gain_r, gain_b);
points[j].y = delta2_sum - prior_log_likelihood;
- RPI_LOG("At t " << t_test << " r " << r_test << " b "
- << b_test << ": " << points[j].y);
+ LOG(RPiAwb, Debug)
+ << "At t " << t_test << " r " << r_test << " b "
+ << b_test << ": " << points[j].y;
if (points[j].y < points[best_point].y)
best_point = j;
}
@@ -493,17 +538,18 @@ void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
double gain_r = 1 / r_test, gain_b = 1 / b_test;
double delta2_sum = computeDelta2Sum(gain_r, gain_b);
double final_log_likelihood = delta2_sum - prior_log_likelihood;
- RPI_LOG("Finally "
+ LOG(RPiAwb, Debug)
+ << "Finally "
<< t_test << " r " << r_test << " b " << b_test << ": "
<< final_log_likelihood
- << (final_log_likelihood < best_log_likelihood ? " BEST"
- : ""));
+ << (final_log_likelihood < best_log_likelihood ? " BEST" : "");
if (best_t == 0 || final_log_likelihood < best_log_likelihood)
best_log_likelihood = final_log_likelihood,
best_t = t_test, best_r = r_test, best_b = b_test;
}
t = best_t, r = best_r, b = best_b;
- RPI_LOG("Fine search found t " << t << " r " << r << " b " << b);
+ LOG(RPiAwb, Debug)
+ << "Fine search found t " << t << " r " << r << " b " << b;
}
void Awb::awbBayes()
@@ -517,13 +563,14 @@ void Awb::awbBayes()
Pwl prior = interpolatePrior();
prior *= zones_.size() / (double)(AWB_STATS_SIZE_X * AWB_STATS_SIZE_Y);
prior.Map([](double x, double y) {
- RPI_LOG("(" << x << "," << y << ")");
+ LOG(RPiAwb, Debug) << "(" << x << "," << y << ")";
});
double t = coarseSearch(prior);
double r = config_.ct_r.Eval(t);
double b = config_.ct_b.Eval(t);
- RPI_LOG("After coarse search: r " << r << " b " << b << " (gains r "
- << 1 / r << " b " << 1 / b << ")");
+ LOG(RPiAwb, Debug)
+ << "After coarse search: r " << r << " b " << b << " (gains r "
+ << 1 / r << " b " << 1 / b << ")";
// Not entirely sure how to handle the fine search yet. Mostly the
// estimated CT is already good enough, but the fine search allows us to
// wander transversely off the CT curve. Under some illuminants, where
@@ -531,8 +578,9 @@ void Awb::awbBayes()
// though I probably need more real datasets before deciding exactly how
// this should be controlled and tuned.
fineSearch(t, r, b, prior);
- RPI_LOG("After fine search: r " << r << " b " << b << " (gains r "
- << 1 / r << " b " << 1 / b << ")");
+ LOG(RPiAwb, Debug)
+ << "After fine search: r " << r << " b " << b << " (gains r "
+ << 1 / r << " b " << 1 / b << ")";
// Write results out for the main thread to pick up. Remember to adjust
// the gains from the ones that the "canonical sensor" would require to
// the ones needed by *this* sensor.
@@ -544,7 +592,7 @@ void Awb::awbBayes()
void Awb::awbGrey()
{
- RPI_LOG("Grey world AWB");
+ LOG(RPiAwb, Debug) << "Grey world AWB";
// Make a separate list of the derivatives for each of red and blue, so
// that we can sort them to exclude the extreme gains. We could
// consider some variations, such as normalising all the zones first, or
@@ -576,27 +624,18 @@ void Awb::awbGrey()
void Awb::doAwb()
{
- if (manual_r_ != 0.0 && manual_b_ != 0.0) {
- async_results_.temperature_K = 4500; // don't know what it is
- async_results_.gain_r = manual_r_;
- async_results_.gain_g = 1.0;
- async_results_.gain_b = manual_b_;
- RPI_LOG("Using manual white balance: gain_r "
- << async_results_.gain_r << " gain_b "
- << async_results_.gain_b);
- } else {
- prepareStats();
- RPI_LOG("Valid zones: " << zones_.size());
- if (zones_.size() > config_.min_regions) {
- if (config_.bayes)
- awbBayes();
- else
- awbGrey();
- RPI_LOG("CT found is "
- << async_results_.temperature_K
- << " with gains r " << async_results_.gain_r
- << " and b " << async_results_.gain_b);
- }
+ prepareStats();
+ LOG(RPiAwb, Debug) << "Valid zones: " << zones_.size();
+ if (zones_.size() > config_.min_regions) {
+ if (config_.bayes)
+ awbBayes();
+ else
+ awbGrey();
+ LOG(RPiAwb, Debug)
+ << "CT found is "
+ << async_results_.temperature_K
+ << " with gains r " << async_results_.gain_r
+ << " and b " << async_results_.gain_b;
}
}
diff --git a/src/ipa/raspberrypi/controller/rpi/awb.hpp b/src/ipa/raspberrypi/controller/rpi/awb.hpp
index 9124d042..8af1f27c 100644
--- a/src/ipa/raspberrypi/controller/rpi/awb.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/awb.hpp
@@ -37,6 +37,7 @@ struct AwbConfig {
uint16_t frame_period;
// number of initial frames for which speed taken as 1.0 (maximum)
uint16_t startup_frames;
+ unsigned int convergence_frames; // approx number of frames to converge
double speed; // IIR filter speed applied to algorithm results
bool fast; // "fast" mode uses a 16x16 rather than 32x32 grid
Pwl ct_r; // function maps CT to r (= R/G)
@@ -82,29 +83,27 @@ public:
char const *Name() const override;
void Initialise() override;
void Read(boost::property_tree::ptree const &params) override;
+ unsigned int GetConvergenceFrames() const override;
void SetMode(std::string const &name) override;
void SetManualGains(double manual_r, double manual_b) override;
+ void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
void Prepare(Metadata *image_metadata) override;
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
struct RGB {
- RGB(double _R = INVALID, double _G = INVALID,
- double _B = INVALID)
+ RGB(double _R = 0, double _G = 0, double _B = 0)
: R(_R), G(_G), B(_B)
{
}
double R, G, B;
- static const double INVALID;
- bool Valid() const { return G != INVALID; }
- bool Invalid() const { return G == INVALID; }
RGB &operator+=(RGB const &other)
{
R += other.R, G += other.G, B += other.B;
return *this;
}
- RGB Square() const { return RGB(R * R, G * G, B * B); }
};
private:
+ bool isAutoEnabled() const;
// configuration is read-only, and available to both threads
AwbConfig config_;
std::thread async_thread_;
@@ -127,15 +126,12 @@ private:
// counts up to frame_period before restarting the async thread
int frame_phase_;
int frame_count_; // counts up to startup_frames
- int frame_count2_; // counts up to startup_frames for Process method
AwbStatus sync_results_;
AwbStatus prev_sync_results_;
std::string mode_name_;
- std::mutex settings_mutex_;
// The following are for the asynchronous thread to use, though the main
// thread can set/reset them if the async thread is known to be idle:
- void restartAsync(StatisticsPtr &stats, std::string const &mode_name,
- double lux);
+ void restartAsync(StatisticsPtr &stats, double lux);
// copy out the results from the async thread so that it can be restarted
void fetchAsyncResults();
StatisticsPtr statistics_;
@@ -156,6 +152,7 @@ private:
double manual_r_;
// manual b setting
double manual_b_;
+ bool first_switch_mode_; // is this the first call to SwitchMode?
};
static inline Awb::RGB operator+(Awb::RGB const &a, Awb::RGB const &b)
diff --git a/src/ipa/raspberrypi/controller/rpi/black_level.cpp b/src/ipa/raspberrypi/controller/rpi/black_level.cpp
index 0629b77c..6b3497f1 100644
--- a/src/ipa/raspberrypi/controller/rpi/black_level.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/black_level.cpp
@@ -8,12 +8,16 @@
#include <math.h>
#include <stdint.h>
+#include <libcamera/base/log.h>
+
#include "../black_level_status.h"
-#include "../logging.hpp"
#include "black_level.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiBlackLevel)
#define NAME "rpi.black_level"
@@ -29,12 +33,15 @@ char const *BlackLevel::Name() const
void BlackLevel::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
uint16_t black_level = params.get<uint16_t>(
"black_level", 4096); // 64 in 10 bits scaled to 16 bits
black_level_r_ = params.get<uint16_t>("black_level_r", black_level);
black_level_g_ = params.get<uint16_t>("black_level_g", black_level);
black_level_b_ = params.get<uint16_t>("black_level_b", black_level);
+ LOG(RPiBlackLevel, Debug)
+ << " Read black levels red " << black_level_r_
+ << " green " << black_level_g_
+ << " blue " << black_level_b_;
}
void BlackLevel::Prepare(Metadata *image_metadata)
diff --git a/src/ipa/raspberrypi/controller/rpi/ccm.cpp b/src/ipa/raspberrypi/controller/rpi/ccm.cpp
index a8a2caff..821a4c7c 100644
--- a/src/ipa/raspberrypi/controller/rpi/ccm.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/ccm.cpp
@@ -5,15 +5,19 @@
* ccm.cpp - CCM (colour correction matrix) control algorithm
*/
+#include <libcamera/base/log.h>
+
#include "../awb_status.h"
#include "../ccm_status.h"
-#include "../logging.hpp"
#include "../lux_status.h"
#include "../metadata.hpp"
#include "ccm.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiCcm)
// This algorithm selects a CCM (Colour Correction Matrix) according to the
// colour temperature estimated by AWB (interpolating between known matricies as
@@ -129,9 +133,9 @@ void Ccm::Prepare(Metadata *image_metadata)
lux_ok = get_locked(image_metadata, "lux.status", lux);
}
if (!awb_ok)
- RPI_WARN("Ccm: no colour temperature found");
+ LOG(RPiCcm, Warning) << "no colour temperature found";
if (!lux_ok)
- RPI_WARN("Ccm: no lux value found");
+ LOG(RPiCcm, Warning) << "no lux value found";
Matrix ccm = calculate_ccm(config_.ccms, awb.temperature_K);
double saturation = saturation_;
struct CcmStatus ccm_status;
@@ -144,13 +148,15 @@ void Ccm::Prepare(Metadata *image_metadata)
for (int i = 0; i < 3; i++)
ccm_status.matrix[j * 3 + i] =
std::max(-8.0, std::min(7.9999, ccm.m[j][i]));
- RPI_LOG("CCM: colour temperature " << awb.temperature_K << "K");
- RPI_LOG("CCM: " << ccm_status.matrix[0] << " " << ccm_status.matrix[1]
- << " " << ccm_status.matrix[2] << " "
- << ccm_status.matrix[3] << " " << ccm_status.matrix[4]
- << " " << ccm_status.matrix[5] << " "
- << ccm_status.matrix[6] << " " << ccm_status.matrix[7]
- << " " << ccm_status.matrix[8]);
+ LOG(RPiCcm, Debug)
+ << "colour temperature " << awb.temperature_K << "K";
+ LOG(RPiCcm, Debug)
+ << "CCM: " << ccm_status.matrix[0] << " " << ccm_status.matrix[1]
+ << " " << ccm_status.matrix[2] << " "
+ << ccm_status.matrix[3] << " " << ccm_status.matrix[4]
+ << " " << ccm_status.matrix[5] << " "
+ << ccm_status.matrix[6] << " " << ccm_status.matrix[7]
+ << " " << ccm_status.matrix[8];
image_metadata->Set("ccm.status", ccm_status);
}
diff --git a/src/ipa/raspberrypi/controller/rpi/ccm.hpp b/src/ipa/raspberrypi/controller/rpi/ccm.hpp
index fcf077e7..330ed51f 100644
--- a/src/ipa/raspberrypi/controller/rpi/ccm.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/ccm.hpp
@@ -7,7 +7,6 @@
#pragma once
#include <vector>
-#include <atomic>
#include "../ccm_algorithm.hpp"
#include "../pwl.hpp"
@@ -70,7 +69,7 @@ public:
private:
CcmConfig config_;
- std::atomic<double> saturation_;
+ double saturation_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/contrast.cpp b/src/ipa/raspberrypi/controller/rpi/contrast.cpp
index 103153db..ae55aad5 100644
--- a/src/ipa/raspberrypi/controller/rpi/contrast.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/contrast.cpp
@@ -6,12 +6,17 @@
*/
#include <stdint.h>
+#include <libcamera/base/log.h>
+
#include "../contrast_status.h"
#include "../histogram.hpp"
#include "contrast.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiContrast)
// This is a very simple control algorithm which simply retrieves the results of
// AGC and AWB via their "status" metadata, and applies digital gain to the
@@ -97,11 +102,13 @@ Pwl compute_stretch_curve(Histogram const &histogram,
double hist_lo = histogram.Quantile(config.lo_histogram) *
(65536 / NUM_HISTOGRAM_BINS);
double level_lo = config.lo_level * 65536;
- RPI_LOG("Move histogram point " << hist_lo << " to " << level_lo);
+ LOG(RPiContrast, Debug)
+ << "Move histogram point " << hist_lo << " to " << level_lo;
hist_lo = std::max(
level_lo,
std::min(65535.0, std::min(hist_lo, level_lo + config.lo_max)));
- RPI_LOG("Final values " << hist_lo << " -> " << level_lo);
+ LOG(RPiContrast, Debug)
+ << "Final values " << hist_lo << " -> " << level_lo;
enhance.Append(hist_lo, level_lo);
// Keep the mid-point (median) in the same place, though, to limit the
// apparent amount of global brightness shift.
@@ -113,11 +120,13 @@ Pwl compute_stretch_curve(Histogram const &histogram,
double hist_hi = histogram.Quantile(config.hi_histogram) *
(65536 / NUM_HISTOGRAM_BINS);
double level_hi = config.hi_level * 65536;
- RPI_LOG("Move histogram point " << hist_hi << " to " << level_hi);
+ LOG(RPiContrast, Debug)
+ << "Move histogram point " << hist_hi << " to " << level_hi;
hist_hi = std::min(
level_hi,
std::max(0.0, std::max(hist_hi, level_hi - config.hi_max)));
- RPI_LOG("Final values " << hist_hi << " -> " << level_hi);
+ LOG(RPiContrast, Debug)
+ << "Final values " << hist_hi << " -> " << level_hi;
enhance.Append(hist_hi, level_hi);
enhance.Append(65535, 65535);
return enhance;
@@ -127,7 +136,8 @@ Pwl apply_manual_contrast(Pwl const &gamma_curve, double brightness,
double contrast)
{
Pwl new_gamma_curve;
- RPI_LOG("Manual brightness " << brightness << " contrast " << contrast);
+ LOG(RPiContrast, Debug)
+ << "Manual brightness " << brightness << " contrast " << contrast;
gamma_curve.Map([&](double x, double y) {
new_gamma_curve.Append(
x, std::max(0.0, std::min(65535.0,
@@ -140,7 +150,6 @@ Pwl apply_manual_contrast(Pwl const &gamma_curve, double brightness,
void Contrast::Process(StatisticsPtr &stats,
[[maybe_unused]] Metadata *image_metadata)
{
- double brightness = brightness_, contrast = contrast_;
Histogram histogram(stats->hist[0].g_hist, NUM_HISTOGRAM_BINS);
// We look at the histogram and adjust the gamma curve in the following
// ways: 1. Adjust the gamma curve so as to pull the start of the
@@ -155,13 +164,13 @@ void Contrast::Process(StatisticsPtr &stats,
}
// 2. Finally apply any manually selected brightness/contrast
// adjustment.
- if (brightness != 0 || contrast != 1.0)
- gamma_curve = apply_manual_contrast(gamma_curve, brightness,
- contrast);
+ if (brightness_ != 0 || contrast_ != 1.0)
+ gamma_curve = apply_manual_contrast(gamma_curve, brightness_,
+ contrast_);
// And fill in the status for output. Use more points towards the bottom
// of the curve.
ContrastStatus status;
- fill_in_status(status, brightness, contrast, gamma_curve);
+ fill_in_status(status, brightness_, contrast_, gamma_curve);
{
std::unique_lock<std::mutex> lock(mutex_);
status_ = status;
diff --git a/src/ipa/raspberrypi/controller/rpi/contrast.hpp b/src/ipa/raspberrypi/controller/rpi/contrast.hpp
index 6836f181..85624539 100644
--- a/src/ipa/raspberrypi/controller/rpi/contrast.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/contrast.hpp
@@ -6,7 +6,6 @@
*/
#pragma once
-#include <atomic>
#include <mutex>
#include "../contrast_algorithm.hpp"
@@ -42,8 +41,8 @@ public:
private:
ContrastConfig config_;
- std::atomic<double> brightness_;
- std::atomic<double> contrast_;
+ double brightness_;
+ double contrast_;
ContrastStatus status_;
std::mutex mutex_;
};
diff --git a/src/ipa/raspberrypi/controller/rpi/dpc.cpp b/src/ipa/raspberrypi/controller/rpi/dpc.cpp
index 348e1609..110f5056 100644
--- a/src/ipa/raspberrypi/controller/rpi/dpc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/dpc.cpp
@@ -5,10 +5,14 @@
* dpc.cpp - DPC (defective pixel correction) control algorithm
*/
-#include "../logging.hpp"
+#include <libcamera/base/log.h>
+
#include "dpc.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiDpc)
// We use the lux status so that we can apply stronger settings in darkness (if
// necessary).
@@ -37,7 +41,7 @@ void Dpc::Prepare(Metadata *image_metadata)
DpcStatus dpc_status = {};
// Should we vary this with lux level or analogue gain? TBD.
dpc_status.strength = config_.strength;
- RPI_LOG("Dpc: strength " << dpc_status.strength);
+ LOG(RPiDpc, Debug) << "strength " << dpc_status.strength;
image_metadata->Set("dpc.status", dpc_status);
}
diff --git a/src/ipa/raspberrypi/controller/rpi/focus.cpp b/src/ipa/raspberrypi/controller/rpi/focus.cpp
index bab4406f..a87ec802 100644
--- a/src/ipa/raspberrypi/controller/rpi/focus.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/focus.cpp
@@ -6,7 +6,7 @@
*/
#include <stdint.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
#include "../focus_status.h"
#include "focus.hpp"
diff --git a/src/ipa/raspberrypi/controller/rpi/geq.cpp b/src/ipa/raspberrypi/controller/rpi/geq.cpp
index b6c98414..4530cb75 100644
--- a/src/ipa/raspberrypi/controller/rpi/geq.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/geq.cpp
@@ -5,14 +5,18 @@
* geq.cpp - GEQ (green equalisation) control algorithm
*/
+#include <libcamera/base/log.h>
+
#include "../device_status.h"
-#include "../logging.hpp"
#include "../lux_status.h"
#include "../pwl.hpp"
#include "geq.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiGeq)
// We use the lux status so that we can apply stronger settings in darkness (if
// necessary).
@@ -44,11 +48,12 @@ void Geq::Prepare(Metadata *image_metadata)
LuxStatus lux_status = {};
lux_status.lux = 400;
if (image_metadata->Get("lux.status", lux_status))
- RPI_WARN("Geq: no lux data found");
- DeviceStatus device_status = {};
+ LOG(RPiGeq, Warning) << "no lux data found";
+ DeviceStatus device_status;
device_status.analogue_gain = 1.0; // in case not found
if (image_metadata->Get("device.status", device_status))
- RPI_WARN("Geq: no device metadata - use analogue gain of 1x");
+ LOG(RPiGeq, Warning)
+ << "no device metadata - use analogue gain of 1x";
GeqStatus geq_status = {};
double strength =
config_.strength.Empty()
@@ -60,10 +65,11 @@ void Geq::Prepare(Metadata *image_metadata)
double slope = config_.slope * strength;
geq_status.offset = std::min(65535.0, std::max(0.0, offset));
geq_status.slope = std::min(.99999, std::max(0.0, slope));
- RPI_LOG("Geq: offset " << geq_status.offset << " slope "
- << geq_status.slope << " (analogue gain "
- << device_status.analogue_gain << " lux "
- << lux_status.lux << ")");
+ LOG(RPiGeq, Debug)
+ << "offset " << geq_status.offset << " slope "
+ << geq_status.slope << " (analogue gain "
+ << device_status.analogue_gain << " lux "
+ << lux_status.lux << ")";
image_metadata->Set("geq.status", geq_status);
}
diff --git a/src/ipa/raspberrypi/controller/rpi/lux.cpp b/src/ipa/raspberrypi/controller/rpi/lux.cpp
index 5acd49a0..6367b17d 100644
--- a/src/ipa/raspberrypi/controller/rpi/lux.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/lux.cpp
@@ -8,12 +8,17 @@
#include "linux/bcm2835-isp.h"
+#include <libcamera/base/log.h>
+
#include "../device_status.h"
-#include "../logging.hpp"
#include "lux.hpp"
using namespace RPiController;
+using namespace libcamera;
+using namespace std::literals::chrono_literals;
+
+LOG_DEFINE_CATEGORY(RPiLux)
#define NAME "rpi.lux"
@@ -33,9 +38,8 @@ char const *Lux::Name() const
void Lux::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
reference_shutter_speed_ =
- params.get<double>("reference_shutter_speed");
+ params.get<double>("reference_shutter_speed") * 1.0us;
reference_gain_ = params.get<double>("reference_gain");
reference_aperture_ = params.get<double>("reference_aperture", 1.0);
reference_Y_ = params.get<double>("reference_Y");
@@ -43,6 +47,11 @@ void Lux::Read(boost::property_tree::ptree const &params)
current_aperture_ = reference_aperture_;
}
+void Lux::SetCurrentAperture(double aperture)
+{
+ current_aperture_ = aperture;
+}
+
void Lux::Prepare(Metadata *image_metadata)
{
std::unique_lock<std::mutex> lock(mutex_);
@@ -51,16 +60,9 @@ void Lux::Prepare(Metadata *image_metadata)
void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
{
- // set some initial values to shut the compiler up
- DeviceStatus device_status =
- { .shutter_speed = 1.0,
- .analogue_gain = 1.0,
- .lens_position = 0.0,
- .aperture = 0.0,
- .flash_intensity = 0.0 };
+ DeviceStatus device_status;
if (image_metadata->Get("device.status", device_status) == 0) {
double current_gain = device_status.analogue_gain;
- double current_shutter_speed = device_status.shutter_speed;
double current_aperture = device_status.aperture;
if (current_aperture == 0)
current_aperture = current_aperture_;
@@ -75,7 +77,7 @@ void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
double current_Y = sum / (double)num + .5;
double gain_ratio = reference_gain_ / current_gain;
double shutter_speed_ratio =
- reference_shutter_speed_ / current_shutter_speed;
+ reference_shutter_speed_ / device_status.shutter_speed;
double aperture_ratio = reference_aperture_ / current_aperture;
double Y_ratio = current_Y * (65536 / num_bins) / reference_Y_;
double estimated_lux = shutter_speed_ratio * gain_ratio *
@@ -84,7 +86,7 @@ void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
LuxStatus status;
status.lux = estimated_lux;
status.aperture = current_aperture;
- RPI_LOG(Name() << ": estimated lux " << estimated_lux);
+ LOG(RPiLux, Debug) << ": estimated lux " << estimated_lux;
{
std::unique_lock<std::mutex> lock(mutex_);
status_ = status;
@@ -93,7 +95,7 @@ void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
// algorithms get the latest value.
image_metadata->Set("lux.status", status);
} else
- RPI_WARN(Name() << ": no device metadata");
+ LOG(RPiLux, Warning) << ": no device metadata";
}
// Register algorithm with the system.
diff --git a/src/ipa/raspberrypi/controller/rpi/lux.hpp b/src/ipa/raspberrypi/controller/rpi/lux.hpp
index 7b6c7258..3ebd35d1 100644
--- a/src/ipa/raspberrypi/controller/rpi/lux.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/lux.hpp
@@ -6,9 +6,10 @@
*/
#pragma once
-#include <atomic>
#include <mutex>
+#include <libcamera/base/utils.h>
+
#include "../lux_status.h"
#include "../algorithm.hpp"
@@ -29,12 +30,12 @@ public:
private:
// These values define the conditions of the reference image, against
// which we compare the new image.
- double reference_shutter_speed_; // in micro-seconds
+ libcamera::utils::Duration reference_shutter_speed_;
double reference_gain_;
double reference_aperture_; // units of 1/f
double reference_Y_; // out of 65536
double reference_lux_;
- std::atomic<double> current_aperture_;
+ double current_aperture_;
LuxStatus status_;
std::mutex mutex_;
};
diff --git a/src/ipa/raspberrypi/controller/rpi/noise.cpp b/src/ipa/raspberrypi/controller/rpi/noise.cpp
index 9e9eaf1b..63cad639 100644
--- a/src/ipa/raspberrypi/controller/rpi/noise.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/noise.cpp
@@ -7,13 +7,17 @@
#include <math.h>
+#include <libcamera/base/log.h>
+
#include "../device_status.h"
-#include "../logging.hpp"
#include "../noise_status.h"
#include "noise.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiNoise)
#define NAME "rpi.noise"
@@ -37,7 +41,6 @@ void Noise::SwitchMode(CameraMode const &camera_mode,
void Noise::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
reference_constant_ = params.get<double>("reference_constant");
reference_slope_ = params.get<double>("reference_slope");
}
@@ -58,10 +61,11 @@ void Noise::Prepare(Metadata *image_metadata)
status.noise_constant = reference_constant_ * factor;
status.noise_slope = reference_slope_ * factor;
image_metadata->Set("noise.status", status);
- RPI_LOG(Name() << ": constant " << status.noise_constant
- << " slope " << status.noise_slope);
+ LOG(RPiNoise, Debug)
+ << "constant " << status.noise_constant
+ << " slope " << status.noise_slope;
} else
- RPI_WARN(Name() << " no metadata");
+ LOG(RPiNoise, Warning) << " no metadata";
}
// Register algorithm with the system.
diff --git a/src/ipa/raspberrypi/controller/rpi/noise.hpp b/src/ipa/raspberrypi/controller/rpi/noise.hpp
index 6f6e0be9..1c9de5c8 100644
--- a/src/ipa/raspberrypi/controller/rpi/noise.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/noise.hpp
@@ -26,7 +26,7 @@ private:
// the noise profile for analogue gain of 1.0
double reference_constant_;
double reference_slope_;
- std::atomic<double> mode_factor_;
+ double mode_factor_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/sdn.cpp b/src/ipa/raspberrypi/controller/rpi/sdn.cpp
index aa82830b..93845509 100644
--- a/src/ipa/raspberrypi/controller/rpi/sdn.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/sdn.cpp
@@ -1,16 +1,21 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
*
* sdn.cpp - SDN (spatial denoise) control algorithm
*/
+#include <libcamera/base/log.h>
+
+#include "../denoise_status.h"
#include "../noise_status.h"
-#include "../sdn_status.h"
#include "sdn.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiSdn)
// Calculate settings for the spatial denoise block using the noise profile in
// the image metadata.
@@ -18,7 +23,7 @@ using namespace RPiController;
#define NAME "rpi.sdn"
Sdn::Sdn(Controller *controller)
- : Algorithm(controller)
+ : DenoiseAlgorithm(controller), mode_(DenoiseMode::ColourOff)
{
}
@@ -40,19 +45,26 @@ void Sdn::Prepare(Metadata *image_metadata)
struct NoiseStatus noise_status = {};
noise_status.noise_slope = 3.0; // in case no metadata
if (image_metadata->Get("noise.status", noise_status) != 0)
- RPI_WARN("Sdn: no noise profile found");
- RPI_LOG("Noise profile: constant " << noise_status.noise_constant
- << " slope "
- << noise_status.noise_slope);
- struct SdnStatus status;
+ LOG(RPiSdn, Warning) << "no noise profile found";
+ LOG(RPiSdn, Debug)
+ << "Noise profile: constant " << noise_status.noise_constant
+ << " slope " << noise_status.noise_slope;
+ struct DenoiseStatus status;
status.noise_constant = noise_status.noise_constant * deviation_;
status.noise_slope = noise_status.noise_slope * deviation_;
status.strength = strength_;
- image_metadata->Set("sdn.status", status);
- RPI_LOG("Sdn: programmed constant " << status.noise_constant
- << " slope " << status.noise_slope
- << " strength "
- << status.strength);
+ status.mode = static_cast<std::underlying_type_t<DenoiseMode>>(mode_);
+ image_metadata->Set("denoise.status", status);
+ LOG(RPiSdn, Debug)
+ << "programmed constant " << status.noise_constant
+ << " slope " << status.noise_slope
+ << " strength " << status.strength;
+}
+
+void Sdn::SetMode(DenoiseMode mode)
+{
+ // We only distinguish between off and all other modes.
+ mode_ = mode;
}
// Register algorithm with the system.
diff --git a/src/ipa/raspberrypi/controller/rpi/sdn.hpp b/src/ipa/raspberrypi/controller/rpi/sdn.hpp
index 486c000d..2371ce04 100644
--- a/src/ipa/raspberrypi/controller/rpi/sdn.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/sdn.hpp
@@ -7,12 +7,13 @@
#pragma once
#include "../algorithm.hpp"
+#include "../denoise_algorithm.hpp"
namespace RPiController {
// Algorithm to calculate correct spatial denoise (SDN) settings.
-class Sdn : public Algorithm
+class Sdn : public DenoiseAlgorithm
{
public:
Sdn(Controller *controller = NULL);
@@ -20,10 +21,12 @@ public:
void Read(boost::property_tree::ptree const &params) override;
void Initialise() override;
void Prepare(Metadata *image_metadata) override;
+ void SetMode(DenoiseMode mode) override;
private:
double deviation_;
double strength_;
+ DenoiseMode mode_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/sharpen.cpp b/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
index c953a7d9..b0c2e00a 100644
--- a/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
@@ -7,12 +7,16 @@
#include <math.h>
-#include "../logging.hpp"
+#include <libcamera/base/log.h>
+
#include "../sharpen_status.h"
#include "sharpen.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiSharpen)
#define NAME "rpi.sharpen"
@@ -35,10 +39,13 @@ void Sharpen::SwitchMode(CameraMode const &camera_mode,
void Sharpen::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
threshold_ = params.get<double>("threshold", 1.0);
strength_ = params.get<double>("strength", 1.0);
limit_ = params.get<double>("limit", 1.0);
+ LOG(RPiSharpen, Debug)
+ << "Read threshold " << threshold_
+ << " strength " << strength_
+ << " limit " << limit_;
}
void Sharpen::SetStrength(double strength)
diff --git a/src/ipa/raspberrypi/controller/sdn_status.h b/src/ipa/raspberrypi/controller/sdn_status.h
deleted file mode 100644
index 871e0b62..00000000
--- a/src/ipa/raspberrypi/controller/sdn_status.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
- *
- * sdn_status.h - SDN (spatial denoise) control algorithm status
- */
-#pragma once
-
-// This stores the parameters required for Spatial Denoise (SDN).
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-struct SdnStatus {
- double noise_constant;
- double noise_slope;
- double strength;
-};
-
-#ifdef __cplusplus
-}
-#endif
diff --git a/src/ipa/raspberrypi/data/imx219.json b/src/ipa/raspberrypi/data/imx219.json
index ce7ff36f..1ec338be 100644
--- a/src/ipa/raspberrypi/data/imx219.json
+++ b/src/ipa/raspberrypi/data/imx219.json
@@ -133,18 +133,29 @@
{
"shutter":
[
- 100, 10000, 30000, 60000, 120000
+ 100, 10000, 30000, 60000, 66666
],
"gain":
[
1.0, 2.0, 4.0, 6.0, 6.0
]
},
- "sport":
+ "short":
{
"shutter":
[
- 100, 5000, 10000, 20000, 120000
+ 100, 5000, 10000, 20000, 33333
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ },
+ "long":
+ {
+ "shutter":
+ [
+ 100, 10000, 30000, 60000, 120000
],
"gain":
[
diff --git a/src/ipa/raspberrypi/data/imx290.json b/src/ipa/raspberrypi/data/imx290.json
new file mode 100644
index 00000000..6fb92cc4
--- /dev/null
+++ b/src/ipa/raspberrypi/data/imx290.json
@@ -0,0 +1,165 @@
+{
+ "rpi.black_level":
+ {
+ "black_level": 3840
+ },
+ "rpi.dpc":
+ {
+ },
+ "rpi.lux":
+ {
+ "reference_shutter_speed": 6813,
+ "reference_gain": 1.0,
+ "reference_aperture": 1.0,
+ "reference_lux": 890,
+ "reference_Y": 12900
+ },
+ "rpi.noise":
+ {
+ "reference_constant": 0,
+ "reference_slope": 2.67
+ },
+ "rpi.geq":
+ {
+ "offset": 187,
+ "slope": 0.00842
+ },
+ "rpi.sdn":
+ {
+ },
+ "rpi.awb":
+ {
+ "bayes": 0
+ },
+ "rpi.agc":
+ {
+ "speed": 0.2,
+ "metering_modes":
+ {
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ },
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter":
+ [
+ 10, 30000, 60000
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 8.0
+ ]
+ },
+ "sport":
+ {
+ "shutter":
+ [
+ 10, 5000, 10000, 20000, 120000
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal":
+ [
+ ],
+ "highlight":
+ [
+ {
+ "bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target":
+ [
+ 0, 0.5, 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER", "q_lo": 0.98, "q_hi": 1.0, "y_target":
+ [
+ 0, 0.8, 1000, 0.8
+ ]
+ }
+ ]
+ },
+ "y_target":
+ [
+ 0, 0.16, 1000, 0.16, 10000, 0.16
+ ]
+ },
+ "rpi.alsc":
+ {
+ "omega": 1.3,
+ "n_iter": 100,
+ "luminance_strength": 0.7,
+ "luminance_lut":
+ [
+ 2.844, 2.349, 2.018, 1.775, 1.599, 1.466, 1.371, 1.321, 1.306, 1.316, 1.357, 1.439, 1.552, 1.705, 1.915, 2.221,
+ 2.576, 2.151, 1.851, 1.639, 1.478, 1.358, 1.272, 1.231, 1.218, 1.226, 1.262, 1.335, 1.438, 1.571, 1.766, 2.067,
+ 2.381, 2.005, 1.739, 1.545, 1.389, 1.278, 1.204, 1.166, 1.153, 1.161, 1.194, 1.263, 1.356, 1.489, 1.671, 1.943,
+ 2.242, 1.899, 1.658, 1.481, 1.329, 1.225, 1.156, 1.113, 1.096, 1.107, 1.143, 1.201, 1.289, 1.423, 1.607, 1.861,
+ 2.152, 1.831, 1.602, 1.436, 1.291, 1.193, 1.121, 1.069, 1.047, 1.062, 1.107, 1.166, 1.249, 1.384, 1.562, 1.801,
+ 2.104, 1.795, 1.572, 1.407, 1.269, 1.174, 1.099, 1.041, 1.008, 1.029, 1.083, 1.146, 1.232, 1.364, 1.547, 1.766,
+ 2.104, 1.796, 1.572, 1.403, 1.264, 1.171, 1.097, 1.036, 1.001, 1.025, 1.077, 1.142, 1.231, 1.363, 1.549, 1.766,
+ 2.148, 1.827, 1.594, 1.413, 1.276, 1.184, 1.114, 1.062, 1.033, 1.049, 1.092, 1.153, 1.242, 1.383, 1.577, 1.795,
+ 2.211, 1.881, 1.636, 1.455, 1.309, 1.214, 1.149, 1.104, 1.081, 1.089, 1.125, 1.184, 1.273, 1.423, 1.622, 1.846,
+ 2.319, 1.958, 1.698, 1.516, 1.362, 1.262, 1.203, 1.156, 1.137, 1.142, 1.171, 1.229, 1.331, 1.484, 1.682, 1.933,
+ 2.459, 2.072, 1.789, 1.594, 1.441, 1.331, 1.261, 1.219, 1.199, 1.205, 1.232, 1.301, 1.414, 1.571, 1.773, 2.052,
+ 2.645, 2.206, 1.928, 1.728, 1.559, 1.451, 1.352, 1.301, 1.282, 1.289, 1.319, 1.395, 1.519, 1.685, 1.904, 2.227
+ ],
+ "sigma": 0.005,
+ "sigma_Cb": 0.005
+ },
+ "rpi.contrast":
+ {
+ "ce_enable": 1,
+ "gamma_curve":
+ [
+ 0, 0, 1024, 5040, 2048, 9338, 3072, 12356, 4096, 15312, 5120, 18051, 6144, 20790, 7168, 23193,
+ 8192, 25744, 9216, 27942, 10240, 30035, 11264, 32005, 12288, 33975, 13312, 35815, 14336, 37600, 15360, 39168,
+ 16384, 40642, 18432, 43379, 20480, 45749, 22528, 47753, 24576, 49621, 26624, 51253, 28672, 52698, 30720, 53796,
+ 32768, 54876, 36864, 57012, 40960, 58656, 45056, 59954, 49152, 61183, 53248, 62355, 57344, 63419, 61440, 64476,
+ 65535, 65535
+ ]
+ },
+ "rpi.sharpen":
+ {
+ },
+ "rpi.ccm":
+ {
+ "ccms":
+ [
+ {
+ "ct": 3900, "ccm":
+ [
+ 1.54659, -0.17707, -0.36953, -0.51471, 1.72733, -0.21262, 0.06667, -0.92279, 1.85612
+ ]
+ }
+ ]
+ },
+ "rpi.focus":
+ {
+ }
+}
diff --git a/src/ipa/raspberrypi/data/imx477.json b/src/ipa/raspberrypi/data/imx477.json
index 73ad1ae9..9bee3f16 100644
--- a/src/ipa/raspberrypi/data/imx477.json
+++ b/src/ipa/raspberrypi/data/imx477.json
@@ -133,18 +133,29 @@
{
"shutter":
[
- 100, 10000, 30000, 60000, 120000
+ 100, 10000, 30000, 60000, 66666
],
"gain":
[
1.0, 2.0, 4.0, 6.0, 6.0
]
},
- "sport":
+ "short":
{
"shutter":
[
- 100, 5000, 10000, 20000, 120000
+ 100, 5000, 10000, 20000, 33333
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ },
+ "long":
+ {
+ "shutter":
+ [
+ 100, 10000, 30000, 60000, 120000
],
"gain":
[
diff --git a/src/ipa/raspberrypi/data/meson.build b/src/ipa/raspberrypi/data/meson.build
index 5236bf1e..abb1f928 100644
--- a/src/ipa/raspberrypi/data/meson.build
+++ b/src/ipa/raspberrypi/data/meson.build
@@ -2,10 +2,13 @@
conf_files = files([
'imx219.json',
+ 'imx290.json',
'imx477.json',
'ov5647.json',
+ 'ov9281.json',
+ 'se327m12.json',
'uncalibrated.json',
])
install_data(conf_files,
- install_dir : join_paths(ipa_data_dir, 'raspberrypi'))
+ install_dir : ipa_data_dir / 'raspberrypi')
diff --git a/src/ipa/raspberrypi/data/ov5647.json b/src/ipa/raspberrypi/data/ov5647.json
index a2469059..1a354f7c 100644
--- a/src/ipa/raspberrypi/data/ov5647.json
+++ b/src/ipa/raspberrypi/data/ov5647.json
@@ -133,18 +133,29 @@
{
"shutter":
[
- 100, 10000, 30000, 30000, 30000
+ 100, 10000, 30000, 60000, 66666
],
"gain":
[
1.0, 2.0, 4.0, 6.0, 6.0
]
},
- "sport":
+ "short":
{
"shutter":
[
- 100, 5000, 10000, 20000, 30000
+ 100, 5000, 10000, 20000, 33333
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ },
+ "long":
+ {
+ "shutter":
+ [
+ 100, 10000, 30000, 60000, 120000
],
"gain":
[
diff --git a/src/ipa/raspberrypi/data/ov9281.json b/src/ipa/raspberrypi/data/ov9281.json
new file mode 100644
index 00000000..ecd262be
--- /dev/null
+++ b/src/ipa/raspberrypi/data/ov9281.json
@@ -0,0 +1,92 @@
+{
+ "rpi.black_level":
+ {
+ "black_level": 4096
+ },
+ "rpi.lux":
+ {
+ "reference_shutter_speed": 2000,
+ "reference_gain": 1.0,
+ "reference_aperture": 1.0,
+ "reference_lux": 800,
+ "reference_Y": 20000
+ },
+ "rpi.noise":
+ {
+ "reference_constant": 0,
+ "reference_slope": 2.5
+ },
+ "rpi.sdn":
+ {
+ },
+ "rpi.agc":
+ {
+ "metering_modes":
+ {
+ "centre-weighted": {
+ "weights": [4, 4, 4, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter": [ 100, 15000, 30000, 60000, 120000 ],
+ "gain": [ 1.0, 2.0, 3.0, 4.0, 6.0 ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal":
+ [
+ { "bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target": [ 0, 0.4, 1000, 0.4 ] }
+ ]
+ },
+ "y_target": [ 0, 0.16, 1000, 0.165, 10000, 0.17 ]
+ },
+ "rpi.alsc":
+ {
+ "n_iter": 0,
+ "luminance_strength": 1.0,
+ "corner_strength": 1.5
+ },
+ "rpi.contrast":
+ {
+ "ce_enable": 0,
+ "gamma_curve": [
+ 0, 0,
+ 1024, 5040,
+ 2048, 9338,
+ 3072, 12356,
+ 4096, 15312,
+ 5120, 18051,
+ 6144, 20790,
+ 7168, 23193,
+ 8192, 25744,
+ 9216, 27942,
+ 10240, 30035,
+ 11264, 32005,
+ 12288, 33975,
+ 13312, 35815,
+ 14336, 37600,
+ 15360, 39168,
+ 16384, 40642,
+ 18432, 43379,
+ 20480, 45749,
+ 22528, 47753,
+ 24576, 49621,
+ 26624, 51253,
+ 28672, 52698,
+ 30720, 53796,
+ 32768, 54876,
+ 36864, 57012,
+ 40960, 58656,
+ 45056, 59954,
+ 49152, 61183,
+ 53248, 62355,
+ 57344, 63419,
+ 61440, 64476,
+ 65535, 65535
+ ]
+ }
+}
diff --git a/src/ipa/raspberrypi/data/se327m12.json b/src/ipa/raspberrypi/data/se327m12.json
new file mode 100644
index 00000000..3245ed98
--- /dev/null
+++ b/src/ipa/raspberrypi/data/se327m12.json
@@ -0,0 +1,341 @@
+{
+ "rpi.black_level":
+ {
+ "black_level": 3840
+ },
+ "rpi.dpc":
+ {
+ },
+ "rpi.lux":
+ {
+ "reference_shutter_speed": 6873,
+ "reference_gain": 1.0,
+ "reference_aperture": 1.0,
+ "reference_lux": 800,
+ "reference_Y": 12293
+ },
+ "rpi.noise":
+ {
+ "reference_constant": 0,
+ "reference_slope": 1.986
+ },
+ "rpi.geq":
+ {
+ "offset": 207,
+ "slope": 0.00539
+ },
+ "rpi.sdn":
+ {
+ },
+ "rpi.awb":
+ {
+ "priors":
+ [
+ {
+ "lux": 0, "prior":
+ [
+ 2000, 1.0, 3000, 0.0, 13000, 0.0
+ ]
+ },
+ {
+ "lux": 800, "prior":
+ [
+ 2000, 0.0, 6000, 2.0, 13000, 2.0
+ ]
+ },
+ {
+ "lux": 1500, "prior":
+ [
+ 2000, 0.0, 4000, 1.0, 6000, 6.0, 6500, 7.0, 7000, 1.0, 13000, 1.0
+ ]
+ }
+ ],
+ "modes":
+ {
+ "auto":
+ {
+ "lo": 2500,
+ "hi": 8000
+ },
+ "incandescent":
+ {
+ "lo": 2500,
+ "hi": 3000
+ },
+ "tungsten":
+ {
+ "lo": 3000,
+ "hi": 3500
+ },
+ "fluorescent":
+ {
+ "lo": 4000,
+ "hi": 4700
+ },
+ "indoor":
+ {
+ "lo": 3000,
+ "hi": 5000
+ },
+ "daylight":
+ {
+ "lo": 5500,
+ "hi": 6500
+ },
+ "cloudy":
+ {
+ "lo": 7000,
+ "hi": 8600
+ }
+ },
+ "bayes": 1,
+ "ct_curve":
+ [
+ 2900.0, 0.9217, 0.3657, 3600.0, 0.7876, 0.4651, 4600.0, 0.6807, 0.5684, 5800.0, 0.5937, 0.6724, 8100.0, 0.5447, 0.7403
+ ],
+ "sensitivity_r": 1.0,
+ "sensitivity_b": 1.0,
+ "transverse_pos": 0.0162,
+ "transverse_neg": 0.0204
+ },
+ "rpi.agc":
+ {
+ "metering_modes":
+ {
+ "centre-weighted":
+ {
+ "weights":
+ [
+ 3, 3, 3, 2, 2, 2, 2, 1, 1, 1, 1, 0, 0, 0, 0
+ ]
+ },
+ "spot":
+ {
+ "weights":
+ [
+ 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+ ]
+ },
+ "matrix":
+ {
+ "weights":
+ [
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
+ ]
+ }
+ },
+ "exposure_modes":
+ {
+ "normal":
+ {
+ "shutter":
+ [
+ 100, 10000, 30000, 60000, 120000
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ },
+ "short":
+ {
+ "shutter":
+ [
+ 100, 5000, 10000, 20000, 120000
+ ],
+ "gain":
+ [
+ 1.0, 2.0, 4.0, 6.0, 6.0
+ ]
+ }
+ },
+ "constraint_modes":
+ {
+ "normal":
+ [
+ {
+ "bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target":
+ [
+ 0, 0.5, 1000, 0.5
+ ]
+ }
+ ],
+ "highlight":
+ [
+ {
+ "bound": "LOWER", "q_lo": 0.98, "q_hi": 1.0, "y_target":
+ [
+ 0, 0.5, 1000, 0.5
+ ]
+ },
+ {
+ "bound": "UPPER", "q_lo": 0.98, "q_hi": 1.0, "y_target":
+ [
+ 0, 0.8, 1000, 0.8
+ ]
+ }
+ ]
+ },
+ "y_target":
+ [
+ 0, 0.16, 1000, 0.165, 10000, 0.17
+ ]
+ },
+ "rpi.alsc":
+ {
+ "omega": 1.3,
+ "n_iter": 100,
+ "luminance_strength": 0.5,
+ "calibrations_Cr":
+ [
+ {
+ "ct": 4000, "table":
+ [
+ 1.481, 1.471, 1.449, 1.429, 1.416, 1.404, 1.394, 1.389, 1.389, 1.389, 1.392, 1.397, 1.404, 1.416, 1.429, 1.437,
+ 1.472, 1.456, 1.436, 1.418, 1.405, 1.394, 1.389, 1.384, 1.382, 1.382, 1.386, 1.388, 1.398, 1.407, 1.422, 1.429,
+ 1.465, 1.443, 1.426, 1.411, 1.397, 1.389, 1.383, 1.377, 1.377, 1.377, 1.379, 1.384, 1.388, 1.398, 1.411, 1.422,
+ 1.462, 1.441, 1.423, 1.409, 1.395, 1.385, 1.379, 1.376, 1.374, 1.374, 1.375, 1.379, 1.384, 1.394, 1.407, 1.418,
+ 1.461, 1.439, 1.421, 1.407, 1.394, 1.385, 1.381, 1.376, 1.373, 1.373, 1.373, 1.376, 1.381, 1.389, 1.403, 1.415,
+ 1.461, 1.439, 1.419, 1.404, 1.392, 1.384, 1.379, 1.376, 1.373, 1.372, 1.374, 1.375, 1.379, 1.389, 1.401, 1.413,
+ 1.461, 1.438, 1.419, 1.402, 1.389, 1.383, 1.377, 1.375, 1.373, 1.372, 1.372, 1.375, 1.381, 1.388, 1.401, 1.414,
+ 1.462, 1.438, 1.419, 1.403, 1.391, 1.381, 1.377, 1.374, 1.373, 1.373, 1.374, 1.376, 1.381, 1.389, 1.401, 1.414,
+ 1.462, 1.441, 1.423, 1.405, 1.392, 1.383, 1.377, 1.374, 1.373, 1.372, 1.373, 1.376, 1.382, 1.391, 1.402, 1.414,
+ 1.465, 1.444, 1.424, 1.407, 1.393, 1.382, 1.378, 1.373, 1.369, 1.369, 1.372, 1.375, 1.381, 1.389, 1.402, 1.417,
+ 1.469, 1.449, 1.427, 1.413, 1.396, 1.384, 1.381, 1.375, 1.371, 1.371, 1.373, 1.377, 1.385, 1.393, 1.407, 1.422,
+ 1.474, 1.456, 1.436, 1.419, 1.407, 1.391, 1.383, 1.379, 1.377, 1.377, 1.378, 1.381, 1.391, 1.404, 1.422, 1.426
+ ]
+ },
+ {
+ "ct": 5000, "table":
+ [
+ 1.742, 1.721, 1.689, 1.661, 1.639, 1.623, 1.613, 1.609, 1.607, 1.606, 1.609, 1.617, 1.626, 1.641, 1.665, 1.681,
+ 1.728, 1.703, 1.672, 1.645, 1.631, 1.614, 1.602, 1.599, 1.596, 1.597, 1.601, 1.608, 1.618, 1.631, 1.653, 1.671,
+ 1.713, 1.691, 1.658, 1.635, 1.618, 1.606, 1.595, 1.591, 1.588, 1.588, 1.591, 1.601, 1.608, 1.624, 1.641, 1.658,
+ 1.707, 1.681, 1.651, 1.627, 1.613, 1.599, 1.591, 1.585, 1.583, 1.584, 1.587, 1.591, 1.601, 1.615, 1.633, 1.655,
+ 1.699, 1.672, 1.644, 1.622, 1.606, 1.593, 1.586, 1.581, 1.579, 1.581, 1.583, 1.587, 1.597, 1.611, 1.631, 1.652,
+ 1.697, 1.665, 1.637, 1.617, 1.601, 1.589, 1.584, 1.579, 1.577, 1.578, 1.581, 1.585, 1.597, 1.607, 1.627, 1.652,
+ 1.697, 1.662, 1.634, 1.613, 1.599, 1.591, 1.583, 1.578, 1.576, 1.576, 1.579, 1.586, 1.597, 1.607, 1.628, 1.653,
+ 1.697, 1.662, 1.633, 1.613, 1.598, 1.589, 1.582, 1.578, 1.576, 1.577, 1.582, 1.589, 1.598, 1.611, 1.635, 1.655,
+ 1.701, 1.666, 1.636, 1.616, 1.602, 1.589, 1.583, 1.578, 1.577, 1.581, 1.583, 1.591, 1.601, 1.617, 1.639, 1.659,
+ 1.708, 1.671, 1.641, 1.618, 1.603, 1.591, 1.584, 1.581, 1.578, 1.581, 1.585, 1.594, 1.604, 1.622, 1.646, 1.666,
+ 1.714, 1.681, 1.648, 1.622, 1.608, 1.599, 1.591, 1.584, 1.583, 1.584, 1.589, 1.599, 1.614, 1.629, 1.653, 1.673,
+ 1.719, 1.691, 1.659, 1.631, 1.618, 1.606, 1.596, 1.591, 1.591, 1.593, 1.599, 1.608, 1.623, 1.642, 1.665, 1.681
+ ]
+ }
+ ],
+ "calibrations_Cb":
+ [
+ {
+ "ct": 4000, "table":
+ [
+ 2.253, 2.267, 2.289, 2.317, 2.342, 2.359, 2.373, 2.381, 2.381, 2.378, 2.368, 2.361, 2.344, 2.337, 2.314, 2.301,
+ 2.262, 2.284, 2.314, 2.335, 2.352, 2.371, 2.383, 2.391, 2.393, 2.391, 2.381, 2.368, 2.361, 2.342, 2.322, 2.308,
+ 2.277, 2.303, 2.321, 2.346, 2.364, 2.381, 2.391, 2.395, 2.397, 2.397, 2.395, 2.381, 2.367, 2.354, 2.332, 2.321,
+ 2.277, 2.304, 2.327, 2.349, 2.369, 2.388, 2.393, 2.396, 2.396, 2.398, 2.396, 2.391, 2.376, 2.359, 2.339, 2.328,
+ 2.279, 2.311, 2.327, 2.354, 2.377, 2.389, 2.393, 2.397, 2.397, 2.398, 2.395, 2.393, 2.382, 2.363, 2.344, 2.332,
+ 2.282, 2.311, 2.329, 2.354, 2.377, 2.386, 2.396, 2.396, 2.395, 2.396, 2.397, 2.394, 2.383, 2.367, 2.346, 2.333,
+ 2.283, 2.314, 2.333, 2.353, 2.375, 2.389, 2.394, 2.395, 2.395, 2.395, 2.396, 2.394, 2.386, 2.368, 2.354, 2.336,
+ 2.287, 2.309, 2.331, 2.352, 2.373, 2.386, 2.394, 2.395, 2.395, 2.396, 2.396, 2.394, 2.384, 2.371, 2.354, 2.339,
+ 2.289, 2.307, 2.326, 2.347, 2.369, 2.385, 2.392, 2.397, 2.398, 2.398, 2.397, 2.392, 2.383, 2.367, 2.352, 2.337,
+ 2.286, 2.303, 2.322, 2.342, 2.361, 2.379, 2.389, 2.394, 2.397, 2.398, 2.396, 2.389, 2.381, 2.366, 2.346, 2.332,
+ 2.284, 2.291, 2.312, 2.329, 2.351, 2.372, 2.381, 2.389, 2.393, 2.394, 2.389, 2.385, 2.374, 2.362, 2.338, 2.325,
+ 2.283, 2.288, 2.305, 2.319, 2.339, 2.365, 2.374, 2.381, 2.384, 2.386, 2.385, 2.379, 2.368, 2.342, 2.325, 2.318
+ ]
+ },
+ {
+ "ct": 5000, "table":
+ [
+ 1.897, 1.919, 1.941, 1.969, 1.989, 2.003, 2.014, 2.019, 2.019, 2.017, 2.014, 2.008, 1.999, 1.988, 1.968, 1.944,
+ 1.914, 1.932, 1.957, 1.982, 1.998, 2.014, 2.023, 2.029, 2.031, 2.029, 2.022, 2.014, 2.006, 1.995, 1.976, 1.955,
+ 1.925, 1.951, 1.974, 1.996, 2.013, 2.027, 2.035, 2.039, 2.039, 2.038, 2.035, 2.026, 2.015, 2.002, 1.984, 1.963,
+ 1.932, 1.958, 1.986, 2.007, 2.024, 2.034, 2.041, 2.041, 2.045, 2.045, 2.042, 2.033, 2.023, 2.009, 1.995, 1.971,
+ 1.942, 1.964, 1.994, 2.012, 2.029, 2.038, 2.043, 2.046, 2.047, 2.046, 2.045, 2.039, 2.029, 2.014, 1.997, 1.977,
+ 1.946, 1.974, 1.999, 2.015, 2.031, 2.041, 2.046, 2.047, 2.048, 2.047, 2.044, 2.041, 2.031, 2.019, 1.999, 1.978,
+ 1.948, 1.975, 2.002, 2.018, 2.031, 2.041, 2.046, 2.047, 2.048, 2.048, 2.045, 2.041, 2.029, 2.019, 1.998, 1.978,
+ 1.948, 1.973, 2.002, 2.018, 2.029, 2.042, 2.045, 2.048, 2.048, 2.048, 2.044, 2.037, 2.027, 2.014, 1.993, 1.978,
+ 1.945, 1.969, 1.998, 2.015, 2.028, 2.037, 2.045, 2.046, 2.047, 2.044, 2.039, 2.033, 2.022, 2.008, 1.989, 1.971,
+ 1.939, 1.964, 1.991, 2.011, 2.024, 2.032, 2.036, 2.042, 2.042, 2.039, 2.035, 2.024, 2.012, 1.998, 1.977, 1.964,
+ 1.932, 1.953, 1.981, 2.006, 2.016, 2.024, 2.028, 2.031, 2.034, 2.031, 2.024, 2.015, 2.005, 1.989, 1.966, 1.955,
+ 1.928, 1.944, 1.973, 1.999, 2.007, 2.016, 2.019, 2.025, 2.026, 2.025, 2.017, 2.008, 1.997, 1.975, 1.958, 1.947
+ ]
+ }
+ ],
+ "luminance_lut":
+ [
+ 1.877, 1.597, 1.397, 1.269, 1.191, 1.131, 1.093, 1.078, 1.071, 1.069, 1.086, 1.135, 1.221, 1.331, 1.474, 1.704,
+ 1.749, 1.506, 1.334, 1.229, 1.149, 1.088, 1.058, 1.053, 1.051, 1.046, 1.053, 1.091, 1.163, 1.259, 1.387, 1.587,
+ 1.661, 1.451, 1.295, 1.195, 1.113, 1.061, 1.049, 1.048, 1.047, 1.049, 1.049, 1.066, 1.124, 1.211, 1.333, 1.511,
+ 1.615, 1.411, 1.267, 1.165, 1.086, 1.052, 1.047, 1.047, 1.047, 1.049, 1.052, 1.056, 1.099, 1.181, 1.303, 1.471,
+ 1.576, 1.385, 1.252, 1.144, 1.068, 1.049, 1.044, 1.044, 1.045, 1.049, 1.053, 1.054, 1.083, 1.163, 1.283, 1.447,
+ 1.561, 1.373, 1.245, 1.135, 1.064, 1.049, 1.044, 1.044, 1.044, 1.046, 1.048, 1.054, 1.073, 1.153, 1.271, 1.432,
+ 1.571, 1.377, 1.242, 1.137, 1.066, 1.055, 1.052, 1.051, 1.051, 1.049, 1.047, 1.048, 1.068, 1.148, 1.271, 1.427,
+ 1.582, 1.396, 1.259, 1.156, 1.085, 1.068, 1.059, 1.054, 1.049, 1.045, 1.041, 1.043, 1.074, 1.157, 1.284, 1.444,
+ 1.623, 1.428, 1.283, 1.178, 1.105, 1.074, 1.069, 1.063, 1.056, 1.048, 1.046, 1.051, 1.094, 1.182, 1.311, 1.473,
+ 1.691, 1.471, 1.321, 1.213, 1.135, 1.088, 1.073, 1.069, 1.063, 1.059, 1.053, 1.071, 1.129, 1.222, 1.351, 1.521,
+ 1.808, 1.543, 1.371, 1.253, 1.174, 1.118, 1.085, 1.072, 1.067, 1.064, 1.071, 1.106, 1.176, 1.274, 1.398, 1.582,
+ 1.969, 1.666, 1.447, 1.316, 1.223, 1.166, 1.123, 1.094, 1.089, 1.097, 1.118, 1.163, 1.239, 1.336, 1.471, 1.681
+ ],
+ "sigma": 0.00218,
+ "sigma_Cb": 0.00194
+ },
+ "rpi.contrast":
+ {
+ "ce_enable": 1,
+ "gamma_curve":
+ [
+ 0, 0, 1024, 5040, 2048, 9338, 3072, 12356, 4096, 15312, 5120, 18051, 6144, 20790, 7168, 23193,
+ 8192, 25744, 9216, 27942, 10240, 30035, 11264, 32005, 12288, 33975, 13312, 35815, 14336, 37600, 15360, 39168,
+ 16384, 40642, 18432, 43379, 20480, 45749, 22528, 47753, 24576, 49621, 26624, 51253, 28672, 52698, 30720, 53796,
+ 32768, 54876, 36864, 57012, 40960, 58656, 45056, 59954, 49152, 61183, 53248, 62355, 57344, 63419, 61440, 64476,
+ 65535, 65535
+ ]
+ },
+ "rpi.ccm":
+ {
+ "ccms":
+ [
+ {
+ "ct": 2900, "ccm":
+ [
+ 1.44924, -0.12935, -0.31989, -0.65839, 1.95441, -0.29602, 0.18344, -1.22282, 2.03938
+ ]
+ },
+ {
+ "ct": 3000, "ccm":
+ [
+ 1.38736, 0.07714, -0.46451, -0.59691, 1.84335, -0.24644, 0.10092, -1.30441, 2.20349
+ ]
+ },
+ {
+ "ct": 3600, "ccm":
+ [
+ 1.51261, -0.27921, -0.23339, -0.55129, 1.83241, -0.28111, 0.11649, -0.93195, 1.81546
+ ]
+ },
+ {
+ "ct": 4600, "ccm":
+ [
+ 1.47082, -0.18523, -0.28559, -0.48923, 1.95126, -0.46203, 0.07951, -0.83987, 1.76036
+ ]
+ },
+ {
+ "ct": 5800, "ccm":
+ [
+ 1.57294, -0.36229, -0.21065, -0.42272, 1.80305, -0.38032, 0.03671, -0.66862, 1.63191
+ ]
+ },
+ {
+ "ct": 8100, "ccm":
+ [
+ 1.58803, -0.09912, -0.48891, -0.42594, 2.22303, -0.79709, -0.00621, -0.90516, 1.91137
+ ]
+ }
+ ]
+ },
+ "rpi.sharpen":
+ {
+ "threshold": 2.0,
+ "strength": 0.5,
+ "limit": 0.5
+ }
+}
diff --git a/src/ipa/raspberrypi/md_parser.cpp b/src/ipa/raspberrypi/md_parser.cpp
deleted file mode 100644
index d82c102c..00000000
--- a/src/ipa/raspberrypi/md_parser.cpp
+++ /dev/null
@@ -1,101 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
- *
- * md_parser.cpp - image sensor metadata parsers
- */
-
-#include <assert.h>
-#include <map>
-#include <string.h>
-
-#include "md_parser.hpp"
-
-using namespace RPiController;
-
-// This function goes through the embedded data to find the offsets (not
-// values!), in the data block, where the values of the given registers can
-// subsequently be found.
-
-// Embedded data tag bytes, from Sony IMX219 datasheet but general to all SMIA
-// sensors, I think.
-
-#define LINE_START 0x0a
-#define LINE_END_TAG 0x07
-#define REG_HI_BITS 0xaa
-#define REG_LOW_BITS 0xa5
-#define REG_VALUE 0x5a
-#define REG_SKIP 0x55
-
-MdParserSmia::ParseStatus MdParserSmia::findRegs(unsigned char *data,
- uint32_t regs[], int offsets[],
- unsigned int num_regs)
-{
- assert(num_regs > 0);
- if (data[0] != LINE_START)
- return NO_LINE_START;
-
- unsigned int current_offset = 1; // after the LINE_START
- unsigned int current_line_start = 0, current_line = 0;
- unsigned int reg_num = 0, first_reg = 0;
- ParseStatus retcode = PARSE_OK;
- while (1) {
- int tag = data[current_offset++];
- if ((bits_per_pixel_ == 10 &&
- (current_offset + 1 - current_line_start) % 5 == 0) ||
- (bits_per_pixel_ == 12 &&
- (current_offset + 1 - current_line_start) % 3 == 0)) {
- if (data[current_offset++] != REG_SKIP)
- return BAD_DUMMY;
- }
- int data_byte = data[current_offset++];
- //printf("Offset %u, tag 0x%02x data_byte 0x%02x\n", current_offset-1, tag, data_byte);
- if (tag == LINE_END_TAG) {
- if (data_byte != LINE_END_TAG)
- return BAD_LINE_END;
- if (num_lines_ && ++current_line == num_lines_)
- return MISSING_REGS;
- if (line_length_bytes_) {
- current_offset =
- current_line_start + line_length_bytes_;
- // Require whole line to be in the buffer (if buffer size set).
- if (buffer_size_bytes_ &&
- current_offset + line_length_bytes_ >
- buffer_size_bytes_)
- return MISSING_REGS;
- if (data[current_offset] != LINE_START)
- return NO_LINE_START;
- } else {
- // allow a zero line length to mean "hunt for the next line"
- while (data[current_offset] != LINE_START &&
- current_offset < buffer_size_bytes_)
- current_offset++;
- if (current_offset == buffer_size_bytes_)
- return NO_LINE_START;
- }
- // inc current_offset to after LINE_START
- current_line_start =
- current_offset++;
- } else {
- if (tag == REG_HI_BITS)
- reg_num = (reg_num & 0xff) | (data_byte << 8);
- else if (tag == REG_LOW_BITS)
- reg_num = (reg_num & 0xff00) | data_byte;
- else if (tag == REG_SKIP)
- reg_num++;
- else if (tag == REG_VALUE) {
- while (reg_num >=
- // assumes registers are in order...
- regs[first_reg]) {
- if (reg_num == regs[first_reg])
- offsets[first_reg] =
- current_offset - 1;
- if (++first_reg == num_regs)
- return retcode;
- }
- reg_num++;
- } else
- return ILLEGAL_TAG;
- }
- }
-}
diff --git a/src/ipa/raspberrypi/md_parser.hpp b/src/ipa/raspberrypi/md_parser.hpp
index c9db62c0..e3e27385 100644
--- a/src/ipa/raspberrypi/md_parser.hpp
+++ b/src/ipa/raspberrypi/md_parser.hpp
@@ -6,107 +6,137 @@
*/
#pragma once
+#include <initializer_list>
+#include <map>
+#include <optional>
#include <stdint.h>
-/* Camera metadata parser class. Usage as shown below.
+#include <libcamera/base/span.h>
-Setup:
-
-Usually the metadata parser will be made as part of the CamHelper class so
-application code doesn't have to worry which to kind to instantiate. But for
-the sake of example let's suppose we're parsing imx219 metadata.
-
-MdParser *parser = new MdParserImx219(); // for example
-parser->SetBitsPerPixel(bpp);
-parser->SetLineLengthBytes(pitch);
-parser->SetNumLines(2);
-
-Note 1: if you don't know how many lines there are, you can use SetBufferSize
-instead to limit the total buffer size.
-
-Note 2: if you don't know the line length, you can leave the line length unset
-(or set to zero) and the parser will hunt for the line start instead. In this
-case SetBufferSize *must* be used so that the parser won't run off the end of
-the buffer.
-
-Then on every frame:
-
-if (parser->Parse(data) != MdParser::OK)
- much badness;
-unsigned int exposure_lines, gain_code
-if (parser->GetExposureLines(exposure_lines) != MdParser::OK)
- exposure was not found;
-if (parser->GetGainCode(parser, gain_code) != MdParser::OK)
- gain code was not found;
-
-(Note that the CamHelper class converts to/from exposure lines and time,
-and gain_code / actual gain.)
-
-If you suspect your embedded data may have changed its layout, change any line
-lengths, number of lines, bits per pixel etc. that are different, and
-then:
-
-parser->Reset();
-
-before calling Parse again. */
+/*
+ * Camera metadata parser class. Usage as shown below.
+ *
+ * Setup:
+ *
+ * Usually the metadata parser will be made as part of the CamHelper class so
+ * application code doesn't have to worry which kind to instantiate. But for
+ * the sake of example let's suppose we're parsing imx219 metadata.
+ *
+ * MdParser *parser = new MdParserSmia({ expHiReg, expLoReg, gainReg });
+ * parser->SetBitsPerPixel(bpp);
+ * parser->SetLineLengthBytes(pitch);
+ * parser->SetNumLines(2);
+ *
+ * Note 1: if you don't know how many lines there are, the size of the input
+ * buffer is used as a limit instead.
+ *
+ * Note 2: if you don't know the line length, you can leave the line length unset
+ * (or set to zero) and the parser will hunt for the line start instead.
+ *
+ * Then on every frame:
+ *
+ * RegisterMap registers;
+ * if (parser->Parse(buffer, registers) != MdParser::OK)
+ * much badness;
+ * Metadata metadata;
+ * CamHelper::PopulateMetadata(registers, metadata);
+ *
+ * (Note that the CamHelper class converts to/from exposure lines and time,
+ * and gain_code / actual gain.)
+ *
+ * If you suspect your embedded data may have changed its layout, change any line
+ * lengths, number of lines, bits per pixel etc. that are different, and
+ * then:
+ *
+ * parser->Reset();
+ *
+ * before calling Parse again.
+ */
namespace RPiController {
-// Abstract base class from which other metadata parsers are derived.
+/* Abstract base class from which other metadata parsers are derived. */
class MdParser
{
public:
- // Parser status codes:
- // OK - success
- // NOTFOUND - value such as exposure or gain was not found
- // ERROR - all other errors
+ using RegisterMap = std::map<uint32_t, uint32_t>;
+
+ /*
+ * Parser status codes:
+ * OK - success
+ * NOTFOUND - value such as exposure or gain was not found
+ * ERROR - all other errors
+ */
enum Status {
OK = 0,
NOTFOUND = 1,
ERROR = 2
};
- MdParser() : reset_(true) {}
- virtual ~MdParser() {}
- void Reset() { reset_ = true; }
- void SetBitsPerPixel(int bpp) { bits_per_pixel_ = bpp; }
- void SetNumLines(unsigned int num_lines) { num_lines_ = num_lines; }
- void SetLineLengthBytes(unsigned int num_bytes)
+
+ MdParser()
+ : reset_(true), bits_per_pixel_(0), num_lines_(0), line_length_bytes_(0)
{
- line_length_bytes_ = num_bytes;
}
- void SetBufferSize(unsigned int num_bytes)
+
+ virtual ~MdParser() = default;
+
+ void Reset()
+ {
+ reset_ = true;
+ }
+
+ void SetBitsPerPixel(int bpp)
+ {
+ bits_per_pixel_ = bpp;
+ }
+
+ void SetNumLines(unsigned int num_lines)
{
- buffer_size_bytes_ = num_bytes;
+ num_lines_ = num_lines;
}
- virtual Status Parse(void *data) = 0;
- virtual Status GetExposureLines(unsigned int &lines) = 0;
- virtual Status GetGainCode(unsigned int &gain_code) = 0;
+
+ void SetLineLengthBytes(unsigned int num_bytes)
+ {
+ line_length_bytes_ = num_bytes;
+ }
+
+ virtual Status Parse(libcamera::Span<const uint8_t> buffer,
+ RegisterMap &registers) = 0;
protected:
bool reset_;
int bits_per_pixel_;
unsigned int num_lines_;
unsigned int line_length_bytes_;
- unsigned int buffer_size_bytes_;
};
-// This isn't a full implementation of a metadata parser for SMIA sensors,
-// however, it does provide the findRegs method which will prove useful and make
-// it easier to implement parsers for other SMIA-like sensors (see
-// md_parser_imx219.cpp for an example).
+/*
+ * This isn't a full implementation of a metadata parser for SMIA sensors,
+ * however, it does provide the findRegs method which will prove useful and make
+ * it easier to implement parsers for other SMIA-like sensors (see
+ * md_parser_imx219.cpp for an example).
+ */
-class MdParserSmia : public MdParser
+class MdParserSmia final : public MdParser
{
public:
- MdParserSmia() : MdParser() {}
-
-protected:
- // Note that error codes > 0 are regarded as non-fatal; codes < 0
- // indicate a bad data buffer. Status codes are:
- // PARSE_OK - found all registers, much happiness
- // MISSING_REGS - some registers found; should this be a hard error?
- // The remaining codes are all hard errors.
+ MdParserSmia(std::initializer_list<uint32_t> registerList);
+
+ MdParser::Status Parse(libcamera::Span<const uint8_t> buffer,
+ RegisterMap &registers) override;
+
+private:
+ /* Maps register address to offset in the buffer. */
+ using OffsetMap = std::map<uint32_t, std::optional<uint32_t>>;
+
+ /*
+ * Note that error codes > 0 are regarded as non-fatal; codes < 0
+ * indicate a bad data buffer. Status codes are:
+ * PARSE_OK - found all registers, much happiness
+ * MISSING_REGS - some registers found; should this be a hard error?
+ * The remaining codes are all hard errors.
+ */
enum ParseStatus {
PARSE_OK = 0,
MISSING_REGS = 1,
@@ -116,8 +146,10 @@ protected:
BAD_LINE_END = -4,
BAD_PADDING = -5
};
- ParseStatus findRegs(unsigned char *data, uint32_t regs[],
- int offsets[], unsigned int num_regs);
+
+ ParseStatus findRegs(libcamera::Span<const uint8_t> buffer);
+
+ OffsetMap offsets_;
};
} // namespace RPi
diff --git a/src/ipa/raspberrypi/md_parser_rpi.cpp b/src/ipa/raspberrypi/md_parser_rpi.cpp
deleted file mode 100644
index 2b0bcfc5..00000000
--- a/src/ipa/raspberrypi/md_parser_rpi.cpp
+++ /dev/null
@@ -1,37 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2020, Raspberry Pi (Trading) Limited
- *
- * md_parser_rpi.cpp - Metadata parser for generic Raspberry Pi metadata
- */
-
-#include <string.h>
-
-#include "md_parser_rpi.hpp"
-
-using namespace RPiController;
-
-MdParserRPi::MdParserRPi()
-{
-}
-
-MdParser::Status MdParserRPi::Parse(void *data)
-{
- if (buffer_size_bytes_ < sizeof(rpiMetadata))
- return ERROR;
-
- memcpy(&metadata, data, sizeof(rpiMetadata));
- return OK;
-}
-
-MdParser::Status MdParserRPi::GetExposureLines(unsigned int &lines)
-{
- lines = metadata.exposure;
- return OK;
-}
-
-MdParser::Status MdParserRPi::GetGainCode(unsigned int &gain_code)
-{
- gain_code = metadata.gain;
- return OK;
-}
diff --git a/src/ipa/raspberrypi/md_parser_rpi.hpp b/src/ipa/raspberrypi/md_parser_rpi.hpp
deleted file mode 100644
index 52f54f00..00000000
--- a/src/ipa/raspberrypi/md_parser_rpi.hpp
+++ /dev/null
@@ -1,32 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
- *
- * md_parser_rpi.hpp - Raspberry Pi metadata parser interface
- */
-#pragma once
-
-#include "md_parser.hpp"
-
-namespace RPiController {
-
-class MdParserRPi : public MdParser
-{
-public:
- MdParserRPi();
- Status Parse(void *data) override;
- Status GetExposureLines(unsigned int &lines) override;
- Status GetGainCode(unsigned int &gain_code) override;
-
-private:
- // This must be the same struct that is filled into the metadata buffer
- // in the pipeline handler.
- struct rpiMetadata
- {
- uint32_t exposure;
- uint32_t gain;
- };
- rpiMetadata metadata;
-};
-
-}
diff --git a/src/ipa/raspberrypi/md_parser_smia.cpp b/src/ipa/raspberrypi/md_parser_smia.cpp
new file mode 100644
index 00000000..ea5eac41
--- /dev/null
+++ b/src/ipa/raspberrypi/md_parser_smia.cpp
@@ -0,0 +1,149 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
+ *
+ * md_parser_smia.cpp - SMIA specification based embedded data parser
+ */
+
+#include <libcamera/base/log.h>
+#include "md_parser.hpp"
+
+using namespace RPiController;
+using namespace libcamera;
+
+/*
+ * This function goes through the embedded data to find the offsets (not
+ * values!), in the data block, where the values of the given registers can
+ * subsequently be found.
+ *
+ * Embedded data tag bytes, from Sony IMX219 datasheet but general to all SMIA
+ * sensors, I think.
+ */
+
+constexpr unsigned int LINE_START = 0x0a;
+constexpr unsigned int LINE_END_TAG = 0x07;
+constexpr unsigned int REG_HI_BITS = 0xaa;
+constexpr unsigned int REG_LOW_BITS = 0xa5;
+constexpr unsigned int REG_VALUE = 0x5a;
+constexpr unsigned int REG_SKIP = 0x55;
+
+MdParserSmia::MdParserSmia(std::initializer_list<uint32_t> registerList)
+{
+ for (auto r : registerList)
+ offsets_[r] = {};
+}
+
+MdParser::Status MdParserSmia::Parse(libcamera::Span<const uint8_t> buffer,
+ RegisterMap &registers)
+{
+ if (reset_) {
+ /*
+ * Search again through the metadata for all the registers
+ * requested.
+ */
+ ASSERT(bits_per_pixel_);
+
+ for (const auto &kv : offsets_)
+ offsets_[kv.first] = {};
+
+ ParseStatus ret = findRegs(buffer);
+ /*
+ * > 0 means "worked partially but parse again next time",
+ * < 0 means "hard error".
+ *
+ * In either case, we retry parsing on the next frame.
+ */
+ if (ret != PARSE_OK)
+ return ERROR;
+
+ reset_ = false;
+ }
+
+ /* Populate the register values requested. */
+ registers.clear();
+ for (const auto &[reg, offset] : offsets_) {
+ if (!offset) {
+ reset_ = true;
+ return NOTFOUND;
+ }
+ registers[reg] = buffer[offset.value()];
+ }
+
+ return OK;
+}
+
+MdParserSmia::ParseStatus MdParserSmia::findRegs(libcamera::Span<const uint8_t> buffer)
+{
+ ASSERT(offsets_.size());
+
+ if (buffer[0] != LINE_START)
+ return NO_LINE_START;
+
+ unsigned int current_offset = 1; /* after the LINE_START */
+ unsigned int current_line_start = 0, current_line = 0;
+ unsigned int reg_num = 0, regs_done = 0;
+
+ while (1) {
+ int tag = buffer[current_offset++];
+
+ if ((bits_per_pixel_ == 10 &&
+ (current_offset + 1 - current_line_start) % 5 == 0) ||
+ (bits_per_pixel_ == 12 &&
+ (current_offset + 1 - current_line_start) % 3 == 0)) {
+ if (buffer[current_offset++] != REG_SKIP)
+ return BAD_DUMMY;
+ }
+
+ int data_byte = buffer[current_offset++];
+
+ if (tag == LINE_END_TAG) {
+ if (data_byte != LINE_END_TAG)
+ return BAD_LINE_END;
+
+ if (num_lines_ && ++current_line == num_lines_)
+ return MISSING_REGS;
+
+ if (line_length_bytes_) {
+ current_offset = current_line_start + line_length_bytes_;
+
+ /* Require whole line to be in the buffer (if buffer size set). */
+ if (buffer.size() &&
+ current_offset + line_length_bytes_ > buffer.size())
+ return MISSING_REGS;
+
+ if (buffer[current_offset] != LINE_START)
+ return NO_LINE_START;
+ } else {
+ /* allow a zero line length to mean "hunt for the next line" */
+ while (current_offset < buffer.size() &&
+ buffer[current_offset] != LINE_START)
+ current_offset++;
+
+ if (current_offset == buffer.size())
+ return NO_LINE_START;
+ }
+
+ /* inc current_offset to after LINE_START */
+ current_line_start = current_offset++;
+ } else {
+ if (tag == REG_HI_BITS)
+ reg_num = (reg_num & 0xff) | (data_byte << 8);
+ else if (tag == REG_LOW_BITS)
+ reg_num = (reg_num & 0xff00) | data_byte;
+ else if (tag == REG_SKIP)
+ reg_num++;
+ else if (tag == REG_VALUE) {
+ auto reg = offsets_.find(reg_num);
+
+ if (reg != offsets_.end()) {
+ offsets_[reg_num] = current_offset - 1;
+
+ if (++regs_done == offsets_.size())
+ return PARSE_OK;
+ }
+ reg_num++;
+ } else
+ return ILLEGAL_TAG;
+ }
+ }
+}
diff --git a/src/ipa/raspberrypi/meson.build b/src/ipa/raspberrypi/meson.build
index 9445cd09..1af31e4a 100644
--- a/src/ipa/raspberrypi/meson.build
+++ b/src/ipa/raspberrypi/meson.build
@@ -3,7 +3,7 @@
ipa_name = 'ipa_rpi'
rpi_ipa_deps = [
- libcamera_dep,
+ libcamera_private,
dependency('boost'),
libatomic,
]
@@ -16,12 +16,13 @@ rpi_ipa_includes = [
rpi_ipa_sources = files([
'raspberrypi.cpp',
- 'md_parser.cpp',
- 'md_parser_rpi.cpp',
+ 'md_parser_smia.cpp',
'cam_helper.cpp',
'cam_helper_ov5647.cpp',
'cam_helper_imx219.cpp',
+ 'cam_helper_imx290.cpp',
'cam_helper_imx477.cpp',
+ 'cam_helper_ov9281.cpp',
'controller/controller.cpp',
'controller/histogram.cpp',
'controller/algorithm.cpp',
@@ -39,10 +40,11 @@ rpi_ipa_sources = files([
'controller/rpi/contrast.cpp',
'controller/rpi/sdn.cpp',
'controller/pwl.cpp',
+ 'controller/device_status.cpp',
])
mod = shared_module(ipa_name,
- rpi_ipa_sources,
+ [rpi_ipa_sources, libcamera_generated_ipa_headers],
name_prefix : '',
include_directories : rpi_ipa_includes,
dependencies : rpi_ipa_deps,
@@ -54,7 +56,7 @@ if ipa_sign_module
custom_target(ipa_name + '.so.sign',
input : mod,
output : ipa_name + '.so.sign',
- command : [ ipa_sign, ipa_priv_key, '@INPUT@', '@OUTPUT@' ],
+ command : [ipa_sign, ipa_priv_key, '@INPUT@', '@OUTPUT@'],
install : false,
build_by_default : true)
endif
diff --git a/src/ipa/raspberrypi/raspberrypi.cpp b/src/ipa/raspberrypi/raspberrypi.cpp
index b0c7d1c1..5cd33304 100644
--- a/src/ipa/raspberrypi/raspberrypi.cpp
+++ b/src/ipa/raspberrypi/raspberrypi.cpp
@@ -1,34 +1,34 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Ltd.
*
* rpi.cpp - Raspberry Pi Image Processing Algorithms
*/
#include <algorithm>
+#include <array>
#include <fcntl.h>
#include <math.h>
#include <stdint.h>
#include <string.h>
#include <sys/mman.h>
-#include <libcamera/buffer.h>
+#include <linux/bcm2835-isp.h>
+
+#include <libcamera/base/log.h>
+#include <libcamera/base/span.h>
+
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/file_descriptor.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/ipa/ipa_interface.h>
#include <libcamera/ipa/ipa_module_info.h>
#include <libcamera/ipa/raspberrypi.h>
+#include <libcamera/ipa/raspberrypi_ipa_interface.h>
#include <libcamera/request.h>
-#include <libcamera/span.h>
-#include <libipa/ipa_interface_wrapper.h>
-
-#include "libcamera/internal/camera_sensor.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/utils.h"
-
-#include <linux/bcm2835-isp.h>
+#include "libcamera/internal/framebuffer.h"
#include "agc_algorithm.hpp"
#include "agc_status.h"
@@ -42,61 +42,79 @@
#include "contrast_algorithm.hpp"
#include "contrast_status.h"
#include "controller.hpp"
+#include "denoise_algorithm.hpp"
+#include "denoise_status.h"
#include "dpc_status.h"
#include "focus_status.h"
#include "geq_status.h"
#include "lux_status.h"
#include "metadata.hpp"
#include "noise_status.h"
-#include "sdn_status.h"
#include "sharpen_algorithm.hpp"
#include "sharpen_status.h"
namespace libcamera {
+using namespace std::literals::chrono_literals;
+using utils::Duration;
+
/* Configure the sensor with these values initially. */
-constexpr double DefaultAnalogueGain = 1.0;
-constexpr unsigned int DefaultExposureTime = 20000;
+constexpr double defaultAnalogueGain = 1.0;
+constexpr Duration defaultExposureTime = 20.0ms;
+constexpr Duration defaultMinFrameDuration = 1.0s / 30.0;
+constexpr Duration defaultMaxFrameDuration = 250.0s;
+
+/*
+ * Determine the minimum allowable inter-frame duration to run the controller
+ * algorithms. If the pipeline handler provides frames at a rate higher than this,
+ * we rate-limit the controller Prepare() and Process() calls to lower than or
+ * equal to this rate.
+ */
+constexpr Duration controllerMinFrameDuration = 1.0s / 60.0;
LOG_DEFINE_CATEGORY(IPARPI)
-class IPARPi : public IPAInterface
+class IPARPi : public ipa::RPi::IPARPiInterface
{
public:
IPARPi()
- : lastMode_({}), controller_(), controllerInit_(false),
- frameCount_(0), checkCount_(0), mistrustCount_(0),
- lsTable_(nullptr)
+ : controller_(), frameCount_(0), checkCount_(0), mistrustCount_(0),
+ lastRunTimestamp_(0), lsTable_(nullptr), firstStart_(true)
{
}
~IPARPi()
{
if (lsTable_)
- munmap(lsTable_, RPi::MaxLsGridSize);
+ munmap(lsTable_, ipa::RPi::MaxLsGridSize);
}
- int init(const IPASettings &settings) override;
- int start() override { return 0; }
+ int init(const IPASettings &settings, ipa::RPi::SensorConfig *sensorConfig) override;
+ void start(const ControlList &controls, ipa::RPi::StartConfig *startConfig) override;
void stop() override {}
- void configure(const CameraSensorInfo &sensorInfo,
- const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- const IPAOperationData &data,
- IPAOperationData *response) override;
+ int configure(const IPACameraSensorInfo &sensorInfo,
+ const std::map<unsigned int, IPAStream> &streamConfig,
+ const std::map<unsigned int, ControlInfoMap> &entityControls,
+ const ipa::RPi::IPAConfig &data,
+ ControlList *controls) override;
void mapBuffers(const std::vector<IPABuffer> &buffers) override;
void unmapBuffers(const std::vector<unsigned int> &ids) override;
- void processEvent(const IPAOperationData &event) override;
+ void signalStatReady(const uint32_t bufferId) override;
+ void signalQueueRequest(const ControlList &controls) override;
+ void signalIspPrepare(const ipa::RPi::ISPConfig &data) override;
private:
- void setMode(const CameraSensorInfo &sensorInfo);
+ void setMode(const IPACameraSensorInfo &sensorInfo);
+ bool validateSensorControls();
+ bool validateIspControls();
void queueRequest(const ControlList &controls);
void returnEmbeddedBuffer(unsigned int bufferId);
- void prepareISP(unsigned int bufferId);
+ void prepareISP(const ipa::RPi::ISPConfig &data);
void reportMetadata();
- bool parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &deviceStatus);
+ void fillDeviceStatus(const ControlList &sensorControls);
void processStats(unsigned int bufferId);
+ void applyFrameDurations(Duration minFrameDuration, Duration maxFrameDuration);
void applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls);
void applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls);
void applyDG(const struct AgcStatus *dgStatus, ControlList &ctrls);
@@ -104,30 +122,24 @@ private:
void applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, ControlList &ctrls);
void applyGamma(const struct ContrastStatus *contrastStatus, ControlList &ctrls);
void applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls);
- void applyDenoise(const struct SdnStatus *denoiseStatus, ControlList &ctrls);
+ void applyDenoise(const struct DenoiseStatus *denoiseStatus, ControlList &ctrls);
void applySharpen(const struct SharpenStatus *sharpenStatus, ControlList &ctrls);
void applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls);
void applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls);
void resampleTable(uint16_t dest[], double const src[12][16], int destW, int destH);
- std::map<unsigned int, FrameBuffer> buffers_;
- std::map<unsigned int, void *> buffersMemory_;
+ std::map<unsigned int, MappedFrameBuffer> buffers_;
- ControlInfoMap unicamCtrls_;
+ ControlInfoMap sensorCtrls_;
ControlInfoMap ispCtrls_;
ControlList libcameraMetadata_;
- /* IPA configuration. */
- std::string tuningFile_;
-
/* Camera sensor params. */
CameraMode mode_;
- CameraMode lastMode_;
/* Raspberry Pi controller specific defines. */
std::unique_ptr<RPiController::CamHelper> helper_;
RPiController::Controller controller_;
- bool controllerInit_;
RPiController::Metadata rpiMetadata_;
/*
@@ -142,18 +154,135 @@ private:
/* How many frames we should avoid running control algos on. */
unsigned int mistrustCount_;
+ /* Number of frames that need to be dropped on startup. */
+ unsigned int dropFrameCount_;
+
+ /* Frame timestamp for the last run of the controller. */
+ uint64_t lastRunTimestamp_;
+
+ /* Do we run a Controller::process() for this frame? */
+ bool processPending_;
+
/* LS table allocation passed in from the pipeline handler. */
FileDescriptor lsTableHandle_;
void *lsTable_;
+
+ /* Distinguish the first camera start from others. */
+ bool firstStart_;
+
+ /* Frame duration (1/fps) limits. */
+ Duration minFrameDuration_;
+ Duration maxFrameDuration_;
};
-int IPARPi::init(const IPASettings &settings)
+int IPARPi::init(const IPASettings &settings, ipa::RPi::SensorConfig *sensorConfig)
{
- tuningFile_ = settings.configurationFile;
+ /*
+ * Load the "helper" for this sensor. This tells us all the device specific stuff
+ * that the kernel driver doesn't. We only do this the first time; we don't need
+ * to re-parse the metadata after a simple mode-switch for no reason.
+ */
+ helper_ = std::unique_ptr<RPiController::CamHelper>(RPiController::CamHelper::Create(settings.sensorModel));
+ if (!helper_) {
+ LOG(IPARPI, Error) << "Could not create camera helper for "
+ << settings.sensorModel;
+ return -EINVAL;
+ }
+
+ /*
+ * Pass out the sensor config to the pipeline handler in order
+ * to setup the staggered writer class.
+ */
+ int gainDelay, exposureDelay, vblankDelay, sensorMetadata;
+ helper_->GetDelays(exposureDelay, gainDelay, vblankDelay);
+ sensorMetadata = helper_->SensorEmbeddedDataPresent();
+
+ sensorConfig->gainDelay = gainDelay;
+ sensorConfig->exposureDelay = exposureDelay;
+ sensorConfig->vblankDelay = vblankDelay;
+ sensorConfig->sensorMetadata = sensorMetadata;
+
+ /* Load the tuning file for this sensor. */
+ controller_.Read(settings.configurationFile.c_str());
+ controller_.Initialise();
+
return 0;
}
-void IPARPi::setMode(const CameraSensorInfo &sensorInfo)
+void IPARPi::start(const ControlList &controls, ipa::RPi::StartConfig *startConfig)
+{
+ RPiController::Metadata metadata;
+
+ ASSERT(startConfig);
+ if (!controls.empty()) {
+ /* We have been given some controls to action before start. */
+ queueRequest(controls);
+ }
+
+ controller_.SwitchMode(mode_, &metadata);
+
+ /* SwitchMode may supply updated exposure/gain values to use. */
+ AgcStatus agcStatus;
+ agcStatus.shutter_time = 0.0s;
+ agcStatus.analogue_gain = 0.0;
+
+ metadata.Get("agc.status", agcStatus);
+ if (agcStatus.shutter_time && agcStatus.analogue_gain) {
+ ControlList ctrls(sensorCtrls_);
+ applyAGC(&agcStatus, ctrls);
+ startConfig->controls = std::move(ctrls);
+ }
+
+ /*
+ * Initialise frame counts, and decide how many frames must be hidden or
+ * "mistrusted", which depends on whether this is a startup from cold,
+ * or merely a mode switch in a running system.
+ */
+ frameCount_ = 0;
+ checkCount_ = 0;
+ if (firstStart_) {
+ dropFrameCount_ = helper_->HideFramesStartup();
+ mistrustCount_ = helper_->MistrustFramesStartup();
+
+ /*
+ * Query the AGC/AWB for how many frames they may take to
+ * converge sufficiently. Where these numbers are non-zero
+ * we must allow for the frames with bad statistics
+ * (mistrustCount_) that they won't see. But if zero (i.e.
+ * no convergence necessary), no frames need to be dropped.
+ */
+ unsigned int agcConvergenceFrames = 0;
+ RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
+ controller_.GetAlgorithm("agc"));
+ if (agc) {
+ agcConvergenceFrames = agc->GetConvergenceFrames();
+ if (agcConvergenceFrames)
+ agcConvergenceFrames += mistrustCount_;
+ }
+
+ unsigned int awbConvergenceFrames = 0;
+ RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
+ controller_.GetAlgorithm("awb"));
+ if (awb) {
+ awbConvergenceFrames = awb->GetConvergenceFrames();
+ if (awbConvergenceFrames)
+ awbConvergenceFrames += mistrustCount_;
+ }
+
+ dropFrameCount_ = std::max({ dropFrameCount_, agcConvergenceFrames, awbConvergenceFrames });
+ LOG(IPARPI, Debug) << "Drop " << dropFrameCount_ << " frames on startup";
+ } else {
+ dropFrameCount_ = helper_->HideFramesModeSwitch();
+ mistrustCount_ = helper_->MistrustFramesModeSwitch();
+ }
+
+ startConfig->dropFrameCount = dropFrameCount_;
+
+ firstStart_ = false;
+ lastRunTimestamp_ = 0;
+}
+
+void IPARPi::setMode(const IPACameraSensorInfo &sensorInfo)
{
mode_.bitdepth = sensorInfo.bitsPerPixel;
mode_.width = sensorInfo.outputSize.width;
@@ -178,81 +307,70 @@ void IPARPi::setMode(const CameraSensorInfo &sensorInfo)
*
* \todo Get the pipeline handle to provide the full data
*/
- mode_.bin_y = std::min(2, static_cast<int>(mode_.scale_x));
+ mode_.bin_x = std::min(2, static_cast<int>(mode_.scale_x));
mode_.bin_y = std::min(2, static_cast<int>(mode_.scale_y));
/* The noise factor is the square root of the total binning factor. */
mode_.noise_factor = sqrt(mode_.bin_x * mode_.bin_y);
/*
- * Calculate the line length in nanoseconds as the ratio between
- * the line length in pixels and the pixel rate.
+ * Calculate the line length as the ratio between the line length in
+ * pixels and the pixel rate.
+ */
+ mode_.line_length = sensorInfo.lineLength * (1.0s / sensorInfo.pixelRate);
+
+ /*
+ * Set the frame length limits for the mode to ensure exposure and
+ * framerate calculations are clipped appropriately.
*/
- mode_.line_length = 1e9 * sensorInfo.lineLength / sensorInfo.pixelRate;
+ mode_.min_frame_length = sensorInfo.minFrameLength;
+ mode_.max_frame_length = sensorInfo.maxFrameLength;
}
-void IPARPi::configure(const CameraSensorInfo &sensorInfo,
- [[maybe_unused]] const std::map<unsigned int, IPAStream> &streamConfig,
- const std::map<unsigned int, const ControlInfoMap &> &entityControls,
- const IPAOperationData &ipaConfig,
- IPAOperationData *result)
+int IPARPi::configure(const IPACameraSensorInfo &sensorInfo,
+ [[maybe_unused]] const std::map<unsigned int, IPAStream> &streamConfig,
+ const std::map<unsigned int, ControlInfoMap> &entityControls,
+ const ipa::RPi::IPAConfig &ipaConfig,
+ ControlList *controls)
{
- if (entityControls.empty())
- return;
-
- result->operation = 0;
+ if (entityControls.size() != 2) {
+ LOG(IPARPI, Error) << "No ISP or sensor controls found.";
+ return -1;
+ }
- unicamCtrls_ = entityControls.at(0);
+ sensorCtrls_ = entityControls.at(0);
ispCtrls_ = entityControls.at(1);
- /* Setup a metadata ControlList to output metadata. */
- libcameraMetadata_ = ControlList(controls::controls);
-
- /*
- * Load the "helper" for this sensor. This tells us all the device specific stuff
- * that the kernel driver doesn't. We only do this the first time; we don't need
- * to re-parse the metadata after a simple mode-switch for no reason.
- */
- std::string cameraName(sensorInfo.model);
- if (!helper_) {
- helper_ = std::unique_ptr<RPiController::CamHelper>(RPiController::CamHelper::Create(cameraName));
-
- /*
- * Pass out the sensor config to the pipeline handler in order
- * to setup the staggered writer class.
- */
- int gainDelay, exposureDelay, sensorMetadata;
- helper_->GetDelays(exposureDelay, gainDelay);
- sensorMetadata = helper_->SensorEmbeddedDataPresent();
-
- result->data.push_back(gainDelay);
- result->data.push_back(exposureDelay);
- result->data.push_back(sensorMetadata);
+ if (!validateSensorControls()) {
+ LOG(IPARPI, Error) << "Sensor control validation failed.";
+ return -1;
+ }
- result->operation |= RPi::IPA_CONFIG_STAGGERED_WRITE;
+ if (!validateIspControls()) {
+ LOG(IPARPI, Error) << "ISP control validation failed.";
+ return -1;
}
+ /* Setup a metadata ControlList to output metadata. */
+ libcameraMetadata_ = ControlList(controls::controls);
+
/* Re-assemble camera mode using the sensor info. */
setMode(sensorInfo);
- /*
- * The ipaConfig.data always gives us the user transform first. Note that
- * this will always make the LS table pointer (if present) element 1.
- */
- mode_.transform = static_cast<libcamera::Transform>(ipaConfig.data[0]);
+ mode_.transform = static_cast<libcamera::Transform>(ipaConfig.transform);
/* Store the lens shading table pointer and handle if available. */
- if (ipaConfig.operation & RPi::IPA_CONFIG_LS_TABLE) {
+ if (ipaConfig.lsTableHandle.isValid()) {
/* Remove any previous table, if there was one. */
if (lsTable_) {
- munmap(lsTable_, RPi::MaxLsGridSize);
+ munmap(lsTable_, ipa::RPi::MaxLsGridSize);
lsTable_ = nullptr;
}
- /* Map the LS table buffer into user space (now element 1). */
- lsTableHandle_ = FileDescriptor(ipaConfig.data[1]);
+ /* Map the LS table buffer into user space. */
+ lsTableHandle_ = std::move(ipaConfig.lsTableHandle);
if (lsTableHandle_.isValid()) {
- lsTable_ = mmap(nullptr, RPi::MaxLsGridSize, PROT_READ | PROT_WRITE,
+ lsTable_ = mmap(nullptr, ipa::RPi::MaxLsGridSize, PROT_READ | PROT_WRITE,
MAP_SHARED, lsTableHandle_.fd(), 0);
if (lsTable_ == MAP_FAILED) {
@@ -265,139 +383,72 @@ void IPARPi::configure(const CameraSensorInfo &sensorInfo,
/* Pass the camera mode to the CamHelper to setup algorithms. */
helper_->SetCameraMode(mode_);
- /*
- * Initialise frame counts, and decide how many frames must be hidden or
- *"mistrusted", which depends on whether this is a startup from cold,
- * or merely a mode switch in a running system.
- */
- frameCount_ = 0;
- checkCount_ = 0;
- unsigned int dropFrame = 0;
- if (controllerInit_) {
- dropFrame = helper_->HideFramesModeSwitch();
- mistrustCount_ = helper_->MistrustFramesModeSwitch();
- } else {
- dropFrame = helper_->HideFramesStartup();
- mistrustCount_ = helper_->MistrustFramesStartup();
- }
-
- result->data.push_back(dropFrame);
- result->operation |= RPi::IPA_CONFIG_DROP_FRAMES;
-
- /* These zero values mean not program anything (unless overwritten). */
- struct AgcStatus agcStatus;
- agcStatus.shutter_time = 0.0;
- agcStatus.analogue_gain = 0.0;
-
- if (!controllerInit_) {
- /* Load the tuning file for this sensor. */
- controller_.Read(tuningFile_.c_str());
- controller_.Initialise();
- controllerInit_ = true;
+ if (firstStart_) {
+ /* Supply initial values for frame durations. */
+ applyFrameDurations(defaultMinFrameDuration, defaultMaxFrameDuration);
/* Supply initial values for gain and exposure. */
- agcStatus.shutter_time = DefaultExposureTime;
- agcStatus.analogue_gain = DefaultAnalogueGain;
- }
-
- RPiController::Metadata metadata;
- controller_.SwitchMode(mode_, &metadata);
-
- /* SwitchMode may supply updated exposure/gain values to use. */
- metadata.Get("agc.status", agcStatus);
- if (agcStatus.shutter_time != 0.0 && agcStatus.analogue_gain != 0.0) {
- ControlList ctrls(unicamCtrls_);
+ ControlList ctrls(sensorCtrls_);
+ AgcStatus agcStatus;
+ agcStatus.shutter_time = defaultExposureTime;
+ agcStatus.analogue_gain = defaultAnalogueGain;
applyAGC(&agcStatus, ctrls);
- result->controls.push_back(ctrls);
- result->operation |= RPi::IPA_CONFIG_SENSOR;
+ ASSERT(controls);
+ *controls = std::move(ctrls);
}
- lastMode_ = mode_;
+ return 0;
}
void IPARPi::mapBuffers(const std::vector<IPABuffer> &buffers)
{
for (const IPABuffer &buffer : buffers) {
- auto elem = buffers_.emplace(std::piecewise_construct,
- std::forward_as_tuple(buffer.id),
- std::forward_as_tuple(buffer.planes));
- const FrameBuffer &fb = elem.first->second;
-
- buffersMemory_[buffer.id] = mmap(nullptr, fb.planes()[0].length,
- PROT_READ | PROT_WRITE, MAP_SHARED,
- fb.planes()[0].fd.fd(), 0);
-
- if (buffersMemory_[buffer.id] == MAP_FAILED) {
- int ret = -errno;
- LOG(IPARPI, Fatal) << "Failed to mmap buffer: " << strerror(-ret);
- }
+ const FrameBuffer fb(buffer.planes);
+ buffers_.emplace(buffer.id, MappedFrameBuffer(&fb, PROT_READ | PROT_WRITE));
}
}
void IPARPi::unmapBuffers(const std::vector<unsigned int> &ids)
{
for (unsigned int id : ids) {
- const auto fb = buffers_.find(id);
- if (fb == buffers_.end())
+ auto it = buffers_.find(id);
+ if (it == buffers_.end())
continue;
- munmap(buffersMemory_[id], fb->second.planes()[0].length);
- buffersMemory_.erase(id);
buffers_.erase(id);
}
}
-void IPARPi::processEvent(const IPAOperationData &event)
+void IPARPi::signalStatReady(uint32_t bufferId)
{
- switch (event.operation) {
- case RPi::IPA_EVENT_SIGNAL_STAT_READY: {
- unsigned int bufferId = event.data[0];
-
- if (++checkCount_ != frameCount_) /* assert here? */
- LOG(IPARPI, Error) << "WARNING: Prepare/Process mismatch!!!";
- if (frameCount_ > mistrustCount_)
- processStats(bufferId);
-
- reportMetadata();
-
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_STATS_METADATA_COMPLETE;
- op.data = { bufferId & RPi::BufferMask::ID };
- op.controls = { libcameraMetadata_ };
- queueFrameAction.emit(0, op);
- break;
- }
+ if (++checkCount_ != frameCount_) /* assert here? */
+ LOG(IPARPI, Error) << "WARNING: Prepare/Process mismatch!!!";
+ if (processPending_ && frameCount_ > mistrustCount_)
+ processStats(bufferId);
- case RPi::IPA_EVENT_SIGNAL_ISP_PREPARE: {
- unsigned int embeddedbufferId = event.data[0];
- unsigned int bayerbufferId = event.data[1];
+ reportMetadata();
- /*
- * At start-up, or after a mode-switch, we may want to
- * avoid running the control algos for a few frames in case
- * they are "unreliable".
- */
- prepareISP(embeddedbufferId);
- frameCount_++;
-
- /* Ready to push the input buffer into the ISP. */
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_RUN_ISP;
- op.data = { bayerbufferId & RPi::BufferMask::ID };
- queueFrameAction.emit(0, op);
- break;
- }
+ statsMetadataComplete.emit(bufferId & ipa::RPi::MaskID, libcameraMetadata_);
+}
- case RPi::IPA_EVENT_QUEUE_REQUEST: {
- queueRequest(event.controls[0]);
- break;
- }
+void IPARPi::signalQueueRequest(const ControlList &controls)
+{
+ queueRequest(controls);
+}
- default:
- LOG(IPARPI, Error) << "Unknown event " << event.operation;
- break;
- }
+void IPARPi::signalIspPrepare(const ipa::RPi::ISPConfig &data)
+{
+ /*
+ * At start-up, or after a mode-switch, we may want to
+ * avoid running the control algos for a few frames in case
+ * they are "unreliable".
+ */
+ prepareISP(data);
+ frameCount_++;
+
+ /* Ready to push the input buffer into the ISP. */
+ runIsp.emit(data.bayerBufferId & ipa::RPi::MaskID);
}
void IPARPi::reportMetadata()
@@ -411,13 +462,16 @@ void IPARPi::reportMetadata()
*/
DeviceStatus *deviceStatus = rpiMetadata_.GetLocked<DeviceStatus>("device.status");
if (deviceStatus) {
- libcameraMetadata_.set(controls::ExposureTime, deviceStatus->shutter_speed);
+ libcameraMetadata_.set(controls::ExposureTime,
+ deviceStatus->shutter_speed.get<std::micro>());
libcameraMetadata_.set(controls::AnalogueGain, deviceStatus->analogue_gain);
}
AgcStatus *agcStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
- if (agcStatus)
+ if (agcStatus) {
libcameraMetadata_.set(controls::AeLocked, agcStatus->locked);
+ libcameraMetadata_.set(controls::DigitalGain, agcStatus->digital_gain);
+ }
LuxStatus *luxStatus = rpiMetadata_.GetLocked<LuxStatus>("lux.status");
if (luxStatus)
@@ -458,6 +512,53 @@ void IPARPi::reportMetadata()
}
}
+/*
+ * Check that every V4L2 sensor control required by the IPA (analogue gain,
+ * exposure and vertical blanking) is present in sensorCtrls_.
+ *
+ * Returns true if all controls are available, false (with an error logged
+ * for the first missing control) otherwise.
+ */
+bool IPARPi::validateSensorControls()
+{
+	static const uint32_t ctrls[] = {
+		V4L2_CID_ANALOGUE_GAIN,
+		V4L2_CID_EXPOSURE,
+		V4L2_CID_VBLANK,
+	};
+
+	for (auto c : ctrls) {
+		if (sensorCtrls_.find(c) == sensorCtrls_.end()) {
+			LOG(IPARPI, Error) << "Unable to find sensor control "
+					   << utils::hex(c);
+			return false;
+		}
+	}
+
+	return true;
+}
+
+/*
+ * Check that every BCM2835 ISP V4L2 control used by the apply*() helpers
+ * is present in ispCtrls_.
+ *
+ * Returns true if all controls are available, false (with an error logged
+ * for the first missing control) otherwise.
+ */
+bool IPARPi::validateIspControls()
+{
+	static const uint32_t ctrls[] = {
+		V4L2_CID_RED_BALANCE,
+		V4L2_CID_BLUE_BALANCE,
+		V4L2_CID_DIGITAL_GAIN,
+		V4L2_CID_USER_BCM2835_ISP_CC_MATRIX,
+		V4L2_CID_USER_BCM2835_ISP_GAMMA,
+		V4L2_CID_USER_BCM2835_ISP_BLACK_LEVEL,
+		V4L2_CID_USER_BCM2835_ISP_GEQ,
+		V4L2_CID_USER_BCM2835_ISP_DENOISE,
+		V4L2_CID_USER_BCM2835_ISP_SHARPEN,
+		V4L2_CID_USER_BCM2835_ISP_DPC,
+		V4L2_CID_USER_BCM2835_ISP_LENS_SHADING,
+		V4L2_CID_USER_BCM2835_ISP_CDN,
+	};
+
+	for (auto c : ctrls) {
+		if (ispCtrls_.find(c) == ispCtrls_.end()) {
+			LOG(IPARPI, Error) << "Unable to find ISP control "
+					   << utils::hex(c);
+			return false;
+		}
+	}
+
+	return true;
+}
+
/*
* Converting between enums (used in the libcamera API) and the names that
* we use to identify different modes. Unfortunately, the conversion tables
@@ -490,9 +591,18 @@ static const std::map<int32_t, std::string> AwbModeTable = {
{ controls::AwbFluorescent, "fluorescent" },
{ controls::AwbIndoor, "indoor" },
{ controls::AwbDaylight, "daylight" },
+ { controls::AwbCloudy, "cloudy" },
{ controls::AwbCustom, "custom" },
};
+/*
+ * Map the libcamera draft NoiseReductionMode values onto the controller's
+ * DenoiseMode. Note that both Fast and HighQuality/ZSL enable colour
+ * denoise, while Minimal keeps spatial denoise but disables the colour
+ * stage (ColourOff).
+ */
+static const std::map<int32_t, RPiController::DenoiseMode> DenoiseModeTable = {
+	{ controls::draft::NoiseReductionModeOff, RPiController::DenoiseMode::Off },
+	{ controls::draft::NoiseReductionModeFast, RPiController::DenoiseMode::ColourFast },
+	{ controls::draft::NoiseReductionModeHighQuality, RPiController::DenoiseMode::ColourHighQuality },
+	{ controls::draft::NoiseReductionModeMinimal, RPiController::DenoiseMode::ColourOff },
+	{ controls::draft::NoiseReductionModeZSL, RPiController::DenoiseMode::ColourHighQuality },
+};
+
void IPARPi::queueRequest(const ControlList &controls)
{
/* Clear the return metadata buffer. */
@@ -506,7 +616,12 @@ void IPARPi::queueRequest(const ControlList &controls)
switch (ctrl.first) {
case controls::AE_ENABLE: {
RPiController::Algorithm *agc = controller_.GetAlgorithm("agc");
- ASSERT(agc);
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set AE_ENABLE - no AGC algorithm";
+ break;
+ }
+
if (ctrl.second.get<bool>() == false)
agc->Pause();
else
@@ -519,14 +634,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::EXPOSURE_TIME: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
-
- /* This expects units of micro-seconds. */
- agc->SetFixedShutter(ctrl.second.get<int32_t>());
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set EXPOSURE_TIME - no AGC algorithm";
+ break;
+ }
- /* For the manual values to take effect, AGC must be unpaused. */
- if (agc->IsPaused())
- agc->Resume();
+ /* The control provides units of microseconds. */
+ agc->SetFixedShutter(ctrl.second.get<int32_t>() * 1.0us);
libcameraMetadata_.set(controls::ExposureTime, ctrl.second.get<int32_t>());
break;
@@ -535,12 +650,13 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::ANALOGUE_GAIN: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
- agc->SetFixedAnalogueGain(ctrl.second.get<float>());
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set ANALOGUE_GAIN - no AGC algorithm";
+ break;
+ }
- /* For the manual values to take effect, AGC must be unpaused. */
- if (agc->IsPaused())
- agc->Resume();
+ agc->SetFixedAnalogueGain(ctrl.second.get<float>());
libcameraMetadata_.set(controls::AnalogueGain,
ctrl.second.get<float>());
@@ -550,7 +666,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_METERING_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set AE_METERING_MODE - no AGC algorithm";
+ break;
+ }
int32_t idx = ctrl.second.get<int32_t>();
if (MeteringModeTable.count(idx)) {
@@ -566,7 +686,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_CONSTRAINT_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set AE_CONSTRAINT_MODE - no AGC algorithm";
+ break;
+ }
int32_t idx = ctrl.second.get<int32_t>();
if (ConstraintModeTable.count(idx)) {
@@ -582,7 +706,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_EXPOSURE_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set AE_EXPOSURE_MODE - no AGC algorithm";
+ break;
+ }
int32_t idx = ctrl.second.get<int32_t>();
if (ExposureModeTable.count(idx)) {
@@ -598,7 +726,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::EXPOSURE_VALUE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
controller_.GetAlgorithm("agc"));
- ASSERT(agc);
+ if (!agc) {
+ LOG(IPARPI, Warning)
+ << "Could not set EXPOSURE_VALUE - no AGC algorithm";
+ break;
+ }
/*
* The SetEv() method takes in a direct exposure multiplier.
@@ -613,7 +745,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AWB_ENABLE: {
RPiController::Algorithm *awb = controller_.GetAlgorithm("awb");
- ASSERT(awb);
+ if (!awb) {
+ LOG(IPARPI, Warning)
+ << "Could not set AWB_ENABLE - no AWB algorithm";
+ break;
+ }
if (ctrl.second.get<bool>() == false)
awb->Pause();
@@ -628,7 +764,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AWB_MODE: {
RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
controller_.GetAlgorithm("awb"));
- ASSERT(awb);
+ if (!awb) {
+ LOG(IPARPI, Warning)
+ << "Could not set AWB_MODE - no AWB algorithm";
+ break;
+ }
int32_t idx = ctrl.second.get<int32_t>();
if (AwbModeTable.count(idx)) {
@@ -645,7 +785,11 @@ void IPARPi::queueRequest(const ControlList &controls)
auto gains = ctrl.second.get<Span<const float>>();
RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
controller_.GetAlgorithm("awb"));
- ASSERT(awb);
+ if (!awb) {
+ LOG(IPARPI, Warning)
+ << "Could not set COLOUR_GAINS - no AWB algorithm";
+ break;
+ }
awb->SetManualGains(gains[0], gains[1]);
if (gains[0] != 0.0f && gains[1] != 0.0f)
@@ -658,7 +802,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::BRIGHTNESS: {
RPiController::ContrastAlgorithm *contrast = dynamic_cast<RPiController::ContrastAlgorithm *>(
controller_.GetAlgorithm("contrast"));
- ASSERT(contrast);
+ if (!contrast) {
+ LOG(IPARPI, Warning)
+ << "Could not set BRIGHTNESS - no contrast algorithm";
+ break;
+ }
contrast->SetBrightness(ctrl.second.get<float>() * 65536);
libcameraMetadata_.set(controls::Brightness,
@@ -669,7 +817,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::CONTRAST: {
RPiController::ContrastAlgorithm *contrast = dynamic_cast<RPiController::ContrastAlgorithm *>(
controller_.GetAlgorithm("contrast"));
- ASSERT(contrast);
+ if (!contrast) {
+ LOG(IPARPI, Warning)
+ << "Could not set CONTRAST - no contrast algorithm";
+ break;
+ }
contrast->SetContrast(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Contrast,
@@ -680,7 +832,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::SATURATION: {
RPiController::CcmAlgorithm *ccm = dynamic_cast<RPiController::CcmAlgorithm *>(
controller_.GetAlgorithm("ccm"));
- ASSERT(ccm);
+ if (!ccm) {
+ LOG(IPARPI, Warning)
+ << "Could not set SATURATION - no ccm algorithm";
+ break;
+ }
ccm->SetSaturation(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Saturation,
@@ -691,7 +847,11 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::SHARPNESS: {
RPiController::SharpenAlgorithm *sharpen = dynamic_cast<RPiController::SharpenAlgorithm *>(
controller_.GetAlgorithm("sharpen"));
- ASSERT(sharpen);
+ if (!sharpen) {
+ LOG(IPARPI, Warning)
+ << "Could not set SHARPNESS - no sharpen algorithm";
+ break;
+ }
sharpen->SetStrength(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Sharpness,
@@ -699,6 +859,44 @@ void IPARPi::queueRequest(const ControlList &controls)
break;
}
+ case controls::SCALER_CROP: {
+ /* We do nothing with this, but should avoid the warning below. */
+ break;
+ }
+
+ case controls::FRAME_DURATION_LIMITS: {
+ auto frameDurations = ctrl.second.get<Span<const int64_t>>();
+ applyFrameDurations(frameDurations[0] * 1.0us, frameDurations[1] * 1.0us);
+ break;
+ }
+
+ case controls::NOISE_REDUCTION_MODE: {
+ RPiController::DenoiseAlgorithm *sdn = dynamic_cast<RPiController::DenoiseAlgorithm *>(
+ controller_.GetAlgorithm("SDN"));
+ if (!sdn) {
+ LOG(IPARPI, Warning)
+ << "Could not set NOISE_REDUCTION_MODE - no SDN algorithm";
+ break;
+ }
+
+ int32_t idx = ctrl.second.get<int32_t>();
+ auto mode = DenoiseModeTable.find(idx);
+ if (mode != DenoiseModeTable.end()) {
+ sdn->SetMode(mode->second);
+
+ /*
+ * \todo If the colour denoise is not going to run due to an
+ * analysis image resolution or format mismatch, we should
+ * report the status correctly in the metadata.
+ */
+ libcameraMetadata_.set(controls::draft::NoiseReductionMode, idx);
+ } else {
+ LOG(IPARPI, Error) << "Noise reduction mode " << idx
+ << " not recognised";
+ }
+ break;
+ }
+
default:
LOG(IPARPI, Warning)
<< "Ctrl " << controls::controls.at(ctrl.first)->name()
@@ -710,152 +908,149 @@ void IPARPi::queueRequest(const ControlList &controls)
void IPARPi::returnEmbeddedBuffer(unsigned int bufferId)
{
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_EMBEDDED_COMPLETE;
- op.data = { bufferId & RPi::BufferMask::ID };
- queueFrameAction.emit(0, op);
+ embeddedComplete.emit(bufferId & ipa::RPi::MaskID);
}
-void IPARPi::prepareISP(unsigned int bufferId)
+void IPARPi::prepareISP(const ipa::RPi::ISPConfig &data)
{
- struct DeviceStatus deviceStatus = {};
- bool success = parseEmbeddedData(bufferId, deviceStatus);
+ int64_t frameTimestamp = data.controls.get(controls::SensorTimestamp);
+ RPiController::Metadata lastMetadata;
+ Span<uint8_t> embeddedBuffer;
+
+ lastMetadata = std::move(rpiMetadata_);
+ fillDeviceStatus(data.controls);
+
+ if (data.embeddedBufferPresent) {
+ /*
+ * Pipeline handler has supplied us with an embedded data buffer,
+ * we must pass it to the CamHelper for parsing.
+ */
+ auto it = buffers_.find(data.embeddedBufferId);
+ ASSERT(it != buffers_.end());
+ embeddedBuffer = it->second.maps()[0];
+ }
+
+ /*
+ * This may overwrite the DeviceStatus using values from the sensor
+ * metadata, and may also do additional custom processing.
+ */
+ helper_->Prepare(embeddedBuffer, rpiMetadata_);
/* Done with embedded data now, return to pipeline handler asap. */
- returnEmbeddedBuffer(bufferId);
+ if (data.embeddedBufferPresent)
+ returnEmbeddedBuffer(data.embeddedBufferId);
- if (success) {
- ControlList ctrls(ispCtrls_);
+ /* Allow a 10% margin on the comparison below. */
+ Duration delta = (frameTimestamp - lastRunTimestamp_) * 1.0ns;
+ if (lastRunTimestamp_ && frameCount_ > dropFrameCount_ &&
+ delta < controllerMinFrameDuration * 0.9) {
+ /*
+ * Ensure we merge the previous frame's metadata with the current
+ * frame. This will not overwrite exposure/gain values for the
+ * current frame, or any other bits of metadata that were added
+ * in helper_->Prepare().
+ */
+ rpiMetadata_.Merge(lastMetadata);
+ processPending_ = false;
+ return;
+ }
- rpiMetadata_.Clear();
- rpiMetadata_.Set("device.status", deviceStatus);
- controller_.Prepare(&rpiMetadata_);
+ lastRunTimestamp_ = frameTimestamp;
+ processPending_ = true;
- /* Lock the metadata buffer to avoid constant locks/unlocks. */
- std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
+ ControlList ctrls(ispCtrls_);
- AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
- if (awbStatus)
- applyAWB(awbStatus, ctrls);
+ controller_.Prepare(&rpiMetadata_);
- CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
- if (ccmStatus)
- applyCCM(ccmStatus, ctrls);
+ /* Lock the metadata buffer to avoid constant locks/unlocks. */
+ std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
- AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
- if (dgStatus)
- applyDG(dgStatus, ctrls);
+ AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
+ if (awbStatus)
+ applyAWB(awbStatus, ctrls);
- AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
- if (lsStatus)
- applyLS(lsStatus, ctrls);
+ CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
+ if (ccmStatus)
+ applyCCM(ccmStatus, ctrls);
- ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
- if (contrastStatus)
- applyGamma(contrastStatus, ctrls);
+ AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
+ if (dgStatus)
+ applyDG(dgStatus, ctrls);
- BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
- if (blackLevelStatus)
- applyBlackLevel(blackLevelStatus, ctrls);
+ AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
+ if (lsStatus)
+ applyLS(lsStatus, ctrls);
- GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
- if (geqStatus)
- applyGEQ(geqStatus, ctrls);
+ ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
+ if (contrastStatus)
+ applyGamma(contrastStatus, ctrls);
- SdnStatus *denoiseStatus = rpiMetadata_.GetLocked<SdnStatus>("sdn.status");
- if (denoiseStatus)
- applyDenoise(denoiseStatus, ctrls);
+ BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
+ if (blackLevelStatus)
+ applyBlackLevel(blackLevelStatus, ctrls);
- SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
- if (sharpenStatus)
- applySharpen(sharpenStatus, ctrls);
+ GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
+ if (geqStatus)
+ applyGEQ(geqStatus, ctrls);
- DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
- if (dpcStatus)
- applyDPC(dpcStatus, ctrls);
+ DenoiseStatus *denoiseStatus = rpiMetadata_.GetLocked<DenoiseStatus>("denoise.status");
+ if (denoiseStatus)
+ applyDenoise(denoiseStatus, ctrls);
- if (!ctrls.empty()) {
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_V4L2_SET_ISP;
- op.controls.push_back(ctrls);
- queueFrameAction.emit(0, op);
- }
- }
+ SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
+ if (sharpenStatus)
+ applySharpen(sharpenStatus, ctrls);
+
+ DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
+ if (dpcStatus)
+ applyDPC(dpcStatus, ctrls);
+
+ if (!ctrls.empty())
+ setIspControls.emit(ctrls);
}
-bool IPARPi::parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &deviceStatus)
+void IPARPi::fillDeviceStatus(const ControlList &sensorControls)
{
- auto it = buffersMemory_.find(bufferId);
- if (it == buffersMemory_.end()) {
- LOG(IPARPI, Error) << "Could not find embedded buffer!";
- return false;
- }
+ DeviceStatus deviceStatus = {};
- int size = buffers_.find(bufferId)->second.planes()[0].length;
- helper_->Parser().SetBufferSize(size);
- RPiController::MdParser::Status status = helper_->Parser().Parse(it->second);
- if (status != RPiController::MdParser::Status::OK) {
- LOG(IPARPI, Error) << "Embedded Buffer parsing failed, error " << status;
- } else {
- uint32_t exposureLines, gainCode;
- if (helper_->Parser().GetExposureLines(exposureLines) != RPiController::MdParser::Status::OK) {
- LOG(IPARPI, Error) << "Exposure time failed";
- return false;
- }
+ int32_t exposureLines = sensorControls.get(V4L2_CID_EXPOSURE).get<int32_t>();
+ int32_t gainCode = sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
+ int32_t vblank = sensorControls.get(V4L2_CID_VBLANK).get<int32_t>();
- deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
- if (helper_->Parser().GetGainCode(gainCode) != RPiController::MdParser::Status::OK) {
- LOG(IPARPI, Error) << "Gain failed";
- return false;
- }
+ deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
+ deviceStatus.analogue_gain = helper_->Gain(gainCode);
+ deviceStatus.frame_length = mode_.height + vblank;
- deviceStatus.analogue_gain = helper_->Gain(gainCode);
- LOG(IPARPI, Debug) << "Metadata - Exposure : "
- << deviceStatus.shutter_speed << " Gain : "
- << deviceStatus.analogue_gain;
- }
+ LOG(IPARPI, Debug) << "Metadata - " << deviceStatus;
- return true;
+ rpiMetadata_.Set("device.status", deviceStatus);
}
void IPARPi::processStats(unsigned int bufferId)
{
- auto it = buffersMemory_.find(bufferId);
- if (it == buffersMemory_.end()) {
+ auto it = buffers_.find(bufferId);
+ if (it == buffers_.end()) {
LOG(IPARPI, Error) << "Could not find stats buffer!";
return;
}
- bcm2835_isp_stats *stats = static_cast<bcm2835_isp_stats *>(it->second);
+ Span<uint8_t> mem = it->second.maps()[0];
+ bcm2835_isp_stats *stats = reinterpret_cast<bcm2835_isp_stats *>(mem.data());
RPiController::StatisticsPtr statistics = std::make_shared<bcm2835_isp_stats>(*stats);
+ helper_->Process(statistics, rpiMetadata_);
controller_.Process(statistics, &rpiMetadata_);
struct AgcStatus agcStatus;
if (rpiMetadata_.Get("agc.status", agcStatus) == 0) {
- ControlList ctrls(unicamCtrls_);
+ ControlList ctrls(sensorCtrls_);
applyAGC(&agcStatus, ctrls);
- IPAOperationData op;
- op.operation = RPi::IPA_ACTION_V4L2_SET_STAGGERED;
- op.controls.push_back(ctrls);
- queueFrameAction.emit(0, op);
+ setDelayedControls.emit(ctrls);
}
}
void IPARPi::applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls)
{
- const auto gainR = ispCtrls_.find(V4L2_CID_RED_BALANCE);
- if (gainR == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find red gain control";
- return;
- }
-
- const auto gainB = ispCtrls_.find(V4L2_CID_BLUE_BALANCE);
- if (gainB == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find blue gain control";
- return;
- }
-
LOG(IPARPI, Debug) << "Applying WB R: " << awbStatus->gain_r << " B: "
<< awbStatus->gain_b;
@@ -865,49 +1060,76 @@ void IPARPi::applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls)
static_cast<int32_t>(awbStatus->gain_b * 1000));
}
+/*
+ * Apply user-requested frame duration limits.
+ *
+ * A zero duration means "no user preference" and selects the corresponding
+ * default limit. The resulting limits are clamped to what the current sensor
+ * mode supports, reported back through FrameDurationLimits metadata, and used
+ * to derive the maximum shutter time the AGC may request.
+ *
+ * Fix: the zero-duration fallbacks were crossed - minFrameDuration_ fell back
+ * to defaultMaxFrameDuration and maxFrameDuration_ to defaultMinFrameDuration.
+ */
+void IPARPi::applyFrameDurations(Duration minFrameDuration, Duration maxFrameDuration)
+{
+	const Duration minSensorFrameDuration = mode_.min_frame_length * mode_.line_length;
+	const Duration maxSensorFrameDuration = mode_.max_frame_length * mode_.line_length;
+
+	/*
+	 * This will only be applied once AGC recalculations occur.
+	 * The values may be clamped based on the sensor mode capabilities as well.
+	 */
+	minFrameDuration_ = minFrameDuration ? minFrameDuration : defaultMinFrameDuration;
+	maxFrameDuration_ = maxFrameDuration ? maxFrameDuration : defaultMaxFrameDuration;
+	minFrameDuration_ = std::clamp(minFrameDuration_,
+				       minSensorFrameDuration, maxSensorFrameDuration);
+	maxFrameDuration_ = std::clamp(maxFrameDuration_,
+				       minSensorFrameDuration, maxSensorFrameDuration);
+	maxFrameDuration_ = std::max(maxFrameDuration_, minFrameDuration_);
+
+	/* Return the validated limits via metadata. */
+	libcameraMetadata_.set(controls::FrameDurationLimits,
+			       { static_cast<int64_t>(minFrameDuration_.get<std::micro>()),
+				 static_cast<int64_t>(maxFrameDuration_.get<std::micro>()) });
+
+	/*
+	 * Calculate the maximum exposure time possible for the AGC to use.
+	 * GetVBlanking() will update maxShutter with the largest exposure
+	 * value possible.
+	 */
+	Duration maxShutter = Duration::max();
+	helper_->GetVBlanking(maxShutter, minFrameDuration_, maxFrameDuration_);
+
+	/* The AGC algorithm is assumed to be always present. */
+	RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
+		controller_.GetAlgorithm("agc"));
+	agc->SetMaxShutter(maxShutter);
+}
+
void IPARPi::applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls)
{
int32_t gainCode = helper_->GainCode(agcStatus->analogue_gain);
- int32_t exposureLines = helper_->ExposureLines(agcStatus->shutter_time);
- if (unicamCtrls_.find(V4L2_CID_ANALOGUE_GAIN) == unicamCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find analogue gain control";
- return;
- }
-
- if (unicamCtrls_.find(V4L2_CID_EXPOSURE) == unicamCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find exposure control";
- return;
- }
+ /* GetVBlanking might clip exposure time to the fps limits. */
+ Duration exposure = agcStatus->shutter_time;
+ int32_t vblanking = helper_->GetVBlanking(exposure, minFrameDuration_, maxFrameDuration_);
+ int32_t exposureLines = helper_->ExposureLines(exposure);
- LOG(IPARPI, Debug) << "Applying AGC Exposure: " << agcStatus->shutter_time
- << " (Shutter lines: " << exposureLines << ") Gain: "
+ LOG(IPARPI, Debug) << "Applying AGC Exposure: " << exposure
+ << " (Shutter lines: " << exposureLines << ", AGC requested "
+ << agcStatus->shutter_time << ") Gain: "
<< agcStatus->analogue_gain << " (Gain Code: "
<< gainCode << ")";
- ctrls.set(V4L2_CID_ANALOGUE_GAIN, gainCode);
+ /*
+ * Due to the behavior of V4L2, the current value of VBLANK could clip the
+	 * exposure time without us knowing. The next time through, this function
+	 * should clip exposure correctly.
+ */
+ ctrls.set(V4L2_CID_VBLANK, vblanking);
ctrls.set(V4L2_CID_EXPOSURE, exposureLines);
+ ctrls.set(V4L2_CID_ANALOGUE_GAIN, gainCode);
}
void IPARPi::applyDG(const struct AgcStatus *dgStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_DIGITAL_GAIN) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find digital gain control";
- return;
- }
-
ctrls.set(V4L2_CID_DIGITAL_GAIN,
static_cast<int32_t>(dgStatus->digital_gain * 1000));
}
void IPARPi::applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_CC_MATRIX) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find CCM control";
- return;
- }
-
bcm2835_isp_custom_ccm ccm;
+
for (int i = 0; i < 9; i++) {
ccm.ccm.ccm[i / 3][i % 3].den = 1000;
ccm.ccm.ccm[i / 3][i % 3].num = 1000 * ccmStatus->matrix[i];
@@ -923,12 +1145,8 @@ void IPARPi::applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls)
void IPARPi::applyGamma(const struct ContrastStatus *contrastStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_GAMMA) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find Gamma control";
- return;
- }
-
struct bcm2835_isp_gamma gamma;
+
gamma.enabled = 1;
for (int i = 0; i < CONTRAST_NUM_POINTS; i++) {
gamma.x[i] = contrastStatus->points[i].x;
@@ -942,12 +1160,8 @@ void IPARPi::applyGamma(const struct ContrastStatus *contrastStatus, ControlList
void IPARPi::applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_BLACK_LEVEL) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find black level control";
- return;
- }
-
bcm2835_isp_black_level blackLevel;
+
blackLevel.enabled = 1;
blackLevel.black_level_r = blackLevelStatus->black_level_r;
blackLevel.black_level_g = blackLevelStatus->black_level_g;
@@ -960,12 +1174,8 @@ void IPARPi::applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, Co
void IPARPi::applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_GEQ) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find geq control";
- return;
- }
-
bcm2835_isp_geq geq;
+
geq.enabled = 1;
geq.offset = geqStatus->offset;
geq.slope.den = 1000;
@@ -976,34 +1186,48 @@ void IPARPi::applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls)
ctrls.set(V4L2_CID_USER_BCM2835_ISP_GEQ, c);
}
-void IPARPi::applyDenoise(const struct SdnStatus *denoiseStatus, ControlList &ctrls)
+void IPARPi::applyDenoise(const struct DenoiseStatus *denoiseStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_DENOISE) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find denoise control";
- return;
- }
+ using RPiController::DenoiseMode;
bcm2835_isp_denoise denoise;
- denoise.enabled = 1;
+ DenoiseMode mode = static_cast<DenoiseMode>(denoiseStatus->mode);
+
+ denoise.enabled = mode != DenoiseMode::Off;
denoise.constant = denoiseStatus->noise_constant;
denoise.slope.num = 1000 * denoiseStatus->noise_slope;
denoise.slope.den = 1000;
denoise.strength.num = 1000 * denoiseStatus->strength;
denoise.strength.den = 1000;
+ /* Set the CDN mode to match the SDN operating mode. */
+ bcm2835_isp_cdn cdn;
+ switch (mode) {
+ case DenoiseMode::ColourFast:
+ cdn.enabled = 1;
+ cdn.mode = CDN_MODE_FAST;
+ break;
+ case DenoiseMode::ColourHighQuality:
+ cdn.enabled = 1;
+ cdn.mode = CDN_MODE_HIGH_QUALITY;
+ break;
+ default:
+ cdn.enabled = 0;
+ }
+
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&denoise),
sizeof(denoise) });
ctrls.set(V4L2_CID_USER_BCM2835_ISP_DENOISE, c);
+
+ c = ControlValue(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&cdn),
+ sizeof(cdn) });
+ ctrls.set(V4L2_CID_USER_BCM2835_ISP_CDN, c);
}
void IPARPi::applySharpen(const struct SharpenStatus *sharpenStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_SHARPEN) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find sharpen control";
- return;
- }
-
bcm2835_isp_sharpen sharpen;
+
sharpen.enabled = 1;
sharpen.threshold.num = 1000 * sharpenStatus->threshold;
sharpen.threshold.den = 1000;
@@ -1019,12 +1243,8 @@ void IPARPi::applySharpen(const struct SharpenStatus *sharpenStatus, ControlList
void IPARPi::applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_DPC) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find DPC control";
- return;
- }
-
bcm2835_isp_dpc dpc;
+
dpc.enabled = 1;
dpc.strength = dpcStatus->strength;
@@ -1035,17 +1255,12 @@ void IPARPi::applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls)
void IPARPi::applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls)
{
- if (ispCtrls_.find(V4L2_CID_USER_BCM2835_ISP_LENS_SHADING) == ispCtrls_.end()) {
- LOG(IPARPI, Error) << "Can't find LS control";
- return;
- }
-
/*
* Program lens shading tables into pipeline.
* Choose smallest cell size that won't exceed 63x48 cells.
*/
const int cellSizes[] = { 16, 32, 64, 128, 256 };
- unsigned int numCells = ARRAY_SIZE(cellSizes);
+ unsigned int numCells = std::size(cellSizes);
unsigned int i, w, h, cellSize;
for (i = 0; i < numCells; i++) {
cellSize = cellSizes[i];
@@ -1068,13 +1283,14 @@ void IPARPi::applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls)
.grid_width = w,
.grid_stride = w,
.grid_height = h,
- .dmabuf = lsTableHandle_.fd(),
+ /* .dmabuf will be filled in by pipeline handler. */
+ .dmabuf = 0,
.ref_transform = 0,
.corner_sampled = 1,
.gain_format = GAIN_FORMAT_U4P10
};
- if (!lsTable_ || w * h * 4 * sizeof(uint16_t) > RPi::MaxLsGridSize) {
+ if (!lsTable_ || w * h * 4 * sizeof(uint16_t) > ipa::RPi::MaxLsGridSize) {
		LOG(IPARPI, Error) << "Do not have a correctly allocated lens shading table!";
return;
}
@@ -1145,11 +1361,11 @@ const struct IPAModuleInfo ipaModuleInfo = {
"raspberrypi",
};
-struct ipa_context *ipaCreate()
+IPAInterface *ipaCreate()
{
- return new IPAInterfaceWrapper(std::make_unique<IPARPi>());
+ return new IPARPi();
}
-}; /* extern "C" */
+} /* extern "C" */
} /* namespace libcamera */