author     Naushir Patuck <naush@raspberrypi.com>                    2022-07-27 09:55:17 +0100
committer  Laurent Pinchart <laurent.pinchart@ideasonboard.com>      2022-07-27 18:12:12 +0300
commit     177df04d2b7f357ebe41f1a9809ab68b6f948082 (patch)
tree       062bc7f480d96629461487c63b4762936a7dcb22 /src
parent     b4a3eb6b98ce65a6c9323368fa0afcb887739628 (diff)
ipa: raspberrypi: Code refactoring to match style guidelines
Refactor all the source files in src/ipa/raspberrypi/ to match the recommended
formatting guidelines for the libcamera project. The vast majority of changes
in this commit consist of switching from snake_case to CamelCase, and starting
class member functions with a lower case character.

Signed-off-by: Naushir Patuck <naush@raspberrypi.com>
Reviewed-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
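In practice the renaming looks like this; for instance, CamHelper::ExposureLines() from cam_helper.cpp, shown whole here for clarity (the corresponding hunk appears in the diff below):

/* Before: UpperCamelCase method, snake_case member */
uint32_t CamHelper::ExposureLines(const Duration exposure) const
{
	assert(initialized_);
	return exposure / mode_.line_length;
}

/* After: method starts lower case, member uses camelCase */
uint32_t CamHelper::exposureLines(const Duration exposure) const
{
	assert(initialized_);
	return exposure / mode_.lineLength;
}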
Diffstat (limited to 'src')
-rw-r--r--  src/ipa/raspberrypi/cam_helper.cpp | 88
-rw-r--r--  src/ipa/raspberrypi/cam_helper.hpp | 40
-rw-r--r--  src/ipa/raspberrypi/cam_helper_imx219.cpp | 34
-rw-r--r--  src/ipa/raspberrypi/cam_helper_imx290.cpp | 32
-rw-r--r--  src/ipa/raspberrypi/cam_helper_imx296.cpp | 24
-rw-r--r--  src/ipa/raspberrypi/cam_helper_imx477.cpp | 72
-rw-r--r--  src/ipa/raspberrypi/cam_helper_imx519.cpp | 70
-rw-r--r--  src/ipa/raspberrypi/cam_helper_ov5647.cpp | 44
-rw-r--r--  src/ipa/raspberrypi/cam_helper_ov9281.cpp | 28
-rw-r--r--  src/ipa/raspberrypi/controller/agc_algorithm.hpp | 19
-rw-r--r--  src/ipa/raspberrypi/controller/agc_status.h | 24
-rw-r--r--  src/ipa/raspberrypi/controller/algorithm.cpp | 20
-rw-r--r--  src/ipa/raspberrypi/controller/algorithm.hpp | 26
-rw-r--r--  src/ipa/raspberrypi/controller/awb_algorithm.hpp | 6
-rw-r--r--  src/ipa/raspberrypi/controller/awb_status.h | 8
-rw-r--r--  src/ipa/raspberrypi/controller/black_level_status.h | 6
-rw-r--r--  src/ipa/raspberrypi/controller/camera_mode.h | 16
-rw-r--r--  src/ipa/raspberrypi/controller/ccm_algorithm.hpp | 2
-rw-r--r--  src/ipa/raspberrypi/controller/contrast_algorithm.hpp | 4
-rw-r--r--  src/ipa/raspberrypi/controller/controller.cpp | 74
-rw-r--r--  src/ipa/raspberrypi/controller/controller.hpp | 22
-rw-r--r--  src/ipa/raspberrypi/controller/denoise_algorithm.hpp | 2
-rw-r--r--  src/ipa/raspberrypi/controller/denoise_status.h | 4
-rw-r--r--  src/ipa/raspberrypi/controller/device_status.cpp | 18
-rw-r--r--  src/ipa/raspberrypi/controller/device_status.h | 16
-rw-r--r--  src/ipa/raspberrypi/controller/focus_status.h | 2
-rw-r--r--  src/ipa/raspberrypi/controller/histogram.cpp | 34
-rw-r--r--  src/ipa/raspberrypi/controller/histogram.hpp | 10
-rw-r--r--  src/ipa/raspberrypi/controller/metadata.hpp | 16
-rw-r--r--  src/ipa/raspberrypi/controller/noise_status.h | 4
-rw-r--r--  src/ipa/raspberrypi/controller/pwl.cpp | 130
-rw-r--r--  src/ipa/raspberrypi/controller/pwl.hpp | 48
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/agc.cpp | 732
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/agc.hpp | 130
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/alsc.cpp | 641
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/alsc.hpp | 86
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/awb.cpp | 566
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/awb.hpp | 110
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/black_level.cpp | 34
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/black_level.hpp | 12
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/ccm.cpp | 84
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/ccm.hpp | 12
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/contrast.cpp | 118
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/contrast.hpp | 30
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/dpc.cpp | 18
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/dpc.hpp | 6
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/focus.cpp | 14
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/focus.hpp | 4
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/geq.cpp | 48
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/geq.hpp | 6
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/lux.cpp | 70
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/lux.hpp | 22
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/noise.cpp | 38
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/noise.hpp | 14
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/sdn.cpp | 36
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/sdn.hpp | 10
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/sharpen.cpp | 42
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/sharpen.hpp | 14
-rw-r--r--  src/ipa/raspberrypi/controller/sharpen_algorithm.hpp | 2
-rw-r--r--  src/ipa/raspberrypi/controller/sharpen_status.h | 2
-rw-r--r--  src/ipa/raspberrypi/md_parser.hpp | 44
-rw-r--r--  src/ipa/raspberrypi/md_parser_smia.cpp | 108
-rw-r--r--  src/ipa/raspberrypi/raspberrypi.cpp | 272
63 files changed, 2100 insertions, 2168 deletions
diff --git a/src/ipa/raspberrypi/cam_helper.cpp b/src/ipa/raspberrypi/cam_helper.cpp
index 3f81d418..45db3950 100644
--- a/src/ipa/raspberrypi/cam_helper.cpp
+++ b/src/ipa/raspberrypi/cam_helper.cpp
@@ -24,16 +24,16 @@ namespace libcamera {
LOG_DECLARE_CATEGORY(IPARPI)
}
-static std::map<std::string, CamHelperCreateFunc> cam_helpers;
+static std::map<std::string, CamHelperCreateFunc> camHelpers;
-CamHelper *CamHelper::Create(std::string const &cam_name)
+CamHelper *CamHelper::create(std::string const &camName)
{
/*
* CamHelpers get registered by static RegisterCamHelper
* initialisers.
*/
- for (auto &p : cam_helpers) {
- if (cam_name.find(p.first) != std::string::npos)
+ for (auto &p : camHelpers) {
+ if (camName.find(p.first) != std::string::npos)
return p.second();
}
@@ -50,35 +50,35 @@ CamHelper::~CamHelper()
{
}
-void CamHelper::Prepare(Span<const uint8_t> buffer,
+void CamHelper::prepare(Span<const uint8_t> buffer,
Metadata &metadata)
{
parseEmbeddedData(buffer, metadata);
}
-void CamHelper::Process([[maybe_unused]] StatisticsPtr &stats,
+void CamHelper::process([[maybe_unused]] StatisticsPtr &stats,
[[maybe_unused]] Metadata &metadata)
{
}
-uint32_t CamHelper::ExposureLines(const Duration exposure) const
+uint32_t CamHelper::exposureLines(const Duration exposure) const
{
assert(initialized_);
- return exposure / mode_.line_length;
+ return exposure / mode_.lineLength;
}
-Duration CamHelper::Exposure(uint32_t exposure_lines) const
+Duration CamHelper::exposure(uint32_t exposureLines) const
{
assert(initialized_);
- return exposure_lines * mode_.line_length;
+ return exposureLines * mode_.lineLength;
}
-uint32_t CamHelper::GetVBlanking(Duration &exposure,
+uint32_t CamHelper::getVBlanking(Duration &exposure,
Duration minFrameDuration,
Duration maxFrameDuration) const
{
uint32_t frameLengthMin, frameLengthMax, vblank;
- uint32_t exposureLines = ExposureLines(exposure);
+ uint32_t exposureLines = CamHelper::exposureLines(exposure);
assert(initialized_);
@@ -86,15 +86,15 @@ uint32_t CamHelper::GetVBlanking(Duration &exposure,
* minFrameDuration and maxFrameDuration are clamped by the caller
* based on the limits for the active sensor mode.
*/
- frameLengthMin = minFrameDuration / mode_.line_length;
- frameLengthMax = maxFrameDuration / mode_.line_length;
+ frameLengthMin = minFrameDuration / mode_.lineLength;
+ frameLengthMax = maxFrameDuration / mode_.lineLength;
/*
* Limit the exposure to the maximum frame duration requested, and
* re-calculate if it has been clipped.
*/
exposureLines = std::min(frameLengthMax - frameIntegrationDiff_, exposureLines);
- exposure = Exposure(exposureLines);
+ exposure = CamHelper::exposure(exposureLines);
/* Limit the vblank to the range allowed by the frame length limits. */
vblank = std::clamp(exposureLines + frameIntegrationDiff_,
@@ -102,34 +102,34 @@ uint32_t CamHelper::GetVBlanking(Duration &exposure,
return vblank;
}
-void CamHelper::SetCameraMode(const CameraMode &mode)
+void CamHelper::setCameraMode(const CameraMode &mode)
{
mode_ = mode;
if (parser_) {
- parser_->SetBitsPerPixel(mode.bitdepth);
- parser_->SetLineLengthBytes(0); /* We use SetBufferSize. */
+ parser_->setBitsPerPixel(mode.bitdepth);
+ parser_->setLineLengthBytes(0); /* We use SetBufferSize. */
}
initialized_ = true;
}
-void CamHelper::GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const
+void CamHelper::getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const
{
/*
* These values are correct for many sensors. Other sensors will
* need to over-ride this function.
*/
- exposure_delay = 2;
- gain_delay = 1;
- vblank_delay = 2;
+ exposureDelay = 2;
+ gainDelay = 1;
+ vblankDelay = 2;
}
-bool CamHelper::SensorEmbeddedDataPresent() const
+bool CamHelper::sensorEmbeddedDataPresent() const
{
return false;
}
-double CamHelper::GetModeSensitivity([[maybe_unused]] const CameraMode &mode) const
+double CamHelper::getModeSensitivity([[maybe_unused]] const CameraMode &mode) const
{
/*
* Most sensors have the same sensitivity in every mode, but this
@@ -140,7 +140,7 @@ double CamHelper::GetModeSensitivity([[maybe_unused]] const CameraMode &mode) co
return 1.0;
}
-unsigned int CamHelper::HideFramesStartup() const
+unsigned int CamHelper::hideFramesStartup() const
{
/*
* The number of frames when a camera first starts that shouldn't be
@@ -149,19 +149,19 @@ unsigned int CamHelper::HideFramesStartup() const
return 0;
}
-unsigned int CamHelper::HideFramesModeSwitch() const
+unsigned int CamHelper::hideFramesModeSwitch() const
{
/* After a mode switch, many sensors return valid frames immediately. */
return 0;
}
-unsigned int CamHelper::MistrustFramesStartup() const
+unsigned int CamHelper::mistrustFramesStartup() const
{
/* Many sensors return a single bad frame on start-up. */
return 1;
}
-unsigned int CamHelper::MistrustFramesModeSwitch() const
+unsigned int CamHelper::mistrustFramesModeSwitch() const
{
/* Many sensors return valid metadata immediately. */
return 0;
@@ -176,13 +176,13 @@ void CamHelper::parseEmbeddedData(Span<const uint8_t> buffer,
if (buffer.empty())
return;
- if (parser_->Parse(buffer, registers) != MdParser::Status::OK) {
+ if (parser_->parse(buffer, registers) != MdParser::Status::OK) {
LOG(IPARPI, Error) << "Embedded data buffer parsing failed";
return;
}
- PopulateMetadata(registers, parsedMetadata);
- metadata.Merge(parsedMetadata);
+ populateMetadata(registers, parsedMetadata);
+ metadata.merge(parsedMetadata);
/*
* Overwrite the exposure/gain, frame length and sensor temperature values
@@ -190,30 +190,30 @@ void CamHelper::parseEmbeddedData(Span<const uint8_t> buffer,
* Fetch it first in case any other fields were set meaningfully.
*/
DeviceStatus deviceStatus, parsedDeviceStatus;
- if (metadata.Get("device.status", deviceStatus) ||
- parsedMetadata.Get("device.status", parsedDeviceStatus)) {
+ if (metadata.get("device.status", deviceStatus) ||
+ parsedMetadata.get("device.status", parsedDeviceStatus)) {
LOG(IPARPI, Error) << "DeviceStatus not found";
return;
}
- deviceStatus.shutter_speed = parsedDeviceStatus.shutter_speed;
- deviceStatus.analogue_gain = parsedDeviceStatus.analogue_gain;
- deviceStatus.frame_length = parsedDeviceStatus.frame_length;
- if (parsedDeviceStatus.sensor_temperature)
- deviceStatus.sensor_temperature = parsedDeviceStatus.sensor_temperature;
+ deviceStatus.shutterSpeed = parsedDeviceStatus.shutterSpeed;
+ deviceStatus.analogueGain = parsedDeviceStatus.analogueGain;
+ deviceStatus.frameLength = parsedDeviceStatus.frameLength;
+ if (parsedDeviceStatus.sensorTemperature)
+ deviceStatus.sensorTemperature = parsedDeviceStatus.sensorTemperature;
LOG(IPARPI, Debug) << "Metadata updated - " << deviceStatus;
- metadata.Set("device.status", deviceStatus);
+ metadata.set("device.status", deviceStatus);
}
-void CamHelper::PopulateMetadata([[maybe_unused]] const MdParser::RegisterMap &registers,
+void CamHelper::populateMetadata([[maybe_unused]] const MdParser::RegisterMap &registers,
[[maybe_unused]] Metadata &metadata) const
{
}
-RegisterCamHelper::RegisterCamHelper(char const *cam_name,
- CamHelperCreateFunc create_func)
+RegisterCamHelper::RegisterCamHelper(char const *camName,
+ CamHelperCreateFunc createFunc)
{
- cam_helpers[std::string(cam_name)] = create_func;
+ camHelpers[std::string(camName)] = createFunc;
}
diff --git a/src/ipa/raspberrypi/cam_helper.hpp b/src/ipa/raspberrypi/cam_helper.hpp
index 300f8f8a..0cd718c4 100644
--- a/src/ipa/raspberrypi/cam_helper.hpp
+++ b/src/ipa/raspberrypi/cam_helper.hpp
@@ -69,33 +69,33 @@ namespace RPiController {
class CamHelper
{
public:
- static CamHelper *Create(std::string const &cam_name);
+ static CamHelper *create(std::string const &camName);
CamHelper(std::unique_ptr<MdParser> parser, unsigned int frameIntegrationDiff);
virtual ~CamHelper();
- void SetCameraMode(const CameraMode &mode);
- virtual void Prepare(libcamera::Span<const uint8_t> buffer,
+ void setCameraMode(const CameraMode &mode);
+ virtual void prepare(libcamera::Span<const uint8_t> buffer,
Metadata &metadata);
- virtual void Process(StatisticsPtr &stats, Metadata &metadata);
- virtual uint32_t ExposureLines(libcamera::utils::Duration exposure) const;
- virtual libcamera::utils::Duration Exposure(uint32_t exposure_lines) const;
- virtual uint32_t GetVBlanking(libcamera::utils::Duration &exposure,
+ virtual void process(StatisticsPtr &stats, Metadata &metadata);
+ virtual uint32_t exposureLines(libcamera::utils::Duration exposure) const;
+ virtual libcamera::utils::Duration exposure(uint32_t exposureLines) const;
+ virtual uint32_t getVBlanking(libcamera::utils::Duration &exposure,
libcamera::utils::Duration minFrameDuration,
libcamera::utils::Duration maxFrameDuration) const;
- virtual uint32_t GainCode(double gain) const = 0;
- virtual double Gain(uint32_t gain_code) const = 0;
- virtual void GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const;
- virtual bool SensorEmbeddedDataPresent() const;
- virtual double GetModeSensitivity(const CameraMode &mode) const;
- virtual unsigned int HideFramesStartup() const;
- virtual unsigned int HideFramesModeSwitch() const;
- virtual unsigned int MistrustFramesStartup() const;
- virtual unsigned int MistrustFramesModeSwitch() const;
+ virtual uint32_t gainCode(double gain) const = 0;
+ virtual double gain(uint32_t gainCode) const = 0;
+ virtual void getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const;
+ virtual bool sensorEmbeddedDataPresent() const;
+ virtual double getModeSensitivity(const CameraMode &mode) const;
+ virtual unsigned int hideFramesStartup() const;
+ virtual unsigned int hideFramesModeSwitch() const;
+ virtual unsigned int mistrustFramesStartup() const;
+ virtual unsigned int mistrustFramesModeSwitch() const;
protected:
void parseEmbeddedData(libcamera::Span<const uint8_t> buffer,
Metadata &metadata);
- virtual void PopulateMetadata(const MdParser::RegisterMap &registers,
+ virtual void populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const;
std::unique_ptr<MdParser> parser_;
@@ -116,8 +116,8 @@ private:
typedef CamHelper *(*CamHelperCreateFunc)();
struct RegisterCamHelper
{
- RegisterCamHelper(char const *cam_name,
- CamHelperCreateFunc create_func);
+ RegisterCamHelper(char const *camName,
+ CamHelperCreateFunc createFunc);
};
} // namespace RPi
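Taken together, cam_helper.cpp and cam_helper.hpp above keep the existing self-registration pattern under the new names: each sensor helper defines a file-scope create() function and a static RegisterCamHelper object whose constructor inserts that function into the camHelpers map, and CamHelper::create() later picks the entry whose key appears in the sensor name. A minimal sketch of a hypothetical helper written against the renamed interface (illustrative only; the "example" sensor name and its gain law are invented for this sketch):

#include "cam_helper.hpp"

using namespace RPiController;

class CamHelperExample : public CamHelper
{
public:
	/* No embedded-data parser; frame length must exceed exposure by 4 lines. */
	CamHelperExample()
		: CamHelper({}, 4)
	{
	}

	uint32_t gainCode(double gain) const override
	{
		return static_cast<uint32_t>(gain * 16.0);
	}

	double gain(uint32_t gainCode) const override
	{
		return static_cast<double>(gainCode) / 16.0;
	}
};

static CamHelper *create()
{
	return new CamHelperExample();
}

/* Runs at static-initialisation time and registers the helper by sensor name. */
static RegisterCamHelper reg("example", &create);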
diff --git a/src/ipa/raspberrypi/cam_helper_imx219.cpp b/src/ipa/raspberrypi/cam_helper_imx219.cpp
index a3caab71..17c35143 100644
--- a/src/ipa/raspberrypi/cam_helper_imx219.cpp
+++ b/src/ipa/raspberrypi/cam_helper_imx219.cpp
@@ -39,10 +39,10 @@ class CamHelperImx219 : public CamHelper
{
public:
CamHelperImx219();
- uint32_t GainCode(double gain) const override;
- double Gain(uint32_t gain_code) const override;
- unsigned int MistrustFramesModeSwitch() const override;
- bool SensorEmbeddedDataPresent() const override;
+ uint32_t gainCode(double gain) const override;
+ double gain(uint32_t gainCode) const override;
+ unsigned int mistrustFramesModeSwitch() const override;
+ bool sensorEmbeddedDataPresent() const override;
private:
/*
@@ -51,7 +51,7 @@ private:
*/
static constexpr int frameIntegrationDiff = 4;
- void PopulateMetadata(const MdParser::RegisterMap &registers,
+ void populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const override;
};
@@ -64,17 +64,17 @@ CamHelperImx219::CamHelperImx219()
{
}
-uint32_t CamHelperImx219::GainCode(double gain) const
+uint32_t CamHelperImx219::gainCode(double gain) const
{
return (uint32_t)(256 - 256 / gain);
}
-double CamHelperImx219::Gain(uint32_t gain_code) const
+double CamHelperImx219::gain(uint32_t gainCode) const
{
- return 256.0 / (256 - gain_code);
+ return 256.0 / (256 - gainCode);
}
-unsigned int CamHelperImx219::MistrustFramesModeSwitch() const
+unsigned int CamHelperImx219::mistrustFramesModeSwitch() const
{
/*
* For reasons unknown, we do occasionally get a bogus metadata frame
@@ -84,26 +84,26 @@ unsigned int CamHelperImx219::MistrustFramesModeSwitch() const
return 1;
}
-bool CamHelperImx219::SensorEmbeddedDataPresent() const
+bool CamHelperImx219::sensorEmbeddedDataPresent() const
{
return ENABLE_EMBEDDED_DATA;
}
-void CamHelperImx219::PopulateMetadata(const MdParser::RegisterMap &registers,
+void CamHelperImx219::populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const
{
DeviceStatus deviceStatus;
- deviceStatus.shutter_speed = Exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
- deviceStatus.analogue_gain = Gain(registers.at(gainReg));
- deviceStatus.frame_length = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
+ deviceStatus.shutterSpeed = exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
+ deviceStatus.analogueGain = gain(registers.at(gainReg));
+ deviceStatus.frameLength = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
- metadata.Set("device.status", deviceStatus);
+ metadata.set("device.status", deviceStatus);
}
-static CamHelper *Create()
+static CamHelper *create()
{
return new CamHelperImx219();
}
-static RegisterCamHelper reg("imx219", &Create);
+static RegisterCamHelper reg("imx219", &create);
diff --git a/src/ipa/raspberrypi/cam_helper_imx290.cpp b/src/ipa/raspberrypi/cam_helper_imx290.cpp
index 871c1f8e..0d9a94d8 100644
--- a/src/ipa/raspberrypi/cam_helper_imx290.cpp
+++ b/src/ipa/raspberrypi/cam_helper_imx290.cpp
@@ -15,11 +15,11 @@ class CamHelperImx290 : public CamHelper
{
public:
CamHelperImx290();
- uint32_t GainCode(double gain) const override;
- double Gain(uint32_t gain_code) const override;
- void GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const override;
- unsigned int HideFramesModeSwitch() const override;
+ uint32_t gainCode(double gain) const override;
+ double gain(uint32_t gainCode) const override;
+ void getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const override;
+ unsigned int hideFramesModeSwitch() const override;
private:
/*
@@ -34,34 +34,34 @@ CamHelperImx290::CamHelperImx290()
{
}
-uint32_t CamHelperImx290::GainCode(double gain) const
+uint32_t CamHelperImx290::gainCode(double gain) const
{
int code = 66.6667 * log10(gain);
return std::max(0, std::min(code, 0xf0));
}
-double CamHelperImx290::Gain(uint32_t gain_code) const
+double CamHelperImx290::gain(uint32_t gainCode) const
{
- return pow(10, 0.015 * gain_code);
+ return pow(10, 0.015 * gainCode);
}
-void CamHelperImx290::GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const
+void CamHelperImx290::getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const
{
- exposure_delay = 2;
- gain_delay = 2;
- vblank_delay = 2;
+ exposureDelay = 2;
+ gainDelay = 2;
+ vblankDelay = 2;
}
-unsigned int CamHelperImx290::HideFramesModeSwitch() const
+unsigned int CamHelperImx290::hideFramesModeSwitch() const
{
/* After a mode switch, we seem to get 1 bad frame. */
return 1;
}
-static CamHelper *Create()
+static CamHelper *create()
{
return new CamHelperImx290();
}
-static RegisterCamHelper reg("imx290", &Create);
+static RegisterCamHelper reg("imx290", &create);
diff --git a/src/ipa/raspberrypi/cam_helper_imx296.cpp b/src/ipa/raspberrypi/cam_helper_imx296.cpp
index a1a771cb..15674335 100644
--- a/src/ipa/raspberrypi/cam_helper_imx296.cpp
+++ b/src/ipa/raspberrypi/cam_helper_imx296.cpp
@@ -19,10 +19,10 @@ class CamHelperImx296 : public CamHelper
{
public:
CamHelperImx296();
- uint32_t GainCode(double gain) const override;
- double Gain(uint32_t gain_code) const override;
- uint32_t ExposureLines(Duration exposure) const override;
- Duration Exposure(uint32_t exposure_lines) const override;
+ uint32_t gainCode(double gain) const override;
+ double gain(uint32_t gainCode) const override;
+ uint32_t exposureLines(Duration exposure) const override;
+ Duration exposure(uint32_t exposureLines) const override;
private:
static constexpr uint32_t maxGainCode = 239;
@@ -40,30 +40,30 @@ CamHelperImx296::CamHelperImx296()
{
}
-uint32_t CamHelperImx296::GainCode(double gain) const
+uint32_t CamHelperImx296::gainCode(double gain) const
{
uint32_t code = 20 * std::log10(gain) * 10;
return std::min(code, maxGainCode);
}
-double CamHelperImx296::Gain(uint32_t gain_code) const
+double CamHelperImx296::gain(uint32_t gainCode) const
{
- return std::pow(10.0, gain_code / 200.0);
+ return std::pow(10.0, gainCode / 200.0);
}
-uint32_t CamHelperImx296::ExposureLines(Duration exposure) const
+uint32_t CamHelperImx296::exposureLines(Duration exposure) const
{
return (exposure - 14.26us) / timePerLine;
}
-Duration CamHelperImx296::Exposure(uint32_t exposure_lines) const
+Duration CamHelperImx296::exposure(uint32_t exposureLines) const
{
- return exposure_lines * timePerLine + 14.26us;
+ return exposureLines * timePerLine + 14.26us;
}
-static CamHelper *Create()
+static CamHelper *create()
{
return new CamHelperImx296();
}
-static RegisterCamHelper reg("imx296", &Create);
+static RegisterCamHelper reg("imx296", &create);
diff --git a/src/ipa/raspberrypi/cam_helper_imx477.cpp b/src/ipa/raspberrypi/cam_helper_imx477.cpp
index 0e1c0dbd..0767a5e1 100644
--- a/src/ipa/raspberrypi/cam_helper_imx477.cpp
+++ b/src/ipa/raspberrypi/cam_helper_imx477.cpp
@@ -43,14 +43,14 @@ class CamHelperImx477 : public CamHelper
{
public:
CamHelperImx477();
- uint32_t GainCode(double gain) const override;
- double Gain(uint32_t gain_code) const override;
- void Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
- uint32_t GetVBlanking(Duration &exposure, Duration minFrameDuration,
+ uint32_t gainCode(double gain) const override;
+ double gain(uint32_t gainCode) const override;
+ void prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
+ uint32_t getVBlanking(Duration &exposure, Duration minFrameDuration,
Duration maxFrameDuration) const override;
- void GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const override;
- bool SensorEmbeddedDataPresent() const override;
+ void getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const override;
+ bool sensorEmbeddedDataPresent() const override;
private:
/*
@@ -63,7 +63,7 @@ private:
/* Largest long exposure scale factor given as a left shift on the frame length. */
static constexpr int longExposureShiftMax = 7;
- void PopulateMetadata(const MdParser::RegisterMap &registers,
+ void populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const override;
};
@@ -72,22 +72,22 @@ CamHelperImx477::CamHelperImx477()
{
}
-uint32_t CamHelperImx477::GainCode(double gain) const
+uint32_t CamHelperImx477::gainCode(double gain) const
{
return static_cast<uint32_t>(1024 - 1024 / gain);
}
-double CamHelperImx477::Gain(uint32_t gain_code) const
+double CamHelperImx477::gain(uint32_t gainCode) const
{
- return 1024.0 / (1024 - gain_code);
+ return 1024.0 / (1024 - gainCode);
}
-void CamHelperImx477::Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
+void CamHelperImx477::prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
{
MdParser::RegisterMap registers;
DeviceStatus deviceStatus;
- if (metadata.Get("device.status", deviceStatus)) {
+ if (metadata.get("device.status", deviceStatus)) {
LOG(IPARPI, Error) << "DeviceStatus not found from DelayedControls";
return;
}
@@ -105,27 +105,27 @@ void CamHelperImx477::Prepare(libcamera::Span<const uint8_t> buffer, Metadata &m
* Otherwise, all values are updated with what is reported in the
* embedded data.
*/
- if (deviceStatus.frame_length > frameLengthMax) {
+ if (deviceStatus.frameLength > frameLengthMax) {
DeviceStatus parsedDeviceStatus;
- metadata.Get("device.status", parsedDeviceStatus);
- parsedDeviceStatus.shutter_speed = deviceStatus.shutter_speed;
- parsedDeviceStatus.frame_length = deviceStatus.frame_length;
- metadata.Set("device.status", parsedDeviceStatus);
+ metadata.get("device.status", parsedDeviceStatus);
+ parsedDeviceStatus.shutterSpeed = deviceStatus.shutterSpeed;
+ parsedDeviceStatus.frameLength = deviceStatus.frameLength;
+ metadata.set("device.status", parsedDeviceStatus);
LOG(IPARPI, Debug) << "Metadata updated for long exposure: "
<< parsedDeviceStatus;
}
}
-uint32_t CamHelperImx477::GetVBlanking(Duration &exposure,
+uint32_t CamHelperImx477::getVBlanking(Duration &exposure,
Duration minFrameDuration,
Duration maxFrameDuration) const
{
uint32_t frameLength, exposureLines;
unsigned int shift = 0;
- frameLength = mode_.height + CamHelper::GetVBlanking(exposure, minFrameDuration,
+ frameLength = mode_.height + CamHelper::getVBlanking(exposure, minFrameDuration,
maxFrameDuration);
/*
* Check if the frame length calculated needs to be setup for long
@@ -144,43 +144,43 @@ uint32_t CamHelperImx477::GetVBlanking(Duration &exposure,
if (shift) {
/* Account for any rounding in the scaled frame length value. */
frameLength <<= shift;
- exposureLines = ExposureLines(exposure);
+ exposureLines = CamHelperImx477::exposureLines(exposure);
exposureLines = std::min(exposureLines, frameLength - frameIntegrationDiff);
- exposure = Exposure(exposureLines);
+ exposure = CamHelperImx477::exposure(exposureLines);
}
return frameLength - mode_.height;
}
-void CamHelperImx477::GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const
+void CamHelperImx477::getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const
{
- exposure_delay = 2;
- gain_delay = 2;
- vblank_delay = 3;
+ exposureDelay = 2;
+ gainDelay = 2;
+ vblankDelay = 3;
}
-bool CamHelperImx477::SensorEmbeddedDataPresent() const
+bool CamHelperImx477::sensorEmbeddedDataPresent() const
{
return true;
}
-void CamHelperImx477::PopulateMetadata(const MdParser::RegisterMap &registers,
+void CamHelperImx477::populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const
{
DeviceStatus deviceStatus;
- deviceStatus.shutter_speed = Exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
- deviceStatus.analogue_gain = Gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
- deviceStatus.frame_length = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
- deviceStatus.sensor_temperature = std::clamp<int8_t>(registers.at(temperatureReg), -20, 80);
+ deviceStatus.shutterSpeed = exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
+ deviceStatus.analogueGain = gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
+ deviceStatus.frameLength = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
+ deviceStatus.sensorTemperature = std::clamp<int8_t>(registers.at(temperatureReg), -20, 80);
- metadata.Set("device.status", deviceStatus);
+ metadata.set("device.status", deviceStatus);
}
-static CamHelper *Create()
+static CamHelper *create()
{
return new CamHelperImx477();
}
-static RegisterCamHelper reg("imx477", &Create);
+static RegisterCamHelper reg("imx477", &create);
diff --git a/src/ipa/raspberrypi/cam_helper_imx519.cpp b/src/ipa/raspberrypi/cam_helper_imx519.cpp
index eaf24982..1752760e 100644
--- a/src/ipa/raspberrypi/cam_helper_imx519.cpp
+++ b/src/ipa/raspberrypi/cam_helper_imx519.cpp
@@ -43,14 +43,14 @@ class CamHelperImx519 : public CamHelper
{
public:
CamHelperImx519();
- uint32_t GainCode(double gain) const override;
- double Gain(uint32_t gain_code) const override;
- void Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
- uint32_t GetVBlanking(Duration &exposure, Duration minFrameDuration,
+ uint32_t gainCode(double gain) const override;
+ double gain(uint32_t gainCode) const override;
+ void prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
+ uint32_t getVBlanking(Duration &exposure, Duration minFrameDuration,
Duration maxFrameDuration) const override;
- void GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const override;
- bool SensorEmbeddedDataPresent() const override;
+ void getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const override;
+ bool sensorEmbeddedDataPresent() const override;
private:
/*
@@ -63,7 +63,7 @@ private:
/* Largest long exposure scale factor given as a left shift on the frame length. */
static constexpr int longExposureShiftMax = 7;
- void PopulateMetadata(const MdParser::RegisterMap &registers,
+ void populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const override;
};
@@ -72,22 +72,22 @@ CamHelperImx519::CamHelperImx519()
{
}
-uint32_t CamHelperImx519::GainCode(double gain) const
+uint32_t CamHelperImx519::gainCode(double gain) const
{
return static_cast<uint32_t>(1024 - 1024 / gain);
}
-double CamHelperImx519::Gain(uint32_t gain_code) const
+double CamHelperImx519::gain(uint32_t gainCode) const
{
- return 1024.0 / (1024 - gain_code);
+ return 1024.0 / (1024 - gainCode);
}
-void CamHelperImx519::Prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
+void CamHelperImx519::prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
{
MdParser::RegisterMap registers;
DeviceStatus deviceStatus;
- if (metadata.Get("device.status", deviceStatus)) {
+ if (metadata.get("device.status", deviceStatus)) {
LOG(IPARPI, Error) << "DeviceStatus not found from DelayedControls";
return;
}
@@ -105,27 +105,27 @@ void CamHelperImx519::Prepare(libcamera::Span<const uint8_t> buffer, Metadata &m
* Otherwise, all values are updated with what is reported in the
* embedded data.
*/
- if (deviceStatus.frame_length > frameLengthMax) {
+ if (deviceStatus.frameLength > frameLengthMax) {
DeviceStatus parsedDeviceStatus;
- metadata.Get("device.status", parsedDeviceStatus);
- parsedDeviceStatus.shutter_speed = deviceStatus.shutter_speed;
- parsedDeviceStatus.frame_length = deviceStatus.frame_length;
- metadata.Set("device.status", parsedDeviceStatus);
+ metadata.get("device.status", parsedDeviceStatus);
+ parsedDeviceStatus.shutterSpeed = deviceStatus.shutterSpeed;
+ parsedDeviceStatus.frameLength = deviceStatus.frameLength;
+ metadata.set("device.status", parsedDeviceStatus);
LOG(IPARPI, Debug) << "Metadata updated for long exposure: "
<< parsedDeviceStatus;
}
}
-uint32_t CamHelperImx519::GetVBlanking(Duration &exposure,
+uint32_t CamHelperImx519::getVBlanking(Duration &exposure,
Duration minFrameDuration,
Duration maxFrameDuration) const
{
uint32_t frameLength, exposureLines;
unsigned int shift = 0;
- frameLength = mode_.height + CamHelper::GetVBlanking(exposure, minFrameDuration,
+ frameLength = mode_.height + CamHelper::getVBlanking(exposure, minFrameDuration,
maxFrameDuration);
/*
* Check if the frame length calculated needs to be setup for long
@@ -144,42 +144,42 @@ uint32_t CamHelperImx519::GetVBlanking(Duration &exposure,
if (shift) {
/* Account for any rounding in the scaled frame length value. */
frameLength <<= shift;
- exposureLines = ExposureLines(exposure);
+ exposureLines = CamHelperImx519::exposureLines(exposure);
exposureLines = std::min(exposureLines, frameLength - frameIntegrationDiff);
- exposure = Exposure(exposureLines);
+ exposure = CamHelperImx519::exposure(exposureLines);
}
return frameLength - mode_.height;
}
-void CamHelperImx519::GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const
+void CamHelperImx519::getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const
{
- exposure_delay = 2;
- gain_delay = 2;
- vblank_delay = 3;
+ exposureDelay = 2;
+ gainDelay = 2;
+ vblankDelay = 3;
}
-bool CamHelperImx519::SensorEmbeddedDataPresent() const
+bool CamHelperImx519::sensorEmbeddedDataPresent() const
{
return true;
}
-void CamHelperImx519::PopulateMetadata(const MdParser::RegisterMap &registers,
+void CamHelperImx519::populateMetadata(const MdParser::RegisterMap &registers,
Metadata &metadata) const
{
DeviceStatus deviceStatus;
- deviceStatus.shutter_speed = Exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
- deviceStatus.analogue_gain = Gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
- deviceStatus.frame_length = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
+ deviceStatus.shutterSpeed = exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg));
+ deviceStatus.analogueGain = gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
+ deviceStatus.frameLength = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);
- metadata.Set("device.status", deviceStatus);
+ metadata.set("device.status", deviceStatus);
}
-static CamHelper *Create()
+static CamHelper *create()
{
return new CamHelperImx519();
}
-static RegisterCamHelper reg("imx519", &Create);
+static RegisterCamHelper reg("imx519", &create);
diff --git a/src/ipa/raspberrypi/cam_helper_ov5647.cpp b/src/ipa/raspberrypi/cam_helper_ov5647.cpp
index 702c2d07..9dc3fc44 100644
--- a/src/ipa/raspberrypi/cam_helper_ov5647.cpp
+++ b/src/ipa/raspberrypi/cam_helper_ov5647.cpp
@@ -15,14 +15,14 @@ class CamHelperOv5647 : public CamHelper
{
public:
CamHelperOv5647();
- uint32_t GainCode(double gain) const override;
- double Gain(uint32_t gain_code) const override;
- void GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const override;
- unsigned int HideFramesStartup() const override;
- unsigned int HideFramesModeSwitch() const override;
- unsigned int MistrustFramesStartup() const override;
- unsigned int MistrustFramesModeSwitch() const override;
+ uint32_t gainCode(double gain) const override;
+ double gain(uint32_t gainCode) const override;
+ void getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const override;
+ unsigned int hideFramesStartup() const override;
+ unsigned int hideFramesModeSwitch() const override;
+ unsigned int mistrustFramesStartup() const override;
+ unsigned int mistrustFramesModeSwitch() const override;
private:
/*
@@ -42,29 +42,29 @@ CamHelperOv5647::CamHelperOv5647()
{
}
-uint32_t CamHelperOv5647::GainCode(double gain) const
+uint32_t CamHelperOv5647::gainCode(double gain) const
{
return static_cast<uint32_t>(gain * 16.0);
}
-double CamHelperOv5647::Gain(uint32_t gain_code) const
+double CamHelperOv5647::gain(uint32_t gainCode) const
{
- return static_cast<double>(gain_code) / 16.0;
+ return static_cast<double>(gainCode) / 16.0;
}
-void CamHelperOv5647::GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const
+void CamHelperOv5647::getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const
{
/*
* We run this sensor in a mode where the gain delay is bumped up to
* 2. It seems to be the only way to make the delays "predictable".
*/
- exposure_delay = 2;
- gain_delay = 2;
- vblank_delay = 2;
+ exposureDelay = 2;
+ gainDelay = 2;
+ vblankDelay = 2;
}
-unsigned int CamHelperOv5647::HideFramesStartup() const
+unsigned int CamHelperOv5647::hideFramesStartup() const
{
/*
* On startup, we get a couple of under-exposed frames which
@@ -73,7 +73,7 @@ unsigned int CamHelperOv5647::HideFramesStartup() const
return 2;
}
-unsigned int CamHelperOv5647::HideFramesModeSwitch() const
+unsigned int CamHelperOv5647::hideFramesModeSwitch() const
{
/*
* After a mode switch, we get a couple of under-exposed frames which
@@ -82,7 +82,7 @@ unsigned int CamHelperOv5647::HideFramesModeSwitch() const
return 2;
}
-unsigned int CamHelperOv5647::MistrustFramesStartup() const
+unsigned int CamHelperOv5647::mistrustFramesStartup() const
{
/*
* First couple of frames are under-exposed and are no good for control
@@ -91,7 +91,7 @@ unsigned int CamHelperOv5647::MistrustFramesStartup() const
return 2;
}
-unsigned int CamHelperOv5647::MistrustFramesModeSwitch() const
+unsigned int CamHelperOv5647::mistrustFramesModeSwitch() const
{
/*
* First couple of frames are under-exposed even after a simple
@@ -100,9 +100,9 @@ unsigned int CamHelperOv5647::MistrustFramesModeSwitch() const
return 2;
}
-static CamHelper *Create()
+static CamHelper *create()
{
return new CamHelperOv5647();
}
-static RegisterCamHelper reg("ov5647", &Create);
+static RegisterCamHelper reg("ov5647", &create);
diff --git a/src/ipa/raspberrypi/cam_helper_ov9281.cpp b/src/ipa/raspberrypi/cam_helper_ov9281.cpp
index 9de868c3..130450af 100644
--- a/src/ipa/raspberrypi/cam_helper_ov9281.cpp
+++ b/src/ipa/raspberrypi/cam_helper_ov9281.cpp
@@ -15,10 +15,10 @@ class CamHelperOv9281 : public CamHelper
{
public:
CamHelperOv9281();
- uint32_t GainCode(double gain) const override;
- double Gain(uint32_t gain_code) const override;
- void GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const override;
+ uint32_t gainCode(double gain) const override;
+ double gain(uint32_t gainCode) const override;
+ void getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const override;
private:
/*
@@ -38,28 +38,28 @@ CamHelperOv9281::CamHelperOv9281()
{
}
-uint32_t CamHelperOv9281::GainCode(double gain) const
+uint32_t CamHelperOv9281::gainCode(double gain) const
{
return static_cast<uint32_t>(gain * 16.0);
}
-double CamHelperOv9281::Gain(uint32_t gain_code) const
+double CamHelperOv9281::gain(uint32_t gainCode) const
{
- return static_cast<double>(gain_code) / 16.0;
+ return static_cast<double>(gainCode) / 16.0;
}
-void CamHelperOv9281::GetDelays(int &exposure_delay, int &gain_delay,
- int &vblank_delay) const
+void CamHelperOv9281::getDelays(int &exposureDelay, int &gainDelay,
+ int &vblankDelay) const
{
/* The driver appears to behave as follows: */
- exposure_delay = 2;
- gain_delay = 2;
- vblank_delay = 2;
+ exposureDelay = 2;
+ gainDelay = 2;
+ vblankDelay = 2;
}
-static CamHelper *Create()
+static CamHelper *create()
{
return new CamHelperOv9281();
}
-static RegisterCamHelper reg("ov9281", &Create);
+static RegisterCamHelper reg("ov9281", &create);
diff --git a/src/ipa/raspberrypi/controller/agc_algorithm.hpp b/src/ipa/raspberrypi/controller/agc_algorithm.hpp
index 61595ea2..51900b68 100644
--- a/src/ipa/raspberrypi/controller/agc_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/agc_algorithm.hpp
@@ -17,16 +17,15 @@ class AgcAlgorithm : public Algorithm
public:
AgcAlgorithm(Controller *controller) : Algorithm(controller) {}
// An AGC algorithm must provide the following:
- virtual unsigned int GetConvergenceFrames() const = 0;
- virtual void SetEv(double ev) = 0;
- virtual void SetFlickerPeriod(libcamera::utils::Duration flicker_period) = 0;
- virtual void SetFixedShutter(libcamera::utils::Duration fixed_shutter) = 0;
- virtual void SetMaxShutter(libcamera::utils::Duration max_shutter) = 0;
- virtual void SetFixedAnalogueGain(double fixed_analogue_gain) = 0;
- virtual void SetMeteringMode(std::string const &metering_mode_name) = 0;
- virtual void SetExposureMode(std::string const &exposure_mode_name) = 0;
- virtual void
- SetConstraintMode(std::string const &contraint_mode_name) = 0;
+ virtual unsigned int getConvergenceFrames() const = 0;
+ virtual void setEv(double ev) = 0;
+ virtual void setFlickerPeriod(libcamera::utils::Duration flickerPeriod) = 0;
+ virtual void setFixedShutter(libcamera::utils::Duration fixedShutter) = 0;
+ virtual void setMaxShutter(libcamera::utils::Duration maxShutter) = 0;
+ virtual void setFixedAnalogueGain(double fixedAnalogueGain) = 0;
+ virtual void setMeteringMode(std::string const &meteringModeName) = 0;
+ virtual void setExposureMode(std::string const &exposureModeName) = 0;
+ virtual void setConstraintMode(std::string const &contraintModeName) = 0;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/agc_status.h b/src/ipa/raspberrypi/controller/agc_status.h
index 20cb1b62..d36b40d8 100644
--- a/src/ipa/raspberrypi/controller/agc_status.h
+++ b/src/ipa/raspberrypi/controller/agc_status.h
@@ -20,19 +20,19 @@ extern "C" {
// ignored until then.
struct AgcStatus {
- libcamera::utils::Duration total_exposure_value; // value for all exposure and gain for this image
- libcamera::utils::Duration target_exposure_value; // (unfiltered) target total exposure AGC is aiming for
- libcamera::utils::Duration shutter_time;
- double analogue_gain;
- char exposure_mode[32];
- char constraint_mode[32];
- char metering_mode[32];
+ libcamera::utils::Duration totalExposureValue; // value for all exposure and gain for this image
+ libcamera::utils::Duration targetExposureValue; // (unfiltered) target total exposure AGC is aiming for
+ libcamera::utils::Duration shutterTime;
+ double analogueGain;
+ char exposureMode[32];
+ char constraintMode[32];
+ char meteringMode[32];
double ev;
- libcamera::utils::Duration flicker_period;
- int floating_region_enable;
- libcamera::utils::Duration fixed_shutter;
- double fixed_analogue_gain;
- double digital_gain;
+ libcamera::utils::Duration flickerPeriod;
+ int floatingRegionEnable;
+ libcamera::utils::Duration fixedShutter;
+ double fixedAnalogueGain;
+ double digitalGain;
int locked;
};
diff --git a/src/ipa/raspberrypi/controller/algorithm.cpp b/src/ipa/raspberrypi/controller/algorithm.cpp
index 43ad0a2b..cfcd18a9 100644
--- a/src/ipa/raspberrypi/controller/algorithm.cpp
+++ b/src/ipa/raspberrypi/controller/algorithm.cpp
@@ -9,36 +9,38 @@
using namespace RPiController;
-void Algorithm::Read([[maybe_unused]] boost::property_tree::ptree const &params)
+void Algorithm::read([[maybe_unused]] boost::property_tree::ptree const &params)
{
}
-void Algorithm::Initialise() {}
+void Algorithm::initialise()
+{
+}
-void Algorithm::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
+void Algorithm::switchMode([[maybe_unused]] CameraMode const &cameraMode,
[[maybe_unused]] Metadata *metadata)
{
}
-void Algorithm::Prepare([[maybe_unused]] Metadata *image_metadata)
+void Algorithm::prepare([[maybe_unused]] Metadata *imageMetadata)
{
}
-void Algorithm::Process([[maybe_unused]] StatisticsPtr &stats,
- [[maybe_unused]] Metadata *image_metadata)
+void Algorithm::process([[maybe_unused]] StatisticsPtr &stats,
+ [[maybe_unused]] Metadata *imageMetadata)
{
}
// For registering algorithms with the system:
static std::map<std::string, AlgoCreateFunc> algorithms;
-std::map<std::string, AlgoCreateFunc> const &RPiController::GetAlgorithms()
+std::map<std::string, AlgoCreateFunc> const &RPiController::getAlgorithms()
{
return algorithms;
}
RegisterAlgorithm::RegisterAlgorithm(char const *name,
- AlgoCreateFunc create_func)
+ AlgoCreateFunc createFunc)
{
- algorithms[std::string(name)] = create_func;
+ algorithms[std::string(name)] = createFunc;
}
diff --git a/src/ipa/raspberrypi/controller/algorithm.hpp b/src/ipa/raspberrypi/controller/algorithm.hpp
index 5123c87b..a33b14da 100644
--- a/src/ipa/raspberrypi/controller/algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/algorithm.hpp
@@ -29,18 +29,18 @@ public:
{
}
virtual ~Algorithm() = default;
- virtual char const *Name() const = 0;
- virtual bool IsPaused() const { return paused_; }
- virtual void Pause() { paused_ = true; }
- virtual void Resume() { paused_ = false; }
- virtual void Read(boost::property_tree::ptree const &params);
- virtual void Initialise();
- virtual void SwitchMode(CameraMode const &camera_mode, Metadata *metadata);
- virtual void Prepare(Metadata *image_metadata);
- virtual void Process(StatisticsPtr &stats, Metadata *image_metadata);
- Metadata &GetGlobalMetadata() const
+ virtual char const *name() const = 0;
+ virtual bool isPaused() const { return paused_; }
+ virtual void pause() { paused_ = true; }
+ virtual void resume() { paused_ = false; }
+ virtual void read(boost::property_tree::ptree const &params);
+ virtual void initialise();
+ virtual void switchMode(CameraMode const &cameraMode, Metadata *metadata);
+ virtual void prepare(Metadata *imageMetadata);
+ virtual void process(StatisticsPtr &stats, Metadata *imageMetadata);
+ Metadata &getGlobalMetadata() const
{
- return controller_->GetGlobalMetadata();
+ return controller_->getGlobalMetadata();
}
private:
@@ -53,8 +53,8 @@ private:
typedef Algorithm *(*AlgoCreateFunc)(Controller *controller);
struct RegisterAlgorithm {
- RegisterAlgorithm(char const *name, AlgoCreateFunc create_func);
+ RegisterAlgorithm(char const *name, AlgoCreateFunc createFunc);
};
-std::map<std::string, AlgoCreateFunc> const &GetAlgorithms();
+std::map<std::string, AlgoCreateFunc> const &getAlgorithms();
} // namespace RPiController
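algorithm.cpp and algorithm.hpp above show the same registration idiom for control algorithms: a static RegisterAlgorithm object maps a name to an AlgoCreateFunc, and Controller::read() (further below) looks each top-level key of the tuning JSON up in getAlgorithms(). A minimal sketch of a hypothetical algorithm written against the renamed virtuals (the class name and the "strength" parameter are invented for illustration):

#include "algorithm.hpp"

using namespace RPiController;

class ExampleAlgorithm : public Algorithm
{
public:
	ExampleAlgorithm(Controller *controller)
		: Algorithm(controller), strength_(1.0)
	{
	}

	char const *name() const override { return "rpi.example"; }

	void read(boost::property_tree::ptree const &params) override
	{
		/* Parse this algorithm's section of the tuning file. */
		strength_ = params.get<double>("strength", 1.0);
	}

	void prepare(Metadata *imageMetadata) override
	{
		/* Publish or consume per-frame entries via imageMetadata here. */
		(void)imageMetadata;
	}

private:
	double strength_;
};

static Algorithm *create(Controller *controller)
{
	return new ExampleAlgorithm(controller);
}

static RegisterAlgorithm reg("rpi.example", &create);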
diff --git a/src/ipa/raspberrypi/controller/awb_algorithm.hpp b/src/ipa/raspberrypi/controller/awb_algorithm.hpp
index 96f88afc..c5d2ca90 100644
--- a/src/ipa/raspberrypi/controller/awb_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/awb_algorithm.hpp
@@ -15,9 +15,9 @@ class AwbAlgorithm : public Algorithm
public:
AwbAlgorithm(Controller *controller) : Algorithm(controller) {}
// An AWB algorithm must provide the following:
- virtual unsigned int GetConvergenceFrames() const = 0;
- virtual void SetMode(std::string const &mode_name) = 0;
- virtual void SetManualGains(double manual_r, double manual_b) = 0;
+ virtual unsigned int getConvergenceFrames() const = 0;
+ virtual void setMode(std::string const &modeName) = 0;
+ virtual void setManualGains(double manualR, double manualB) = 0;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/awb_status.h b/src/ipa/raspberrypi/controller/awb_status.h
index 46d7c842..bc428ed3 100644
--- a/src/ipa/raspberrypi/controller/awb_status.h
+++ b/src/ipa/raspberrypi/controller/awb_status.h
@@ -15,10 +15,10 @@ extern "C" {
struct AwbStatus {
char mode[32];
- double temperature_K;
- double gain_r;
- double gain_g;
- double gain_b;
+ double temperatureK;
+ double gainR;
+ double gainG;
+ double gainB;
};
#ifdef __cplusplus
diff --git a/src/ipa/raspberrypi/controller/black_level_status.h b/src/ipa/raspberrypi/controller/black_level_status.h
index d085f64b..df366140 100644
--- a/src/ipa/raspberrypi/controller/black_level_status.h
+++ b/src/ipa/raspberrypi/controller/black_level_status.h
@@ -13,9 +13,9 @@ extern "C" {
#endif
struct BlackLevelStatus {
- uint16_t black_level_r; // out of 16 bits
- uint16_t black_level_g;
- uint16_t black_level_b;
+ uint16_t blackLevelR; // out of 16 bits
+ uint16_t blackLevelG;
+ uint16_t blackLevelB;
};
#ifdef __cplusplus
diff --git a/src/ipa/raspberrypi/controller/camera_mode.h b/src/ipa/raspberrypi/controller/camera_mode.h
index e2b82828..8b81ca9d 100644
--- a/src/ipa/raspberrypi/controller/camera_mode.h
+++ b/src/ipa/raspberrypi/controller/camera_mode.h
@@ -26,21 +26,21 @@ struct CameraMode {
// size in pixels of frames in this mode
uint16_t width, height;
// size of full resolution uncropped frame ("sensor frame")
- uint16_t sensor_width, sensor_height;
+ uint16_t sensorWidth, sensorHeight;
// binning factor (1 = no binning, 2 = 2-pixel binning etc.)
- uint8_t bin_x, bin_y;
+ uint8_t binX, binY;
// location of top left pixel in the sensor frame
- uint16_t crop_x, crop_y;
- // scaling factor (so if uncropped, width*scale_x is sensor_width)
- double scale_x, scale_y;
+ uint16_t cropX, cropY;
+ // scaling factor (so if uncropped, width*scaleX is sensorWidth)
+ double scaleX, scaleY;
// scaling of the noise compared to the native sensor mode
- double noise_factor;
+ double noiseFactor;
// line time
- libcamera::utils::Duration line_length;
+ libcamera::utils::Duration lineLength;
// any camera transform *not* reflected already in the camera tuning
libcamera::Transform transform;
// minimum and maximum fame lengths in units of lines
- uint32_t min_frame_length, max_frame_length;
+ uint32_t minFrameLength, maxFrameLength;
// sensitivity of this mode
double sensitivity;
};
diff --git a/src/ipa/raspberrypi/controller/ccm_algorithm.hpp b/src/ipa/raspberrypi/controller/ccm_algorithm.hpp
index 33d0e30d..b8b5879b 100644
--- a/src/ipa/raspberrypi/controller/ccm_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/ccm_algorithm.hpp
@@ -15,7 +15,7 @@ class CcmAlgorithm : public Algorithm
public:
CcmAlgorithm(Controller *controller) : Algorithm(controller) {}
// A CCM algorithm must provide the following:
- virtual void SetSaturation(double saturation) = 0;
+ virtual void setSaturation(double saturation) = 0;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/contrast_algorithm.hpp b/src/ipa/raspberrypi/controller/contrast_algorithm.hpp
index 7f03bba5..c76f3cd7 100644
--- a/src/ipa/raspberrypi/controller/contrast_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/contrast_algorithm.hpp
@@ -15,8 +15,8 @@ class ContrastAlgorithm : public Algorithm
public:
ContrastAlgorithm(Controller *controller) : Algorithm(controller) {}
// A contrast algorithm must provide the following:
- virtual void SetBrightness(double brightness) = 0;
- virtual void SetContrast(double contrast) = 0;
+ virtual void setBrightness(double brightness) = 0;
+ virtual void setContrast(double contrast) = 0;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/controller.cpp b/src/ipa/raspberrypi/controller/controller.cpp
index d3433ad2..e0b152c7 100644
--- a/src/ipa/raspberrypi/controller/controller.cpp
+++ b/src/ipa/raspberrypi/controller/controller.cpp
@@ -19,85 +19,87 @@ using namespace libcamera;
LOG_DEFINE_CATEGORY(RPiController)
Controller::Controller()
- : switch_mode_called_(false) {}
+ : switchModeCalled_(false)
+{
+}
-Controller::Controller(char const *json_filename)
- : switch_mode_called_(false)
+Controller::Controller(char const *jsonFilename)
+ : switchModeCalled_(false)
{
- Read(json_filename);
- Initialise();
+ read(jsonFilename);
+ initialise();
}
Controller::~Controller() {}
-void Controller::Read(char const *filename)
+void Controller::read(char const *filename)
{
boost::property_tree::ptree root;
boost::property_tree::read_json(filename, root);
- for (auto const &key_and_value : root) {
- Algorithm *algo = CreateAlgorithm(key_and_value.first.c_str());
+ for (auto const &keyAndValue : root) {
+ Algorithm *algo = createAlgorithm(keyAndValue.first.c_str());
if (algo) {
- algo->Read(key_and_value.second);
+ algo->read(keyAndValue.second);
algorithms_.push_back(AlgorithmPtr(algo));
} else
LOG(RPiController, Warning)
- << "No algorithm found for \"" << key_and_value.first << "\"";
+ << "No algorithm found for \"" << keyAndValue.first << "\"";
}
}
-Algorithm *Controller::CreateAlgorithm(char const *name)
+Algorithm *Controller::createAlgorithm(char const *name)
{
- auto it = GetAlgorithms().find(std::string(name));
- return it != GetAlgorithms().end() ? (*it->second)(this) : nullptr;
+ auto it = getAlgorithms().find(std::string(name));
+ return it != getAlgorithms().end() ? (*it->second)(this) : nullptr;
}
-void Controller::Initialise()
+void Controller::initialise()
{
for (auto &algo : algorithms_)
- algo->Initialise();
+ algo->initialise();
}
-void Controller::SwitchMode(CameraMode const &camera_mode, Metadata *metadata)
+void Controller::switchMode(CameraMode const &cameraMode, Metadata *metadata)
{
for (auto &algo : algorithms_)
- algo->SwitchMode(camera_mode, metadata);
- switch_mode_called_ = true;
+ algo->switchMode(cameraMode, metadata);
+ switchModeCalled_ = true;
}
-void Controller::Prepare(Metadata *image_metadata)
+void Controller::prepare(Metadata *imageMetadata)
{
- assert(switch_mode_called_);
+ assert(switchModeCalled_);
for (auto &algo : algorithms_)
- if (!algo->IsPaused())
- algo->Prepare(image_metadata);
+ if (!algo->isPaused())
+ algo->prepare(imageMetadata);
}
-void Controller::Process(StatisticsPtr stats, Metadata *image_metadata)
+void Controller::process(StatisticsPtr stats, Metadata *imageMetadata)
{
- assert(switch_mode_called_);
+ assert(switchModeCalled_);
for (auto &algo : algorithms_)
- if (!algo->IsPaused())
- algo->Process(stats, image_metadata);
+ if (!algo->isPaused())
+ algo->process(stats, imageMetadata);
}
-Metadata &Controller::GetGlobalMetadata()
+Metadata &Controller::getGlobalMetadata()
{
- return global_metadata_;
+ return globalMetadata_;
}
-Algorithm *Controller::GetAlgorithm(std::string const &name) const
+Algorithm *Controller::getAlgorithm(std::string const &name) const
{
// The passed name must be the entire algorithm name, or must match the
// last part of it with a period (.) just before.
- size_t name_len = name.length();
+ size_t nameLen = name.length();
for (auto &algo : algorithms_) {
- char const *algo_name = algo->Name();
- size_t algo_name_len = strlen(algo_name);
- if (algo_name_len >= name_len &&
+ char const *algoName = algo->name();
+ size_t algoNameLen = strlen(algoName);
+ if (algoNameLen >= nameLen &&
strcasecmp(name.c_str(),
- algo_name + algo_name_len - name_len) == 0 &&
- (name_len == algo_name_len ||
- algo_name[algo_name_len - name_len - 1] == '.'))
+ algoName + algoNameLen - nameLen) == 0 &&
+ (nameLen == algoNameLen ||
+ algoName[algoNameLen - nameLen - 1] == '.'))
return algo.get();
}
return nullptr;
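Given the suffix-matching rule in getAlgorithm() above, a caller can use either an algorithm's full name or its final dotted component. A hypothetical usage sketch (the tuning file name and the "rpi.agc" registration are assumptions for illustration):

#include "controller/controller.hpp"

using namespace RPiController;

void lookupExample()
{
	Controller controller("example_tuning.json");

	/* Both return the same instance if an algorithm named "rpi.agc" was loaded... */
	Algorithm *byFullName = controller.getAlgorithm("rpi.agc");
	Algorithm *bySuffix = controller.getAlgorithm("agc");

	/* ...whereas "gc" is not preceded by a '.', so the lookup returns nullptr. */
	Algorithm *noMatch = controller.getAlgorithm("gc");

	(void)byFullName;
	(void)bySuffix;
	(void)noMatch;
}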
diff --git a/src/ipa/raspberrypi/controller/controller.hpp b/src/ipa/raspberrypi/controller/controller.hpp
index 3b50ae77..a5e1eb38 100644
--- a/src/ipa/raspberrypi/controller/controller.hpp
+++ b/src/ipa/raspberrypi/controller/controller.hpp
@@ -34,21 +34,21 @@ class Controller
{
public:
Controller();
- Controller(char const *json_filename);
+ Controller(char const *jsonFilename);
~Controller();
- Algorithm *CreateAlgorithm(char const *name);
- void Read(char const *filename);
- void Initialise();
- void SwitchMode(CameraMode const &camera_mode, Metadata *metadata);
- void Prepare(Metadata *image_metadata);
- void Process(StatisticsPtr stats, Metadata *image_metadata);
- Metadata &GetGlobalMetadata();
- Algorithm *GetAlgorithm(std::string const &name) const;
+ Algorithm *createAlgorithm(char const *name);
+ void read(char const *filename);
+ void initialise();
+ void switchMode(CameraMode const &cameraMode, Metadata *metadata);
+ void prepare(Metadata *imageMetadata);
+ void process(StatisticsPtr stats, Metadata *imageMetadata);
+ Metadata &getGlobalMetadata();
+ Algorithm *getAlgorithm(std::string const &name) const;
protected:
- Metadata global_metadata_;
+ Metadata globalMetadata_;
std::vector<AlgorithmPtr> algorithms_;
- bool switch_mode_called_;
+ bool switchModeCalled_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/denoise_algorithm.hpp b/src/ipa/raspberrypi/controller/denoise_algorithm.hpp
index 39fcd7e9..48de542a 100644
--- a/src/ipa/raspberrypi/controller/denoise_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/denoise_algorithm.hpp
@@ -17,7 +17,7 @@ class DenoiseAlgorithm : public Algorithm
public:
DenoiseAlgorithm(Controller *controller) : Algorithm(controller) {}
// A Denoise algorithm must provide the following:
- virtual void SetMode(DenoiseMode mode) = 0;
+ virtual void setMode(DenoiseMode mode) = 0;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/denoise_status.h b/src/ipa/raspberrypi/controller/denoise_status.h
index 67a3c361..fe304d09 100644
--- a/src/ipa/raspberrypi/controller/denoise_status.h
+++ b/src/ipa/raspberrypi/controller/denoise_status.h
@@ -13,8 +13,8 @@ extern "C" {
#endif
struct DenoiseStatus {
- double noise_constant;
- double noise_slope;
+ double noiseConstant;
+ double noiseSlope;
double strength;
unsigned int mode;
};
diff --git a/src/ipa/raspberrypi/controller/device_status.cpp b/src/ipa/raspberrypi/controller/device_status.cpp
index a389c40d..f8ed7735 100644
--- a/src/ipa/raspberrypi/controller/device_status.cpp
+++ b/src/ipa/raspberrypi/controller/device_status.cpp
@@ -10,21 +10,21 @@ using namespace libcamera; /* for the Duration operator<< overload */
std::ostream &operator<<(std::ostream &out, const DeviceStatus &d)
{
- out << "Exposure: " << d.shutter_speed
- << " Frame length: " << d.frame_length
- << " Gain: " << d.analogue_gain;
+ out << "Exposure: " << d.shutterSpeed
+ << " Frame length: " << d.frameLength
+ << " Gain: " << d.analogueGain;
if (d.aperture)
out << " Aperture: " << *d.aperture;
- if (d.lens_position)
- out << " Lens: " << *d.lens_position;
+ if (d.lensPosition)
+ out << " Lens: " << *d.lensPosition;
- if (d.flash_intensity)
- out << " Flash: " << *d.flash_intensity;
+ if (d.flashIntensity)
+ out << " Flash: " << *d.flashIntensity;
- if (d.sensor_temperature)
- out << " Temperature: " << *d.sensor_temperature;
+ if (d.sensorTemperature)
+ out << " Temperature: " << *d.sensorTemperature;
return out;
}
diff --git a/src/ipa/raspberrypi/controller/device_status.h b/src/ipa/raspberrypi/controller/device_status.h
index b33f0d09..ebcd7da2 100644
--- a/src/ipa/raspberrypi/controller/device_status.h
+++ b/src/ipa/raspberrypi/controller/device_status.h
@@ -18,24 +18,24 @@
struct DeviceStatus {
DeviceStatus()
- : shutter_speed(std::chrono::seconds(0)), frame_length(0),
- analogue_gain(0.0)
+ : shutterSpeed(std::chrono::seconds(0)), frameLength(0),
+ analogueGain(0.0)
{
}
friend std::ostream &operator<<(std::ostream &out, const DeviceStatus &d);
/* time shutter is open */
- libcamera::utils::Duration shutter_speed;
+ libcamera::utils::Duration shutterSpeed;
/* frame length given in number of lines */
- uint32_t frame_length;
- double analogue_gain;
+ uint32_t frameLength;
+ double analogueGain;
/* 1.0/distance-in-metres, or 0 if unknown */
- std::optional<double> lens_position;
+ std::optional<double> lensPosition;
/* 1/f so that brightness quadruples when this doubles, or 0 if unknown */
std::optional<double> aperture;
/* proportional to brightness with 0 = no flash, 1 = maximum flash */
- std::optional<double> flash_intensity;
+ std::optional<double> flashIntensity;
/* Sensor reported temperature value (in degrees) */
- std::optional<double> sensor_temperature;
+ std::optional<double> sensorTemperature;
};
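For illustration (not from the patch), the renamed fields and the stream operator above can be exercised as below; the optional members only appear in the output once they are set, and the shutter assignment relies on libcamera::utils::Duration converting from std::chrono durations, as the constructor above already does:

    #include <chrono>
    #include <iostream>

    #include "device_status.h"

    int main()
    {
        DeviceStatus status;
        status.shutterSpeed = std::chrono::milliseconds(10);
        status.frameLength = 1600;
        status.analogueGain = 2.0;
        status.sensorTemperature = 45.0;   /* optional: now printed by operator<< */

        std::cout << status << std::endl;  /* lensPosition etc. remain unset and unprinted */
        return 0;
    }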
diff --git a/src/ipa/raspberrypi/controller/focus_status.h b/src/ipa/raspberrypi/controller/focus_status.h
index ace2fe2c..65645510 100644
--- a/src/ipa/raspberrypi/controller/focus_status.h
+++ b/src/ipa/raspberrypi/controller/focus_status.h
@@ -18,7 +18,7 @@ extern "C" {
struct FocusStatus {
unsigned int num;
- uint32_t focus_measures[FOCUS_REGIONS];
+ uint32_t focusMeasures[FOCUS_REGIONS];
};
#ifdef __cplusplus
diff --git a/src/ipa/raspberrypi/controller/histogram.cpp b/src/ipa/raspberrypi/controller/histogram.cpp
index 9916b3ed..e865bef0 100644
--- a/src/ipa/raspberrypi/controller/histogram.cpp
+++ b/src/ipa/raspberrypi/controller/histogram.cpp
@@ -11,25 +11,25 @@
using namespace RPiController;
-uint64_t Histogram::CumulativeFreq(double bin) const
+uint64_t Histogram::cumulativeFreq(double bin) const
{
if (bin <= 0)
return 0;
- else if (bin >= Bins())
- return Total();
+ else if (bin >= bins())
+ return total();
int b = (int)bin;
return cumulative_[b] +
(bin - b) * (cumulative_[b + 1] - cumulative_[b]);
}
-double Histogram::Quantile(double q, int first, int last) const
+double Histogram::quantile(double q, int first, int last) const
{
if (first == -1)
first = 0;
if (last == -1)
last = cumulative_.size() - 2;
assert(first <= last);
- uint64_t items = q * Total();
+ uint64_t items = q * total();
while (first < last) // binary search to find the right bin
{
int middle = (first + last) / 2;
@@ -45,20 +45,20 @@ double Histogram::Quantile(double q, int first, int last) const
return first + frac;
}
-double Histogram::InterQuantileMean(double q_lo, double q_hi) const
+double Histogram::interQuantileMean(double qLo, double qHi) const
{
- assert(q_hi > q_lo);
- double p_lo = Quantile(q_lo);
- double p_hi = Quantile(q_hi, (int)p_lo);
- double sum_bin_freq = 0, cumul_freq = 0;
- for (double p_next = floor(p_lo) + 1.0; p_next <= ceil(p_hi);
- p_lo = p_next, p_next += 1.0) {
- int bin = floor(p_lo);
+ assert(qHi > qLo);
+ double pLo = quantile(qLo);
+ double pHi = quantile(qHi, (int)pLo);
+ double sumBinFreq = 0, cumulFreq = 0;
+ for (double pNext = floor(pLo) + 1.0; pNext <= ceil(pHi);
+ pLo = pNext, pNext += 1.0) {
+ int bin = floor(pLo);
double freq = (cumulative_[bin + 1] - cumulative_[bin]) *
- (std::min(p_next, p_hi) - p_lo);
- sum_bin_freq += bin * freq;
- cumul_freq += freq;
+ (std::min(pNext, pHi) - pLo);
+ sumBinFreq += bin * freq;
+ cumulFreq += freq;
}
// add 0.5 to give an average for bin mid-points
- return sum_bin_freq / cumul_freq + 0.5;
+ return sumBinFreq / cumulFreq + 0.5;
}
diff --git a/src/ipa/raspberrypi/controller/histogram.hpp b/src/ipa/raspberrypi/controller/histogram.hpp
index 90f5ac78..4ff5a56b 100644
--- a/src/ipa/raspberrypi/controller/histogram.hpp
+++ b/src/ipa/raspberrypi/controller/histogram.hpp
@@ -27,15 +27,15 @@ public:
cumulative_.push_back(cumulative_.back() +
histogram[i]);
}
- uint32_t Bins() const { return cumulative_.size() - 1; }
- uint64_t Total() const { return cumulative_[cumulative_.size() - 1]; }
+ uint32_t bins() const { return cumulative_.size() - 1; }
+ uint64_t total() const { return cumulative_[cumulative_.size() - 1]; }
// Cumulative frequency up to a (fractional) point in a bin.
- uint64_t CumulativeFreq(double bin) const;
+ uint64_t cumulativeFreq(double bin) const;
// Return the (fractional) bin of the point q (0 <= q <= 1) through the
// histogram. Optionally provide limits to help.
- double Quantile(double q, int first = -1, int last = -1) const;
+ double quantile(double q, int first = -1, int last = -1) const;
// Return the average histogram bin value between the two quantiles.
- double InterQuantileMean(double q_lo, double q_hi) const;
+ double interQuantileMean(double qLo, double qHi) const;
private:
std::vector<uint64_t> cumulative_;
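To make the renamed accessors concrete, a small sketch (illustrative, not from the patch; it assumes the existing templated constructor that builds the cumulative table from a raw array of bin counts):

    #include <cstdint>

    #include "histogram.hpp"

    void histogramSketch()
    {
        uint32_t bins[4] = { 0, 10, 20, 10 };          /* 40 counted events in total */
        RPiController::Histogram h(bins, 4);

        uint64_t below2 = h.cumulativeFreq(2.0);       /* 10: events below bin 2 */
        double median = h.quantile(0.5);               /* ~2.5: halfway through bin 2 */
        double mid = h.interQuantileMean(0.25, 0.75);  /* mean bin over the central half */
        (void)below2; (void)median; (void)mid;
    }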
diff --git a/src/ipa/raspberrypi/controller/metadata.hpp b/src/ipa/raspberrypi/controller/metadata.hpp
index 51e576cf..a79a67d4 100644
--- a/src/ipa/raspberrypi/controller/metadata.hpp
+++ b/src/ipa/raspberrypi/controller/metadata.hpp
@@ -22,26 +22,26 @@ public:
Metadata(Metadata const &other)
{
- std::scoped_lock other_lock(other.mutex_);
+ std::scoped_lock otherLock(other.mutex_);
data_ = other.data_;
}
Metadata(Metadata &&other)
{
- std::scoped_lock other_lock(other.mutex_);
+ std::scoped_lock otherLock(other.mutex_);
data_ = std::move(other.data_);
other.data_.clear();
}
template<typename T>
- void Set(std::string const &tag, T const &value)
+ void set(std::string const &tag, T const &value)
{
std::scoped_lock lock(mutex_);
data_[tag] = value;
}
template<typename T>
- int Get(std::string const &tag, T &value) const
+ int get(std::string const &tag, T &value) const
{
std::scoped_lock lock(mutex_);
auto it = data_.find(tag);
@@ -51,7 +51,7 @@ public:
return 0;
}
- void Clear()
+ void clear()
{
std::scoped_lock lock(mutex_);
data_.clear();
@@ -72,14 +72,14 @@ public:
return *this;
}
- void Merge(Metadata &other)
+ void merge(Metadata &other)
{
std::scoped_lock lock(mutex_, other.mutex_);
data_.merge(other.data_);
}
template<typename T>
- T *GetLocked(std::string const &tag)
+ T *getLocked(std::string const &tag)
{
// This allows in-place access to the Metadata contents,
// for which you should be holding the lock.
@@ -90,7 +90,7 @@ public:
}
template<typename T>
- void SetLocked(std::string const &tag, T const &value)
+ void setLocked(std::string const &tag, T const &value)
{
// Use this only if you're holding the lock yourself.
data_[tag] = value;
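A short sketch of the renamed accessors in use (illustrative, not from the patch; the tag name is arbitrary): set() and get() take the internal lock themselves, while getLocked() and setLocked() expect the caller to hold it, for example via std::unique_lock as the AGC code later in this diff does:

    #include <iostream>
    #include <mutex>

    #include "metadata.hpp"

    void metadataSketch()
    {
        RPiController::Metadata meta;
        meta.set("example.value", 400.0);

        double value;
        if (meta.get("example.value", value) == 0)
            std::cout << "value " << value << std::endl;

        {
            std::unique_lock<RPiController::Metadata> lock(meta);
            double *p = meta.getLocked<double>("example.value");
            if (p)
                *p *= 2.0;   /* in-place update while holding the lock */
        }
    }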
diff --git a/src/ipa/raspberrypi/controller/noise_status.h b/src/ipa/raspberrypi/controller/noise_status.h
index 8439a402..358af4fe 100644
--- a/src/ipa/raspberrypi/controller/noise_status.h
+++ b/src/ipa/raspberrypi/controller/noise_status.h
@@ -13,8 +13,8 @@ extern "C" {
#endif
struct NoiseStatus {
- double noise_constant;
- double noise_slope;
+ double noiseConstant;
+ double noiseSlope;
};
#ifdef __cplusplus
diff --git a/src/ipa/raspberrypi/controller/pwl.cpp b/src/ipa/raspberrypi/controller/pwl.cpp
index 130c820b..24ff3ea3 100644
--- a/src/ipa/raspberrypi/controller/pwl.cpp
+++ b/src/ipa/raspberrypi/controller/pwl.cpp
@@ -12,7 +12,7 @@
using namespace RPiController;
-void Pwl::Read(boost::property_tree::ptree const &params)
+void Pwl::read(boost::property_tree::ptree const &params)
{
for (auto it = params.begin(); it != params.end(); it++) {
double x = it->second.get_value<double>();
@@ -24,24 +24,24 @@ void Pwl::Read(boost::property_tree::ptree const &params)
assert(points_.size() >= 2);
}
-void Pwl::Append(double x, double y, const double eps)
+void Pwl::append(double x, double y, const double eps)
{
if (points_.empty() || points_.back().x + eps < x)
points_.push_back(Point(x, y));
}
-void Pwl::Prepend(double x, double y, const double eps)
+void Pwl::prepend(double x, double y, const double eps)
{
if (points_.empty() || points_.front().x - eps > x)
points_.insert(points_.begin(), Point(x, y));
}
-Pwl::Interval Pwl::Domain() const
+Pwl::Interval Pwl::domain() const
{
return Interval(points_[0].x, points_[points_.size() - 1].x);
}
-Pwl::Interval Pwl::Range() const
+Pwl::Interval Pwl::range() const
{
double lo = points_[0].y, hi = lo;
for (auto &p : points_)
@@ -49,18 +49,16 @@ Pwl::Interval Pwl::Range() const
return Interval(lo, hi);
}
-bool Pwl::Empty() const
+bool Pwl::empty() const
{
return points_.empty();
}
-double Pwl::Eval(double x, int *span_ptr, bool update_span) const
+double Pwl::eval(double x, int *spanPtr, bool updateSpan) const
{
- int span = findSpan(x, span_ptr && *span_ptr != -1
- ? *span_ptr
- : points_.size() / 2 - 1);
- if (span_ptr && update_span)
- *span_ptr = span;
+ int span = findSpan(x, spanPtr && *spanPtr != -1 ? *spanPtr : points_.size() / 2 - 1);
+ if (spanPtr && updateSpan)
+ *spanPtr = span;
return points_[span].y +
(x - points_[span].x) * (points_[span + 1].y - points_[span].y) /
(points_[span + 1].x - points_[span].x);
@@ -70,31 +68,31 @@ int Pwl::findSpan(double x, int span) const
{
// Pwls are generally small, so linear search may well be faster than
// binary, though could review this if large PWls start turning up.
- int last_span = points_.size() - 2;
+ int lastSpan = points_.size() - 2;
// some algorithms may call us with span pointing directly at the last
// control point
- span = std::max(0, std::min(last_span, span));
- while (span < last_span && x >= points_[span + 1].x)
+ span = std::max(0, std::min(lastSpan, span));
+ while (span < lastSpan && x >= points_[span + 1].x)
span++;
while (span && x < points_[span].x)
span--;
return span;
}
-Pwl::PerpType Pwl::Invert(Point const &xy, Point &perp, int &span,
+Pwl::PerpType Pwl::invert(Point const &xy, Point &perp, int &span,
const double eps) const
{
assert(span >= -1);
- bool prev_off_end = false;
+ bool prevOffEnd = false;
for (span = span + 1; span < (int)points_.size() - 1; span++) {
- Point span_vec = points_[span + 1] - points_[span];
- double t = ((xy - points_[span]) % span_vec) / span_vec.Len2();
+ Point spanVec = points_[span + 1] - points_[span];
+ double t = ((xy - points_[span]) % spanVec) / spanVec.len2();
if (t < -eps) // off the start of this span
{
if (span == 0) {
perp = points_[span];
return PerpType::Start;
- } else if (prev_off_end) {
+ } else if (prevOffEnd) {
perp = points_[span];
return PerpType::Vertex;
}
@@ -104,32 +102,32 @@ Pwl::PerpType Pwl::Invert(Point const &xy, Point &perp, int &span,
perp = points_[span + 1];
return PerpType::End;
}
- prev_off_end = true;
+ prevOffEnd = true;
} else // a true perpendicular
{
- perp = points_[span] + span_vec * t;
+ perp = points_[span] + spanVec * t;
return PerpType::Perpendicular;
}
}
return PerpType::None;
}
-Pwl Pwl::Inverse(bool *true_inverse, const double eps) const
+Pwl Pwl::inverse(bool *trueInverse, const double eps) const
{
bool appended = false, prepended = false, neither = false;
Pwl inverse;
for (Point const &p : points_) {
- if (inverse.Empty())
- inverse.Append(p.y, p.x, eps);
+ if (inverse.empty())
+ inverse.append(p.y, p.x, eps);
else if (std::abs(inverse.points_.back().x - p.y) <= eps ||
std::abs(inverse.points_.front().x - p.y) <= eps)
/* do nothing */;
else if (p.y > inverse.points_.back().x) {
- inverse.Append(p.y, p.x, eps);
+ inverse.append(p.y, p.x, eps);
appended = true;
} else if (p.y < inverse.points_.front().x) {
- inverse.Prepend(p.y, p.x, eps);
+ inverse.prepend(p.y, p.x, eps);
prepended = true;
} else
neither = true;
@@ -138,63 +136,65 @@ Pwl Pwl::Inverse(bool *true_inverse, const double eps) const
// This is not a proper inverse if we found ourselves putting points
// onto both ends of the inverse, or if there were points that couldn't
// go on either.
- if (true_inverse)
- *true_inverse = !(neither || (appended && prepended));
+ if (trueInverse)
+ *trueInverse = !(neither || (appended && prepended));
return inverse;
}
-Pwl Pwl::Compose(Pwl const &other, const double eps) const
+Pwl Pwl::compose(Pwl const &other, const double eps) const
{
- double this_x = points_[0].x, this_y = points_[0].y;
- int this_span = 0, other_span = other.findSpan(this_y, 0);
- Pwl result({ { this_x, other.Eval(this_y, &other_span, false) } });
- while (this_span != (int)points_.size() - 1) {
- double dx = points_[this_span + 1].x - points_[this_span].x,
- dy = points_[this_span + 1].y - points_[this_span].y;
+ double thisX = points_[0].x, thisY = points_[0].y;
+ int thisSpan = 0, otherSpan = other.findSpan(thisY, 0);
+ Pwl result({ { thisX, other.eval(thisY, &otherSpan, false) } });
+ while (thisSpan != (int)points_.size() - 1) {
+ double dx = points_[thisSpan + 1].x - points_[thisSpan].x,
+ dy = points_[thisSpan + 1].y - points_[thisSpan].y;
if (abs(dy) > eps &&
- other_span + 1 < (int)other.points_.size() &&
- points_[this_span + 1].y >=
- other.points_[other_span + 1].x + eps) {
+ otherSpan + 1 < (int)other.points_.size() &&
+ points_[thisSpan + 1].y >=
+ other.points_[otherSpan + 1].x + eps) {
// next control point in result will be where this
// function's y reaches the next span in other
- this_x = points_[this_span].x +
- (other.points_[other_span + 1].x -
- points_[this_span].y) * dx / dy;
- this_y = other.points_[++other_span].x;
- } else if (abs(dy) > eps && other_span > 0 &&
- points_[this_span + 1].y <=
- other.points_[other_span - 1].x - eps) {
+ thisX = points_[thisSpan].x +
+ (other.points_[otherSpan + 1].x -
+ points_[thisSpan].y) *
+ dx / dy;
+ thisY = other.points_[++otherSpan].x;
+ } else if (abs(dy) > eps && otherSpan > 0 &&
+ points_[thisSpan + 1].y <=
+ other.points_[otherSpan - 1].x - eps) {
// next control point in result will be where this
// function's y reaches the previous span in other
- this_x = points_[this_span].x +
- (other.points_[other_span + 1].x -
- points_[this_span].y) * dx / dy;
- this_y = other.points_[--other_span].x;
+ thisX = points_[thisSpan].x +
+ (other.points_[otherSpan + 1].x -
+ points_[thisSpan].y) *
+ dx / dy;
+ thisY = other.points_[--otherSpan].x;
} else {
// we stay in the same span in other
- this_span++;
- this_x = points_[this_span].x,
- this_y = points_[this_span].y;
+ thisSpan++;
+ thisX = points_[thisSpan].x,
+ thisY = points_[thisSpan].y;
}
- result.Append(this_x, other.Eval(this_y, &other_span, false),
+ result.append(thisX, other.eval(thisY, &otherSpan, false),
eps);
}
return result;
}
-void Pwl::Map(std::function<void(double x, double y)> f) const
+void Pwl::map(std::function<void(double x, double y)> f) const
{
for (auto &pt : points_)
f(pt.x, pt.y);
}
-void Pwl::Map2(Pwl const &pwl0, Pwl const &pwl1,
+void Pwl::map2(Pwl const &pwl0, Pwl const &pwl1,
std::function<void(double x, double y0, double y1)> f)
{
int span0 = 0, span1 = 0;
double x = std::min(pwl0.points_[0].x, pwl1.points_[0].x);
- f(x, pwl0.Eval(x, &span0, false), pwl1.Eval(x, &span1, false));
+ f(x, pwl0.eval(x, &span0, false), pwl1.eval(x, &span1, false));
while (span0 < (int)pwl0.points_.size() - 1 ||
span1 < (int)pwl1.points_.size() - 1) {
if (span0 == (int)pwl0.points_.size() - 1)
@@ -205,28 +205,28 @@ void Pwl::Map2(Pwl const &pwl0, Pwl const &pwl1,
x = pwl1.points_[++span1].x;
else
x = pwl0.points_[++span0].x;
- f(x, pwl0.Eval(x, &span0, false), pwl1.Eval(x, &span1, false));
+ f(x, pwl0.eval(x, &span0, false), pwl1.eval(x, &span1, false));
}
}
-Pwl Pwl::Combine(Pwl const &pwl0, Pwl const &pwl1,
+Pwl Pwl::combine(Pwl const &pwl0, Pwl const &pwl1,
std::function<double(double x, double y0, double y1)> f,
const double eps)
{
Pwl result;
- Map2(pwl0, pwl1, [&](double x, double y0, double y1) {
- result.Append(x, f(x, y0, y1), eps);
+ map2(pwl0, pwl1, [&](double x, double y0, double y1) {
+ result.append(x, f(x, y0, y1), eps);
});
return result;
}
-void Pwl::MatchDomain(Interval const &domain, bool clip, const double eps)
+void Pwl::matchDomain(Interval const &domain, bool clip, const double eps)
{
int span = 0;
- Prepend(domain.start, Eval(clip ? points_[0].x : domain.start, &span),
+ prepend(domain.start, eval(clip ? points_[0].x : domain.start, &span),
eps);
span = points_.size() - 2;
- Append(domain.end, Eval(clip ? points_.back().x : domain.end, &span),
+ append(domain.end, eval(clip ? points_.back().x : domain.end, &span),
eps);
}
@@ -237,7 +237,7 @@ Pwl &Pwl::operator*=(double d)
return *this;
}
-void Pwl::Debug(FILE *fp) const
+void Pwl::debug(FILE *fp) const
{
fprintf(fp, "Pwl {\n");
for (auto &p : points_)
diff --git a/src/ipa/raspberrypi/controller/pwl.hpp b/src/ipa/raspberrypi/controller/pwl.hpp
index 484672f6..4a38d1df 100644
--- a/src/ipa/raspberrypi/controller/pwl.hpp
+++ b/src/ipa/raspberrypi/controller/pwl.hpp
@@ -17,24 +17,26 @@ class Pwl
{
public:
struct Interval {
- Interval(double _start, double _end) : start(_start), end(_end)
+ Interval(double _start, double _end)
+ : start(_start), end(_end)
{
}
double start, end;
- bool Contains(double value)
+ bool contains(double value)
{
return value >= start && value <= end;
}
- double Clip(double value)
+ double clip(double value)
{
return value < start ? start
: (value > end ? end : value);
}
- double Len() const { return end - start; }
+ double len() const { return end - start; }
};
struct Point {
Point() : x(0), y(0) {}
- Point(double _x, double _y) : x(_x), y(_y) {}
+ Point(double _x, double _y)
+ : x(_x), y(_y) {}
double x, y;
Point operator-(Point const &p) const
{
@@ -50,23 +52,23 @@ public:
}
Point operator*(double f) const { return Point(x * f, y * f); }
Point operator/(double f) const { return Point(x / f, y / f); }
- double Len2() const { return x * x + y * y; }
- double Len() const { return sqrt(Len2()); }
+ double len2() const { return x * x + y * y; }
+ double len() const { return sqrt(len2()); }
};
Pwl() {}
Pwl(std::vector<Point> const &points) : points_(points) {}
- void Read(boost::property_tree::ptree const &params);
- void Append(double x, double y, const double eps = 1e-6);
- void Prepend(double x, double y, const double eps = 1e-6);
- Interval Domain() const;
- Interval Range() const;
- bool Empty() const;
+ void read(boost::property_tree::ptree const &params);
+ void append(double x, double y, const double eps = 1e-6);
+ void prepend(double x, double y, const double eps = 1e-6);
+ Interval domain() const;
+ Interval range() const;
+ bool empty() const;
// Evaluate Pwl, optionally supplying an initial guess for the
// "span". The "span" may be optionally be updated. If you want to know
// the "span" value but don't have an initial guess you can set it to
// -1.
- double Eval(double x, int *span_ptr = nullptr,
- bool update_span = true) const;
+ double eval(double x, int *spanPtr = nullptr,
+ bool updateSpan = true) const;
// Find perpendicular closest to xy, starting from span+1 so you can
// call it repeatedly to check for multiple closest points (set span to
// -1 on the first call). Also returns "pseudo" perpendiculars; see
@@ -78,31 +80,31 @@ public:
Vertex, // vertex of Pwl is closest point
Perpendicular // true perpendicular found
};
- PerpType Invert(Point const &xy, Point &perp, int &span,
+ PerpType invert(Point const &xy, Point &perp, int &span,
const double eps = 1e-6) const;
// Compute the inverse function. Indicate if it is a proper (true)
// inverse, or only a best effort (e.g. input was non-monotonic).
- Pwl Inverse(bool *true_inverse = nullptr, const double eps = 1e-6) const;
+ Pwl inverse(bool *trueInverse = nullptr, const double eps = 1e-6) const;
// Compose two Pwls together, doing "this" first and "other" after.
- Pwl Compose(Pwl const &other, const double eps = 1e-6) const;
+ Pwl compose(Pwl const &other, const double eps = 1e-6) const;
// Apply function to (x,y) values at every control point.
- void Map(std::function<void(double x, double y)> f) const;
+ void map(std::function<void(double x, double y)> f) const;
// Apply function to (x, y0, y1) values wherever either Pwl has a
// control point.
- static void Map2(Pwl const &pwl0, Pwl const &pwl1,
+ static void map2(Pwl const &pwl0, Pwl const &pwl1,
std::function<void(double x, double y0, double y1)> f);
// Combine two Pwls, meaning we create a new Pwl where the y values are
// given by running f wherever either has a knot.
static Pwl
- Combine(Pwl const &pwl0, Pwl const &pwl1,
+ combine(Pwl const &pwl0, Pwl const &pwl1,
std::function<double(double x, double y0, double y1)> f,
const double eps = 1e-6);
// Make "this" match (at least) the given domain. Any extension my be
// clipped or linear.
- void MatchDomain(Interval const &domain, bool clip = true,
+ void matchDomain(Interval const &domain, bool clip = true,
const double eps = 1e-6);
Pwl &operator*=(double d);
- void Debug(FILE *fp = stdout) const;
+ void debug(FILE *fp = stdout) const;
private:
int findSpan(double x, int span) const;
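A quick illustration of the renamed Pwl interface (not from the patch; the control points are arbitrary):

    #include "pwl.hpp"

    void pwlSketch()
    {
        using RPiController::Pwl;

        Pwl curve({ { 0.0, 0.0 }, { 0.5, 0.7 }, { 1.0, 1.0 } });
        double y = curve.eval(0.25);              /* linear interpolation: 0.35 */

        bool trueInverse;
        Pwl inv = curve.inverse(&trueInverse);    /* monotonic, so a true inverse */
        Pwl identity = curve.compose(inv);        /* control points land back on y = x */
        identity.debug();                         /* dump the points to stdout */
        (void)y;
        (void)trueInverse;
    }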
diff --git a/src/ipa/raspberrypi/controller/rpi/agc.cpp b/src/ipa/raspberrypi/controller/rpi/agc.cpp
index f6a9cb0a..52a41a55 100644
--- a/src/ipa/raspberrypi/controller/rpi/agc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/agc.cpp
@@ -30,7 +30,7 @@ LOG_DEFINE_CATEGORY(RPiAgc)
#define PIPELINE_BITS 13 // seems to be a 13-bit pipeline
-void AgcMeteringMode::Read(boost::property_tree::ptree const &params)
+void AgcMeteringMode::read(boost::property_tree::ptree const &params)
{
int num = 0;
for (auto &p : params.get_child("weights")) {
@@ -43,265 +43,260 @@ void AgcMeteringMode::Read(boost::property_tree::ptree const &params)
}
static std::string
-read_metering_modes(std::map<std::string, AgcMeteringMode> &metering_modes,
- boost::property_tree::ptree const &params)
+readMeteringModes(std::map<std::string, AgcMeteringMode> &meteringModes,
+ boost::property_tree::ptree const &params)
{
std::string first;
for (auto &p : params) {
- AgcMeteringMode metering_mode;
- metering_mode.Read(p.second);
- metering_modes[p.first] = std::move(metering_mode);
+ AgcMeteringMode meteringMode;
+ meteringMode.read(p.second);
+ meteringModes[p.first] = std::move(meteringMode);
if (first.empty())
first = p.first;
}
return first;
}
-static int read_list(std::vector<double> &list,
- boost::property_tree::ptree const &params)
+static int readList(std::vector<double> &list,
+ boost::property_tree::ptree const &params)
{
for (auto &p : params)
list.push_back(p.second.get_value<double>());
return list.size();
}
-static int read_list(std::vector<Duration> &list,
- boost::property_tree::ptree const &params)
+static int readList(std::vector<Duration> &list,
+ boost::property_tree::ptree const &params)
{
for (auto &p : params)
list.push_back(p.second.get_value<double>() * 1us);
return list.size();
}
-void AgcExposureMode::Read(boost::property_tree::ptree const &params)
+void AgcExposureMode::read(boost::property_tree::ptree const &params)
{
- int num_shutters = read_list(shutter, params.get_child("shutter"));
- int num_ags = read_list(gain, params.get_child("gain"));
- if (num_shutters < 2 || num_ags < 2)
+ int numShutters = readList(shutter, params.get_child("shutter"));
+ int numAgs = readList(gain, params.get_child("gain"));
+ if (numShutters < 2 || numAgs < 2)
throw std::runtime_error(
"AgcConfig: must have at least two entries in exposure profile");
- if (num_shutters != num_ags)
+ if (numShutters != numAgs)
throw std::runtime_error(
"AgcConfig: expect same number of exposure and gain entries in exposure profile");
}
static std::string
-read_exposure_modes(std::map<std::string, AgcExposureMode> &exposure_modes,
- boost::property_tree::ptree const &params)
+readExposureModes(std::map<std::string, AgcExposureMode> &exposureModes,
+ boost::property_tree::ptree const &params)
{
std::string first;
for (auto &p : params) {
- AgcExposureMode exposure_mode;
- exposure_mode.Read(p.second);
- exposure_modes[p.first] = std::move(exposure_mode);
+ AgcExposureMode exposureMode;
+ exposureMode.read(p.second);
+ exposureModes[p.first] = std::move(exposureMode);
if (first.empty())
first = p.first;
}
return first;
}
-void AgcConstraint::Read(boost::property_tree::ptree const &params)
+void AgcConstraint::read(boost::property_tree::ptree const &params)
{
- std::string bound_string = params.get<std::string>("bound", "");
- transform(bound_string.begin(), bound_string.end(),
- bound_string.begin(), ::toupper);
- if (bound_string != "UPPER" && bound_string != "LOWER")
+ std::string boundString = params.get<std::string>("bound", "");
+ transform(boundString.begin(), boundString.end(),
+ boundString.begin(), ::toupper);
+ if (boundString != "UPPER" && boundString != "LOWER")
throw std::runtime_error(
"AGC constraint type should be UPPER or LOWER");
- bound = bound_string == "UPPER" ? Bound::UPPER : Bound::LOWER;
- q_lo = params.get<double>("q_lo");
- q_hi = params.get<double>("q_hi");
- Y_target.Read(params.get_child("y_target"));
+ bound = boundString == "UPPER" ? Bound::UPPER : Bound::LOWER;
+ qLo = params.get<double>("q_lo");
+ qHi = params.get<double>("q_hi");
+ yTarget.read(params.get_child("y_target"));
}
static AgcConstraintMode
-read_constraint_mode(boost::property_tree::ptree const &params)
+readConstraintMode(boost::property_tree::ptree const &params)
{
AgcConstraintMode mode;
for (auto &p : params) {
AgcConstraint constraint;
- constraint.Read(p.second);
+ constraint.read(p.second);
mode.push_back(std::move(constraint));
}
return mode;
}
-static std::string read_constraint_modes(
- std::map<std::string, AgcConstraintMode> &constraint_modes,
- boost::property_tree::ptree const &params)
+static std::string readConstraintModes(std::map<std::string, AgcConstraintMode> &constraintModes,
+ boost::property_tree::ptree const &params)
{
std::string first;
for (auto &p : params) {
- constraint_modes[p.first] = read_constraint_mode(p.second);
+ constraintModes[p.first] = readConstraintMode(p.second);
if (first.empty())
first = p.first;
}
return first;
}
-void AgcConfig::Read(boost::property_tree::ptree const &params)
+void AgcConfig::read(boost::property_tree::ptree const &params)
{
LOG(RPiAgc, Debug) << "AgcConfig";
- default_metering_mode = read_metering_modes(
- metering_modes, params.get_child("metering_modes"));
- default_exposure_mode = read_exposure_modes(
- exposure_modes, params.get_child("exposure_modes"));
- default_constraint_mode = read_constraint_modes(
- constraint_modes, params.get_child("constraint_modes"));
- Y_target.Read(params.get_child("y_target"));
+ defaultMeteringMode = readMeteringModes(meteringModes, params.get_child("metering_modes"));
+ defaultExposureMode = readExposureModes(exposureModes, params.get_child("exposure_modes"));
+ defaultConstraintMode = readConstraintModes(constraintModes, params.get_child("constraint_modes"));
+ yTarget.read(params.get_child("y_target"));
speed = params.get<double>("speed", 0.2);
- startup_frames = params.get<uint16_t>("startup_frames", 10);
- convergence_frames = params.get<unsigned int>("convergence_frames", 6);
- fast_reduce_threshold =
- params.get<double>("fast_reduce_threshold", 0.4);
- base_ev = params.get<double>("base_ev", 1.0);
+ startupFrames = params.get<uint16_t>("startup_frames", 10);
+ convergenceFrames = params.get<unsigned int>("convergence_frames", 6);
+ fastReduceThreshold = params.get<double>("fast_reduce_threshold", 0.4);
+ baseEv = params.get<double>("base_ev", 1.0);
// Start with quite a low value as ramping up is easier than ramping down.
- default_exposure_time = params.get<double>("default_exposure_time", 1000) * 1us;
- default_analogue_gain = params.get<double>("default_analogue_gain", 1.0);
+ defaultExposureTime = params.get<double>("default_exposure_time", 1000) * 1us;
+ defaultAnalogueGain = params.get<double>("default_analogue_gain", 1.0);
}
Agc::ExposureValues::ExposureValues()
- : shutter(0s), analogue_gain(0),
- total_exposure(0s), total_exposure_no_dg(0s)
+ : shutter(0s), analogueGain(0),
+ totalExposure(0s), totalExposureNoDG(0s)
{
}
Agc::Agc(Controller *controller)
- : AgcAlgorithm(controller), metering_mode_(nullptr),
- exposure_mode_(nullptr), constraint_mode_(nullptr),
- frame_count_(0), lock_count_(0),
- last_target_exposure_(0s), last_sensitivity_(0.0),
- ev_(1.0), flicker_period_(0s),
- max_shutter_(0s), fixed_shutter_(0s), fixed_analogue_gain_(0.0)
+ : AgcAlgorithm(controller), meteringMode_(nullptr),
+ exposureMode_(nullptr), constraintMode_(nullptr),
+ frameCount_(0), lockCount_(0),
+ lastTargetExposure_(0s), lastSensitivity_(0.0),
+ ev_(1.0), flickerPeriod_(0s),
+ maxShutter_(0s), fixedShutter_(0s), fixedAnalogueGain_(0.0)
{
memset(&awb_, 0, sizeof(awb_));
- // Setting status_.total_exposure_value_ to zero initially tells us
+ // Setting status_.totalExposureValue to zero initially tells us
// it's not been calculated yet (i.e. Process hasn't yet run).
memset(&status_, 0, sizeof(status_));
status_.ev = ev_;
}
-char const *Agc::Name() const
+char const *Agc::name() const
{
return NAME;
}
-void Agc::Read(boost::property_tree::ptree const &params)
+void Agc::read(boost::property_tree::ptree const &params)
{
LOG(RPiAgc, Debug) << "Agc";
- config_.Read(params);
+ config_.read(params);
// Set the config's defaults (which are the first ones it read) as our
// current modes, until someone changes them. (they're all known to
// exist at this point)
- metering_mode_name_ = config_.default_metering_mode;
- metering_mode_ = &config_.metering_modes[metering_mode_name_];
- exposure_mode_name_ = config_.default_exposure_mode;
- exposure_mode_ = &config_.exposure_modes[exposure_mode_name_];
- constraint_mode_name_ = config_.default_constraint_mode;
- constraint_mode_ = &config_.constraint_modes[constraint_mode_name_];
+ meteringModeName_ = config_.defaultMeteringMode;
+ meteringMode_ = &config_.meteringModes[meteringModeName_];
+ exposureModeName_ = config_.defaultExposureMode;
+ exposureMode_ = &config_.exposureModes[exposureModeName_];
+ constraintModeName_ = config_.defaultConstraintMode;
+ constraintMode_ = &config_.constraintModes[constraintModeName_];
// Set up the "last shutter/gain" values, in case AGC starts "disabled".
- status_.shutter_time = config_.default_exposure_time;
- status_.analogue_gain = config_.default_analogue_gain;
+ status_.shutterTime = config_.defaultExposureTime;
+ status_.analogueGain = config_.defaultAnalogueGain;
}
-bool Agc::IsPaused() const
+bool Agc::isPaused() const
{
return false;
}
-void Agc::Pause()
+void Agc::pause()
{
- fixed_shutter_ = status_.shutter_time;
- fixed_analogue_gain_ = status_.analogue_gain;
+ fixedShutter_ = status_.shutterTime;
+ fixedAnalogueGain_ = status_.analogueGain;
}
-void Agc::Resume()
+void Agc::resume()
{
- fixed_shutter_ = 0s;
- fixed_analogue_gain_ = 0;
+ fixedShutter_ = 0s;
+ fixedAnalogueGain_ = 0;
}
-unsigned int Agc::GetConvergenceFrames() const
+unsigned int Agc::getConvergenceFrames() const
{
// If shutter and gain have been explicitly set, there is no
// convergence to happen, so no need to drop any frames - return zero.
- if (fixed_shutter_ && fixed_analogue_gain_)
+ if (fixedShutter_ && fixedAnalogueGain_)
return 0;
else
- return config_.convergence_frames;
+ return config_.convergenceFrames;
}
-void Agc::SetEv(double ev)
+void Agc::setEv(double ev)
{
ev_ = ev;
}
-void Agc::SetFlickerPeriod(Duration flicker_period)
+void Agc::setFlickerPeriod(Duration flickerPeriod)
{
- flicker_period_ = flicker_period;
+ flickerPeriod_ = flickerPeriod;
}
-void Agc::SetMaxShutter(Duration max_shutter)
+void Agc::setMaxShutter(Duration maxShutter)
{
- max_shutter_ = max_shutter;
+ maxShutter_ = maxShutter;
}
-void Agc::SetFixedShutter(Duration fixed_shutter)
+void Agc::setFixedShutter(Duration fixedShutter)
{
- fixed_shutter_ = fixed_shutter;
+ fixedShutter_ = fixedShutter;
// Set this in case someone calls Pause() straight after.
- status_.shutter_time = clipShutter(fixed_shutter_);
+ status_.shutterTime = clipShutter(fixedShutter_);
}
-void Agc::SetFixedAnalogueGain(double fixed_analogue_gain)
+void Agc::setFixedAnalogueGain(double fixedAnalogueGain)
{
- fixed_analogue_gain_ = fixed_analogue_gain;
+ fixedAnalogueGain_ = fixedAnalogueGain;
// Set this in case someone calls Pause() straight after.
- status_.analogue_gain = fixed_analogue_gain;
+ status_.analogueGain = fixedAnalogueGain;
}
-void Agc::SetMeteringMode(std::string const &metering_mode_name)
+void Agc::setMeteringMode(std::string const &meteringModeName)
{
- metering_mode_name_ = metering_mode_name;
+ meteringModeName_ = meteringModeName;
}
-void Agc::SetExposureMode(std::string const &exposure_mode_name)
+void Agc::setExposureMode(std::string const &exposureModeName)
{
- exposure_mode_name_ = exposure_mode_name;
+ exposureModeName_ = exposureModeName;
}
-void Agc::SetConstraintMode(std::string const &constraint_mode_name)
+void Agc::setConstraintMode(std::string const &constraintModeName)
{
- constraint_mode_name_ = constraint_mode_name;
+ constraintModeName_ = constraintModeName;
}
-void Agc::SwitchMode(CameraMode const &camera_mode,
+void Agc::switchMode(CameraMode const &cameraMode,
Metadata *metadata)
{
/* AGC expects the mode sensitivity always to be non-zero. */
- ASSERT(camera_mode.sensitivity);
+ ASSERT(cameraMode.sensitivity);
housekeepConfig();
- Duration fixed_shutter = clipShutter(fixed_shutter_);
- if (fixed_shutter && fixed_analogue_gain_) {
+ Duration fixedShutter = clipShutter(fixedShutter_);
+ if (fixedShutter && fixedAnalogueGain_) {
// We're going to reset the algorithm here with these fixed values.
fetchAwbStatus(metadata);
- double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
- ASSERT(min_colour_gain != 0.0);
+ double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
+ ASSERT(minColourGain != 0.0);
// This is the equivalent of computeTargetExposure and applyDigitalGain.
- target_.total_exposure_no_dg = fixed_shutter * fixed_analogue_gain_;
- target_.total_exposure = target_.total_exposure_no_dg / min_colour_gain;
+ target_.totalExposureNoDG = fixedShutter * fixedAnalogueGain_;
+ target_.totalExposure = target_.totalExposureNoDG / minColourGain;
// Equivalent of filterExposure. This resets any "history".
filtered_ = target_;
// Equivalent of divideUpExposure.
- filtered_.shutter = fixed_shutter;
- filtered_.analogue_gain = fixed_analogue_gain_;
- } else if (status_.total_exposure_value) {
+ filtered_.shutter = fixedShutter;
+ filtered_.analogueGain = fixedAnalogueGain_;
+ } else if (status_.totalExposureValue) {
// On a mode switch, various things could happen:
// - the exposure profile might change
// - a fixed exposure or gain might be set
@@ -310,11 +305,11 @@ void Agc::SwitchMode(CameraMode const &camera_mode,
// that we just need to re-divide the exposure/gain according to the
// current exposure profile, which takes care of everything else.
- double ratio = last_sensitivity_ / camera_mode.sensitivity;
- target_.total_exposure_no_dg *= ratio;
- target_.total_exposure *= ratio;
- filtered_.total_exposure_no_dg *= ratio;
- filtered_.total_exposure *= ratio;
+ double ratio = lastSensitivity_ / cameraMode.sensitivity;
+ target_.totalExposureNoDG *= ratio;
+ target_.totalExposure *= ratio;
+ filtered_.totalExposureNoDG *= ratio;
+ filtered_.totalExposure *= ratio;
divideUpExposure();
} else {
@@ -324,114 +319,110 @@ void Agc::SwitchMode(CameraMode const &camera_mode,
// for any that weren't set.
// Equivalent of divideUpExposure.
- filtered_.shutter = fixed_shutter ? fixed_shutter : config_.default_exposure_time;
- filtered_.analogue_gain = fixed_analogue_gain_ ? fixed_analogue_gain_ : config_.default_analogue_gain;
+ filtered_.shutter = fixedShutter ? fixedShutter : config_.defaultExposureTime;
+ filtered_.analogueGain = fixedAnalogueGain_ ? fixedAnalogueGain_ : config_.defaultAnalogueGain;
}
writeAndFinish(metadata, false);
// We must remember the sensitivity of this mode for the next SwitchMode.
- last_sensitivity_ = camera_mode.sensitivity;
+ lastSensitivity_ = cameraMode.sensitivity;
}
-void Agc::Prepare(Metadata *image_metadata)
+void Agc::prepare(Metadata *imageMetadata)
{
- status_.digital_gain = 1.0;
- fetchAwbStatus(image_metadata); // always fetch it so that Process knows it's been done
+ status_.digitalGain = 1.0;
+ fetchAwbStatus(imageMetadata); // always fetch it so that Process knows it's been done
- if (status_.total_exposure_value) {
+ if (status_.totalExposureValue) {
// Process has run, so we have meaningful values.
- DeviceStatus device_status;
- if (image_metadata->Get("device.status", device_status) == 0) {
- Duration actual_exposure = device_status.shutter_speed *
- device_status.analogue_gain;
- if (actual_exposure) {
- status_.digital_gain =
- status_.total_exposure_value /
- actual_exposure;
- LOG(RPiAgc, Debug) << "Want total exposure " << status_.total_exposure_value;
+ DeviceStatus deviceStatus;
+ if (imageMetadata->get("device.status", deviceStatus) == 0) {
+ Duration actualExposure = deviceStatus.shutterSpeed *
+ deviceStatus.analogueGain;
+ if (actualExposure) {
+ status_.digitalGain = status_.totalExposureValue / actualExposure;
+ LOG(RPiAgc, Debug) << "Want total exposure " << status_.totalExposureValue;
// Never ask for a gain < 1.0, and also impose
// some upper limit. Make it customisable?
- status_.digital_gain = std::max(
- 1.0,
- std::min(status_.digital_gain, 4.0));
- LOG(RPiAgc, Debug) << "Actual exposure " << actual_exposure;
- LOG(RPiAgc, Debug) << "Use digital_gain " << status_.digital_gain;
+ status_.digitalGain = std::max(1.0, std::min(status_.digitalGain, 4.0));
+ LOG(RPiAgc, Debug) << "Actual exposure " << actualExposure;
+ LOG(RPiAgc, Debug) << "Use digitalGain " << status_.digitalGain;
LOG(RPiAgc, Debug) << "Effective exposure "
- << actual_exposure * status_.digital_gain;
+ << actualExposure * status_.digitalGain;
// Decide whether AEC/AGC has converged.
- updateLockStatus(device_status);
+ updateLockStatus(deviceStatus);
}
} else
- LOG(RPiAgc, Warning) << Name() << ": no device metadata";
- image_metadata->Set("agc.status", status_);
+ LOG(RPiAgc, Warning) << name() << ": no device metadata";
+ imageMetadata->set("agc.status", status_);
}
}
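As a worked example of the digital-gain step above (numbers invented for illustration): if process() previously asked for a total exposure equivalent to 20 ms, and the sensor actually delivered an 8 ms shutter at an analogue gain of 2.0 (16 ms), then the digital gain requested here is 20 / 16 = 1.25, comfortably inside the [1.0, 4.0] clamp, and the effective exposure logged is 16 ms × 1.25 = 20 ms.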
-void Agc::Process(StatisticsPtr &stats, Metadata *image_metadata)
+void Agc::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
- frame_count_++;
+ frameCount_++;
// First a little bit of housekeeping, fetching up-to-date settings and
// configuration, that kind of thing.
housekeepConfig();
// Get the current exposure values for the frame that's just arrived.
- fetchCurrentExposure(image_metadata);
+ fetchCurrentExposure(imageMetadata);
// Compute the total gain we require relative to the current exposure.
- double gain, target_Y;
- computeGain(stats.get(), image_metadata, gain, target_Y);
+ double gain, targetY;
+ computeGain(stats.get(), imageMetadata, gain, targetY);
// Now compute the target (final) exposure which we think we want.
computeTargetExposure(gain);
// Some of the exposure has to be applied as digital gain, so work out
// what that is. This function also tells us whether it's decided to
// "desaturate" the image more quickly.
- bool desaturate = applyDigitalGain(gain, target_Y);
+ bool desaturate = applyDigitalGain(gain, targetY);
// The results have to be filtered so as not to change too rapidly.
filterExposure(desaturate);
// The last thing is to divide up the exposure value into a shutter time
- // and analogue_gain, according to the current exposure mode.
+ // and analogue gain, according to the current exposure mode.
divideUpExposure();
// Finally advertise what we've done.
- writeAndFinish(image_metadata, desaturate);
+ writeAndFinish(imageMetadata, desaturate);
}
-void Agc::updateLockStatus(DeviceStatus const &device_status)
+void Agc::updateLockStatus(DeviceStatus const &deviceStatus)
{
- const double ERROR_FACTOR = 0.10; // make these customisable?
- const int MAX_LOCK_COUNT = 5;
- // Reset "lock count" when we exceed this multiple of ERROR_FACTOR
- const double RESET_MARGIN = 1.5;
+ const double errorFactor = 0.10; // make these customisable?
+ const int maxLockCount = 5;
+ // Reset "lock count" when we exceed this multiple of errorFactor
+ const double resetMargin = 1.5;
// Add 200us to the exposure time error to allow for line quantisation.
- Duration exposure_error = last_device_status_.shutter_speed * ERROR_FACTOR + 200us;
- double gain_error = last_device_status_.analogue_gain * ERROR_FACTOR;
- Duration target_error = last_target_exposure_ * ERROR_FACTOR;
+ Duration exposureError = lastDeviceStatus_.shutterSpeed * errorFactor + 200us;
+ double gainError = lastDeviceStatus_.analogueGain * errorFactor;
+ Duration targetError = lastTargetExposure_ * errorFactor;
// Note that we don't know the exposure/gain limits of the sensor, so
// the values we keep requesting may be unachievable. For this reason
// we only insist that we're close to values in the past few frames.
- if (device_status.shutter_speed > last_device_status_.shutter_speed - exposure_error &&
- device_status.shutter_speed < last_device_status_.shutter_speed + exposure_error &&
- device_status.analogue_gain > last_device_status_.analogue_gain - gain_error &&
- device_status.analogue_gain < last_device_status_.analogue_gain + gain_error &&
- status_.target_exposure_value > last_target_exposure_ - target_error &&
- status_.target_exposure_value < last_target_exposure_ + target_error)
- lock_count_ = std::min(lock_count_ + 1, MAX_LOCK_COUNT);
- else if (device_status.shutter_speed < last_device_status_.shutter_speed - RESET_MARGIN * exposure_error ||
- device_status.shutter_speed > last_device_status_.shutter_speed + RESET_MARGIN * exposure_error ||
- device_status.analogue_gain < last_device_status_.analogue_gain - RESET_MARGIN * gain_error ||
- device_status.analogue_gain > last_device_status_.analogue_gain + RESET_MARGIN * gain_error ||
- status_.target_exposure_value < last_target_exposure_ - RESET_MARGIN * target_error ||
- status_.target_exposure_value > last_target_exposure_ + RESET_MARGIN * target_error)
- lock_count_ = 0;
-
- last_device_status_ = device_status;
- last_target_exposure_ = status_.target_exposure_value;
-
- LOG(RPiAgc, Debug) << "Lock count updated to " << lock_count_;
- status_.locked = lock_count_ == MAX_LOCK_COUNT;
-}
-
-static void copy_string(std::string const &s, char *d, size_t size)
+ if (deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed - exposureError &&
+ deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed + exposureError &&
+ deviceStatus.analogueGain > lastDeviceStatus_.analogueGain - gainError &&
+ deviceStatus.analogueGain < lastDeviceStatus_.analogueGain + gainError &&
+ status_.targetExposureValue > lastTargetExposure_ - targetError &&
+ status_.targetExposureValue < lastTargetExposure_ + targetError)
+ lockCount_ = std::min(lockCount_ + 1, maxLockCount);
+ else if (deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed - resetMargin * exposureError ||
+ deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed + resetMargin * exposureError ||
+ deviceStatus.analogueGain < lastDeviceStatus_.analogueGain - resetMargin * gainError ||
+ deviceStatus.analogueGain > lastDeviceStatus_.analogueGain + resetMargin * gainError ||
+ status_.targetExposureValue < lastTargetExposure_ - resetMargin * targetError ||
+ status_.targetExposureValue > lastTargetExposure_ + resetMargin * targetError)
+ lockCount_ = 0;
+
+ lastDeviceStatus_ = deviceStatus;
+ lastTargetExposure_ = status_.targetExposureValue;
+
+ LOG(RPiAgc, Debug) << "Lock count updated to " << lockCount_;
+ status_.locked = lockCount_ == maxLockCount;
+}
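To put numbers on the lock test above (values invented for illustration): with a reported shutter of 10 ms and an analogue gain of 2.0, the windows are ±(1 ms + 200 us) on the shutter, ±0.2 on the gain and ±10% on the target exposure; staying inside all three for five consecutive frames sets status_.locked, while straying beyond 1.5 times any window resets the count to zero.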
+
+static void copyString(std::string const &s, char *d, size_t size)
{
size_t length = s.copy(d, size - 1);
d[length] = '\0';
@@ -441,97 +432,97 @@ void Agc::housekeepConfig()
{
// First fetch all the up-to-date settings, so no one else has to do it.
status_.ev = ev_;
- status_.fixed_shutter = clipShutter(fixed_shutter_);
- status_.fixed_analogue_gain = fixed_analogue_gain_;
- status_.flicker_period = flicker_period_;
- LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixed_shutter "
- << status_.fixed_shutter << " fixed_analogue_gain "
- << status_.fixed_analogue_gain;
+ status_.fixedShutter = clipShutter(fixedShutter_);
+ status_.fixedAnalogueGain = fixedAnalogueGain_;
+ status_.flickerPeriod = flickerPeriod_;
+ LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixedShutter "
+ << status_.fixedShutter << " fixedAnalogueGain "
+ << status_.fixedAnalogueGain;
// Make sure the "mode" pointers point to the up-to-date things, if
// they've changed.
- if (strcmp(metering_mode_name_.c_str(), status_.metering_mode)) {
- auto it = config_.metering_modes.find(metering_mode_name_);
- if (it == config_.metering_modes.end())
+ if (strcmp(meteringModeName_.c_str(), status_.meteringMode)) {
+ auto it = config_.meteringModes.find(meteringModeName_);
+ if (it == config_.meteringModes.end())
throw std::runtime_error("Agc: no metering mode " +
- metering_mode_name_);
- metering_mode_ = &it->second;
- copy_string(metering_mode_name_, status_.metering_mode,
- sizeof(status_.metering_mode));
+ meteringModeName_);
+ meteringMode_ = &it->second;
+ copyString(meteringModeName_, status_.meteringMode,
+ sizeof(status_.meteringMode));
}
- if (strcmp(exposure_mode_name_.c_str(), status_.exposure_mode)) {
- auto it = config_.exposure_modes.find(exposure_mode_name_);
- if (it == config_.exposure_modes.end())
+ if (strcmp(exposureModeName_.c_str(), status_.exposureMode)) {
+ auto it = config_.exposureModes.find(exposureModeName_);
+ if (it == config_.exposureModes.end())
throw std::runtime_error("Agc: no exposure profile " +
- exposure_mode_name_);
- exposure_mode_ = &it->second;
- copy_string(exposure_mode_name_, status_.exposure_mode,
- sizeof(status_.exposure_mode));
+ exposureModeName_);
+ exposureMode_ = &it->second;
+ copyString(exposureModeName_, status_.exposureMode,
+ sizeof(status_.exposureMode));
}
- if (strcmp(constraint_mode_name_.c_str(), status_.constraint_mode)) {
+ if (strcmp(constraintModeName_.c_str(), status_.constraintMode)) {
auto it =
- config_.constraint_modes.find(constraint_mode_name_);
- if (it == config_.constraint_modes.end())
+ config_.constraintModes.find(constraintModeName_);
+ if (it == config_.constraintModes.end())
throw std::runtime_error("Agc: no constraint list " +
- constraint_mode_name_);
- constraint_mode_ = &it->second;
- copy_string(constraint_mode_name_, status_.constraint_mode,
- sizeof(status_.constraint_mode));
+ constraintModeName_);
+ constraintMode_ = &it->second;
+ copyString(constraintModeName_, status_.constraintMode,
+ sizeof(status_.constraintMode));
}
- LOG(RPiAgc, Debug) << "exposure_mode "
- << exposure_mode_name_ << " constraint_mode "
- << constraint_mode_name_ << " metering_mode "
- << metering_mode_name_;
+ LOG(RPiAgc, Debug) << "exposureMode "
+ << exposureModeName_ << " constraintMode "
+ << constraintModeName_ << " meteringMode "
+ << meteringModeName_;
}
-void Agc::fetchCurrentExposure(Metadata *image_metadata)
+void Agc::fetchCurrentExposure(Metadata *imageMetadata)
{
- std::unique_lock<Metadata> lock(*image_metadata);
- DeviceStatus *device_status =
- image_metadata->GetLocked<DeviceStatus>("device.status");
- if (!device_status)
+ std::unique_lock<Metadata> lock(*imageMetadata);
+ DeviceStatus *deviceStatus =
+ imageMetadata->getLocked<DeviceStatus>("device.status");
+ if (!deviceStatus)
throw std::runtime_error("Agc: no device metadata");
- current_.shutter = device_status->shutter_speed;
- current_.analogue_gain = device_status->analogue_gain;
- AgcStatus *agc_status =
- image_metadata->GetLocked<AgcStatus>("agc.status");
- current_.total_exposure = agc_status ? agc_status->total_exposure_value : 0s;
- current_.total_exposure_no_dg = current_.shutter * current_.analogue_gain;
+ current_.shutter = deviceStatus->shutterSpeed;
+ current_.analogueGain = deviceStatus->analogueGain;
+ AgcStatus *agcStatus =
+ imageMetadata->getLocked<AgcStatus>("agc.status");
+ current_.totalExposure = agcStatus ? agcStatus->totalExposureValue : 0s;
+ current_.totalExposureNoDG = current_.shutter * current_.analogueGain;
}
-void Agc::fetchAwbStatus(Metadata *image_metadata)
+void Agc::fetchAwbStatus(Metadata *imageMetadata)
{
- awb_.gain_r = 1.0; // in case not found in metadata
- awb_.gain_g = 1.0;
- awb_.gain_b = 1.0;
- if (image_metadata->Get("awb.status", awb_) != 0)
+ awb_.gainR = 1.0; // in case not found in metadata
+ awb_.gainG = 1.0;
+ awb_.gainB = 1.0;
+ if (imageMetadata->get("awb.status", awb_) != 0)
LOG(RPiAgc, Debug) << "Agc: no AWB status found";
}
-static double compute_initial_Y(bcm2835_isp_stats *stats, AwbStatus const &awb,
- double weights[], double gain)
+static double computeInitialY(bcm2835_isp_stats *stats, AwbStatus const &awb,
+ double weights[], double gain)
{
bcm2835_isp_stats_region *regions = stats->agc_stats;
// Note how the calculation below means that equal weights give you
// "average" metering (i.e. all pixels equally important).
- double R_sum = 0, G_sum = 0, B_sum = 0, pixel_sum = 0;
+ double rSum = 0, gSum = 0, bSum = 0, pixelSum = 0;
for (int i = 0; i < AGC_STATS_SIZE; i++) {
double counted = regions[i].counted;
- double r_sum = std::min(regions[i].r_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
- double g_sum = std::min(regions[i].g_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
- double b_sum = std::min(regions[i].b_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
- R_sum += r_sum * weights[i];
- G_sum += g_sum * weights[i];
- B_sum += b_sum * weights[i];
- pixel_sum += counted * weights[i];
+ double rAcc = std::min(regions[i].r_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ double gAcc = std::min(regions[i].g_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ double bAcc = std::min(regions[i].b_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ rSum += rAcc * weights[i];
+ gSum += gAcc * weights[i];
+ bSum += bAcc * weights[i];
+ pixelSum += counted * weights[i];
}
- if (pixel_sum == 0.0) {
- LOG(RPiAgc, Warning) << "compute_initial_Y: pixel_sum is zero";
+ if (pixelSum == 0.0) {
+ LOG(RPiAgc, Warning) << "computeInitialY: pixelSum is zero";
return 0;
}
- double Y_sum = R_sum * awb.gain_r * .299 +
- G_sum * awb.gain_g * .587 +
- B_sum * awb.gain_b * .114;
- return Y_sum / pixel_sum / (1 << PIPELINE_BITS);
+ double ySum = rSum * awb.gainR * .299 +
+ gSum * awb.gainG * .587 +
+ bSum * awb.gainB * .114;
+ return ySum / pixelSum / (1 << PIPELINE_BITS);
}
// We handle extra gain through EV by adjusting our Y targets. However, you
@@ -542,108 +533,102 @@ static double compute_initial_Y(bcm2835_isp_stats *stats, AwbStatus const &awb,
#define EV_GAIN_Y_TARGET_LIMIT 0.9
-static double constraint_compute_gain(AgcConstraint &c, Histogram &h,
- double lux, double ev_gain,
- double &target_Y)
+static double constraintComputeGain(AgcConstraint &c, Histogram &h, double lux,
+ double evGain, double &targetY)
{
- target_Y = c.Y_target.Eval(c.Y_target.Domain().Clip(lux));
- target_Y = std::min(EV_GAIN_Y_TARGET_LIMIT, target_Y * ev_gain);
- double iqm = h.InterQuantileMean(c.q_lo, c.q_hi);
- return (target_Y * NUM_HISTOGRAM_BINS) / iqm;
+ targetY = c.yTarget.eval(c.yTarget.domain().clip(lux));
+ targetY = std::min(EV_GAIN_Y_TARGET_LIMIT, targetY * evGain);
+ double iqm = h.interQuantileMean(c.qLo, c.qHi);
+ return (targetY * NUM_HISTOGRAM_BINS) / iqm;
}
-void Agc::computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
- double &gain, double &target_Y)
+void Agc::computeGain(bcm2835_isp_stats *statistics, Metadata *imageMetadata,
+ double &gain, double &targetY)
{
struct LuxStatus lux = {};
lux.lux = 400; // default lux level to 400 in case no metadata found
- if (image_metadata->Get("lux.status", lux) != 0)
+ if (imageMetadata->get("lux.status", lux) != 0)
LOG(RPiAgc, Warning) << "Agc: no lux level found";
Histogram h(statistics->hist[0].g_hist, NUM_HISTOGRAM_BINS);
- double ev_gain = status_.ev * config_.base_ev;
+ double evGain = status_.ev * config_.baseEv;
// The initial gain and target_Y come from some of the regions. After
// that we consider the histogram constraints.
- target_Y =
- config_.Y_target.Eval(config_.Y_target.Domain().Clip(lux.lux));
- target_Y = std::min(EV_GAIN_Y_TARGET_LIMIT, target_Y * ev_gain);
+ targetY = config_.yTarget.eval(config_.yTarget.domain().clip(lux.lux));
+ targetY = std::min(EV_GAIN_Y_TARGET_LIMIT, targetY * evGain);
// Do this calculation a few times as brightness increase can be
// non-linear when there are saturated regions.
gain = 1.0;
for (int i = 0; i < 8; i++) {
- double initial_Y = compute_initial_Y(statistics, awb_,
- metering_mode_->weights, gain);
- double extra_gain = std::min(10.0, target_Y / (initial_Y + .001));
- gain *= extra_gain;
- LOG(RPiAgc, Debug) << "Initial Y " << initial_Y << " target " << target_Y
+ double initialY = computeInitialY(statistics, awb_, meteringMode_->weights, gain);
+ double extraGain = std::min(10.0, targetY / (initialY + .001));
+ gain *= extraGain;
+ LOG(RPiAgc, Debug) << "Initial Y " << initialY << " target " << targetY
<< " gives gain " << gain;
- if (extra_gain < 1.01) // close enough
+ if (extraGain < 1.01) // close enough
break;
}
- for (auto &c : *constraint_mode_) {
- double new_target_Y;
- double new_gain =
- constraint_compute_gain(c, h, lux.lux, ev_gain,
- new_target_Y);
+ for (auto &c : *constraintMode_) {
+ double newTargetY;
+ double newGain = constraintComputeGain(c, h, lux.lux, evGain, newTargetY);
LOG(RPiAgc, Debug) << "Constraint has target_Y "
- << new_target_Y << " giving gain " << new_gain;
- if (c.bound == AgcConstraint::Bound::LOWER &&
- new_gain > gain) {
+ << newTargetY << " giving gain " << newGain;
+ if (c.bound == AgcConstraint::Bound::LOWER && newGain > gain) {
LOG(RPiAgc, Debug) << "Lower bound constraint adopted";
- gain = new_gain, target_Y = new_target_Y;
- } else if (c.bound == AgcConstraint::Bound::UPPER &&
- new_gain < gain) {
+ gain = newGain;
+ targetY = newTargetY;
+ } else if (c.bound == AgcConstraint::Bound::UPPER && newGain < gain) {
LOG(RPiAgc, Debug) << "Upper bound constraint adopted";
- gain = new_gain, target_Y = new_target_Y;
+ gain = newGain;
+ targetY = newTargetY;
}
}
- LOG(RPiAgc, Debug) << "Final gain " << gain << " (target_Y " << target_Y << " ev "
- << status_.ev << " base_ev " << config_.base_ev
+ LOG(RPiAgc, Debug) << "Final gain " << gain << " (target_Y " << targetY << " ev "
+ << status_.ev << " base_ev " << config_.baseEv
<< ")";
}
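The loop above converges in a handful of iterations even when clipping makes the measured Y respond non-linearly to gain. A self-contained toy (not libcamera code; the saturating stand-in for computeInitialY() is invented purely to show the behaviour):

    #include <algorithm>
    #include <cstdio>

    int main()
    {
        const double targetY = 0.16;
        double gain = 1.0;
        for (int i = 0; i < 8; i++) {
            /* crude stand-in for computeInitialY(): measured Y saturates at 0.5 */
            double initialY = std::min(0.02 * gain, 0.5);
            double extraGain = std::min(10.0, targetY / (initialY + 0.001));
            gain *= extraGain;
            std::printf("iteration %d: Y %.3f gain %.3f\n", i, initialY, gain);
            if (extraGain < 1.01)   /* close enough, as in the real loop */
                break;
        }
        return 0;   /* settles after three iterations here */
    }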
void Agc::computeTargetExposure(double gain)
{
- if (status_.fixed_shutter && status_.fixed_analogue_gain) {
+ if (status_.fixedShutter && status_.fixedAnalogueGain) {
// When ag and shutter are both fixed, we need to drive the
// total exposure so that we end up with a digital gain of at least
- // 1/min_colour_gain. Otherwise we'd desaturate channels causing
+ // 1/minColourGain. Otherwise we'd desaturate channels causing
// white to go cyan or magenta.
- double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
- ASSERT(min_colour_gain != 0.0);
- target_.total_exposure =
- status_.fixed_shutter * status_.fixed_analogue_gain / min_colour_gain;
+ double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
+ ASSERT(minColourGain != 0.0);
+ target_.totalExposure =
+ status_.fixedShutter * status_.fixedAnalogueGain / minColourGain;
} else {
// The statistics reflect the image without digital gain, so the final
// total exposure we're aiming for is:
- target_.total_exposure = current_.total_exposure_no_dg * gain;
+ target_.totalExposure = current_.totalExposureNoDG * gain;
// The final target exposure is also limited to what the exposure
// mode allows.
- Duration max_shutter = status_.fixed_shutter
- ? status_.fixed_shutter
- : exposure_mode_->shutter.back();
- max_shutter = clipShutter(max_shutter);
- Duration max_total_exposure =
- max_shutter *
- (status_.fixed_analogue_gain != 0.0
- ? status_.fixed_analogue_gain
- : exposure_mode_->gain.back());
- target_.total_exposure = std::min(target_.total_exposure,
- max_total_exposure);
+ Duration maxShutter = status_.fixedShutter
+ ? status_.fixedShutter
+ : exposureMode_->shutter.back();
+ maxShutter = clipShutter(maxShutter);
+ Duration maxTotalExposure =
+ maxShutter *
+ (status_.fixedAnalogueGain != 0.0
+ ? status_.fixedAnalogueGain
+ : exposureMode_->gain.back());
+ target_.totalExposure = std::min(target_.totalExposure, maxTotalExposure);
}
- LOG(RPiAgc, Debug) << "Target total_exposure " << target_.total_exposure;
+ LOG(RPiAgc, Debug) << "Target totalExposure " << target_.totalExposure;
}
-bool Agc::applyDigitalGain(double gain, double target_Y)
+bool Agc::applyDigitalGain(double gain, double targetY)
{
- double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
- ASSERT(min_colour_gain != 0.0);
- double dg = 1.0 / min_colour_gain;
+ double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
+ ASSERT(minColourGain != 0.0);
+ double dg = 1.0 / minColourGain;
// I think this pipeline subtracts black level and rescales before we
// get the stats, so no need to worry about it.
LOG(RPiAgc, Debug) << "after AWB, target dg " << dg << " gain " << gain
- << " target_Y " << target_Y;
+ << " target_Y " << targetY;
// Finally, if we're trying to reduce exposure but the target_Y is
// "close" to 1.0, then the gain computed for that constraint will be
// only slightly less than one, because the measured Y can never be
@@ -651,13 +636,13 @@ bool Agc::applyDigitalGain(double gain, double target_Y)
// that the exposure can be reduced, de-saturating the image much more
// quickly (and we then approach the correct value more quickly from
// below).
- bool desaturate = target_Y > config_.fast_reduce_threshold &&
- gain < sqrt(target_Y);
+ bool desaturate = targetY > config_.fastReduceThreshold &&
+ gain < sqrt(targetY);
if (desaturate)
- dg /= config_.fast_reduce_threshold;
+ dg /= config_.fastReduceThreshold;
LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? " << desaturate;
- target_.total_exposure_no_dg = target_.total_exposure / dg;
- LOG(RPiAgc, Debug) << "Target total_exposure_no_dg " << target_.total_exposure_no_dg;
+ target_.totalExposureNoDG = target_.totalExposure / dg;
+ LOG(RPiAgc, Debug) << "Target totalExposureNoDG " << target_.totalExposureNoDG;
return desaturate;
}
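The desaturation heuristic explained in the comments above is easier to see with numbers. A minimal sketch, assuming an illustrative fastReduceThreshold of 0.4 (a tuning parameter, not fixed by this patch):

#include <cmath>
#include <iostream>

int main()
{
	double fastReduceThreshold = 0.4; // assumed tuning value
	double minColourGain = 0.9;       // illustrative AWB minimum
	double targetY = 0.95;            // metered target close to saturation
	double gain = 0.5;                // we want exposure roughly halved

	double dg = 1.0 / minColourGain;
	// Reducing exposure while targetY is near 1.0: boost the digital gain
	// so the shutter/analogue exposure can drop much faster.
	bool desaturate = targetY > fastReduceThreshold && gain < std::sqrt(targetY);
	if (desaturate)
		dg /= fastReduceThreshold;
	std::cout << "desaturate " << desaturate << " dg " << dg << "\n";
	return 0;
}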
@@ -666,39 +651,38 @@ void Agc::filterExposure(bool desaturate)
double speed = config_.speed;
// AGC adapts instantly if both shutter and gain are directly specified
// or we're in the startup phase.
- if ((status_.fixed_shutter && status_.fixed_analogue_gain) ||
- frame_count_ <= config_.startup_frames)
+ if ((status_.fixedShutter && status_.fixedAnalogueGain) ||
+ frameCount_ <= config_.startupFrames)
speed = 1.0;
- if (!filtered_.total_exposure) {
- filtered_.total_exposure = target_.total_exposure;
- filtered_.total_exposure_no_dg = target_.total_exposure_no_dg;
+ if (!filtered_.totalExposure) {
+ filtered_.totalExposure = target_.totalExposure;
+ filtered_.totalExposureNoDG = target_.totalExposureNoDG;
} else {
// If close to the result go faster, to save making so many
// micro-adjustments on the way. (Make this customisable?)
- if (filtered_.total_exposure < 1.2 * target_.total_exposure &&
- filtered_.total_exposure > 0.8 * target_.total_exposure)
+ if (filtered_.totalExposure < 1.2 * target_.totalExposure &&
+ filtered_.totalExposure > 0.8 * target_.totalExposure)
speed = sqrt(speed);
- filtered_.total_exposure = speed * target_.total_exposure +
- filtered_.total_exposure * (1.0 - speed);
- // When desaturing, take a big jump down in exposure_no_dg,
+ filtered_.totalExposure = speed * target_.totalExposure +
+ filtered_.totalExposure * (1.0 - speed);
+ // When desaturating, take a big jump down in totalExposureNoDG,

// which we'll hide with digital gain.
if (desaturate)
- filtered_.total_exposure_no_dg =
- target_.total_exposure_no_dg;
+ filtered_.totalExposureNoDG =
+ target_.totalExposureNoDG;
else
- filtered_.total_exposure_no_dg =
- speed * target_.total_exposure_no_dg +
- filtered_.total_exposure_no_dg * (1.0 - speed);
+ filtered_.totalExposureNoDG =
+ speed * target_.totalExposureNoDG +
+ filtered_.totalExposureNoDG * (1.0 - speed);
}
- // We can't let the no_dg exposure deviate too far below the
+ // We can't let totalExposureNoDG deviate too far below the
// total exposure, as there might not be enough digital gain available
// in the ISP to hide it (which will cause nasty oscillation).
- if (filtered_.total_exposure_no_dg <
- filtered_.total_exposure * config_.fast_reduce_threshold)
- filtered_.total_exposure_no_dg = filtered_.total_exposure *
- config_.fast_reduce_threshold;
- LOG(RPiAgc, Debug) << "After filtering, total_exposure " << filtered_.total_exposure
- << " no dg " << filtered_.total_exposure_no_dg;
+ if (filtered_.totalExposureNoDG <
+ filtered_.totalExposure * config_.fastReduceThreshold)
+ filtered_.totalExposureNoDG = filtered_.totalExposure * config_.fastReduceThreshold;
+ LOG(RPiAgc, Debug) << "After filtering, totalExposure " << filtered_.totalExposure
+ << " no dg " << filtered_.totalExposureNoDG;
}
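For reference, the filtering above is a simple IIR step towards the target, with the speed square-rooted (i.e. increased) once the filtered value is within +/-20% of the target. A toy sketch with made-up numbers:

#include <cmath>
#include <iostream>

int main()
{
	double speed = 0.2;        // illustrative config_.speed
	double target = 20000.0;   // target total exposure (us)
	double filtered = 10000.0; // current filtered value (us)

	for (int frame = 0; frame < 8; frame++) {
		double s = speed;
		if (filtered < 1.2 * target && filtered > 0.8 * target)
			s = std::sqrt(s); // close to target: converge faster
		filtered = s * target + (1.0 - s) * filtered;
		std::cout << "frame " << frame << " filtered " << filtered << "\n";
	}
	return 0;
}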
void Agc::divideUpExposure()
@@ -706,92 +690,84 @@ void Agc::divideUpExposure()
// Sending the fixed shutter/gain cases through the same code may seem
// unnecessary, but it will make more sense when we extend this to cover
// variable aperture.
- Duration exposure_value = filtered_.total_exposure_no_dg;
- Duration shutter_time;
- double analogue_gain;
- shutter_time = status_.fixed_shutter
- ? status_.fixed_shutter
- : exposure_mode_->shutter[0];
- shutter_time = clipShutter(shutter_time);
- analogue_gain = status_.fixed_analogue_gain != 0.0
- ? status_.fixed_analogue_gain
- : exposure_mode_->gain[0];
- if (shutter_time * analogue_gain < exposure_value) {
+ Duration exposureValue = filtered_.totalExposureNoDG;
+ Duration shutterTime;
+ double analogueGain;
+ shutterTime = status_.fixedShutter ? status_.fixedShutter
+ : exposureMode_->shutter[0];
+ shutterTime = clipShutter(shutterTime);
+ analogueGain = status_.fixedAnalogueGain != 0.0 ? status_.fixedAnalogueGain
+ : exposureMode_->gain[0];
+ if (shutterTime * analogueGain < exposureValue) {
for (unsigned int stage = 1;
- stage < exposure_mode_->gain.size(); stage++) {
- if (!status_.fixed_shutter) {
- Duration stage_shutter =
- clipShutter(exposure_mode_->shutter[stage]);
- if (stage_shutter * analogue_gain >=
- exposure_value) {
- shutter_time =
- exposure_value / analogue_gain;
+ stage < exposureMode_->gain.size(); stage++) {
+ if (!status_.fixedShutter) {
+ Duration stageShutter =
+ clipShutter(exposureMode_->shutter[stage]);
+ if (stageShutter * analogueGain >= exposureValue) {
+ shutterTime = exposureValue / analogueGain;
break;
}
- shutter_time = stage_shutter;
+ shutterTime = stageShutter;
}
- if (status_.fixed_analogue_gain == 0.0) {
- if (exposure_mode_->gain[stage] *
- shutter_time >=
- exposure_value) {
- analogue_gain =
- exposure_value / shutter_time;
+ if (status_.fixedAnalogueGain == 0.0) {
+ if (exposureMode_->gain[stage] * shutterTime >= exposureValue) {
+ analogueGain = exposureValue / shutterTime;
break;
}
- analogue_gain = exposure_mode_->gain[stage];
+ analogueGain = exposureMode_->gain[stage];
}
}
}
- LOG(RPiAgc, Debug) << "Divided up shutter and gain are " << shutter_time << " and "
- << analogue_gain;
+ LOG(RPiAgc, Debug) << "Divided up shutter and gain are " << shutterTime << " and "
+ << analogueGain;
// Finally adjust shutter time for flicker avoidance (require both
// shutter and gain not to be fixed).
- if (!status_.fixed_shutter && !status_.fixed_analogue_gain &&
- status_.flicker_period) {
- int flicker_periods = shutter_time / status_.flicker_period;
- if (flicker_periods) {
- Duration new_shutter_time = flicker_periods * status_.flicker_period;
- analogue_gain *= shutter_time / new_shutter_time;
+ if (!status_.fixedShutter && !status_.fixedAnalogueGain &&
+ status_.flickerPeriod) {
+ int flickerPeriods = shutterTime / status_.flickerPeriod;
+ if (flickerPeriods) {
+ Duration newShutterTime = flickerPeriods * status_.flickerPeriod;
+ analogueGain *= shutterTime / newShutterTime;
// We should still not allow the ag to go over the
// largest value in the exposure mode. Note that this
// may force more of the total exposure into the digital
// gain as a side-effect.
- analogue_gain = std::min(analogue_gain,
- exposure_mode_->gain.back());
- shutter_time = new_shutter_time;
+ analogueGain = std::min(analogueGain, exposureMode_->gain.back());
+ shutterTime = newShutterTime;
}
LOG(RPiAgc, Debug) << "After flicker avoidance, shutter "
- << shutter_time << " gain " << analogue_gain;
+ << shutterTime << " gain " << analogueGain;
}
- filtered_.shutter = shutter_time;
- filtered_.analogue_gain = analogue_gain;
+ filtered_.shutter = shutterTime;
+ filtered_.analogueGain = analogueGain;
}
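The flicker-avoidance step above rounds the shutter down to a whole number of flicker periods and compensates with analogue gain. A hypothetical standalone version (values chosen for illustration, with the gain cap standing in for exposureMode_->gain.back()):

#include <algorithm>
#include <iostream>

int main()
{
	double shutterUs = 11500.0;       // divided-up shutter
	double analogueGain = 2.0;
	double flickerPeriodUs = 10000.0; // 100 Hz mains flicker
	double maxGain = 8.0;             // assumed exposure mode maximum

	int flickerPeriods = static_cast<int>(shutterUs / flickerPeriodUs);
	if (flickerPeriods) {
		double newShutterUs = flickerPeriods * flickerPeriodUs;
		analogueGain *= shutterUs / newShutterUs; // keep exposure constant
		analogueGain = std::min(analogueGain, maxGain);
		shutterUs = newShutterUs;
	}
	std::cout << "shutter " << shutterUs << " us, gain " << analogueGain << "\n";
	return 0;
}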
-void Agc::writeAndFinish(Metadata *image_metadata, bool desaturate)
+void Agc::writeAndFinish(Metadata *imageMetadata, bool desaturate)
{
- status_.total_exposure_value = filtered_.total_exposure;
- status_.target_exposure_value = desaturate ? 0s : target_.total_exposure_no_dg;
- status_.shutter_time = filtered_.shutter;
- status_.analogue_gain = filtered_.analogue_gain;
+ status_.totalExposureValue = filtered_.totalExposure;
+ status_.targetExposureValue = desaturate ? 0s : target_.totalExposureNoDG;
+ status_.shutterTime = filtered_.shutter;
+ status_.analogueGain = filtered_.analogueGain;
// Write to metadata as well, in case anyone wants to update the camera
// immediately.
- image_metadata->Set("agc.status", status_);
+ imageMetadata->set("agc.status", status_);
LOG(RPiAgc, Debug) << "Output written, total exposure requested is "
- << filtered_.total_exposure;
+ << filtered_.totalExposure;
LOG(RPiAgc, Debug) << "Camera exposure update: shutter time " << filtered_.shutter
- << " analogue gain " << filtered_.analogue_gain;
+ << " analogue gain " << filtered_.analogueGain;
}
Duration Agc::clipShutter(Duration shutter)
{
- if (max_shutter_)
- shutter = std::min(shutter, max_shutter_);
+ if (maxShutter_)
+ shutter = std::min(shutter, maxShutter_);
return shutter;
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Agc(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
diff --git a/src/ipa/raspberrypi/controller/rpi/agc.hpp b/src/ipa/raspberrypi/controller/rpi/agc.hpp
index c100d312..4ed7293b 100644
--- a/src/ipa/raspberrypi/controller/rpi/agc.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/agc.hpp
@@ -26,114 +26,114 @@ namespace RPiController {
struct AgcMeteringMode {
double weights[AGC_STATS_SIZE];
- void Read(boost::property_tree::ptree const &params);
+ void read(boost::property_tree::ptree const &params);
};
struct AgcExposureMode {
std::vector<libcamera::utils::Duration> shutter;
std::vector<double> gain;
- void Read(boost::property_tree::ptree const &params);
+ void read(boost::property_tree::ptree const &params);
};
struct AgcConstraint {
enum class Bound { LOWER = 0, UPPER = 1 };
Bound bound;
- double q_lo;
- double q_hi;
- Pwl Y_target;
- void Read(boost::property_tree::ptree const &params);
+ double qLo;
+ double qHi;
+ Pwl yTarget;
+ void read(boost::property_tree::ptree const &params);
};
typedef std::vector<AgcConstraint> AgcConstraintMode;
struct AgcConfig {
- void Read(boost::property_tree::ptree const &params);
- std::map<std::string, AgcMeteringMode> metering_modes;
- std::map<std::string, AgcExposureMode> exposure_modes;
- std::map<std::string, AgcConstraintMode> constraint_modes;
- Pwl Y_target;
+ void read(boost::property_tree::ptree const &params);
+ std::map<std::string, AgcMeteringMode> meteringModes;
+ std::map<std::string, AgcExposureMode> exposureModes;
+ std::map<std::string, AgcConstraintMode> constraintModes;
+ Pwl yTarget;
double speed;
- uint16_t startup_frames;
- unsigned int convergence_frames;
- double max_change;
- double min_change;
- double fast_reduce_threshold;
- double speed_up_threshold;
- std::string default_metering_mode;
- std::string default_exposure_mode;
- std::string default_constraint_mode;
- double base_ev;
- libcamera::utils::Duration default_exposure_time;
- double default_analogue_gain;
+ uint16_t startupFrames;
+ unsigned int convergenceFrames;
+ double maxChange;
+ double minChange;
+ double fastReduceThreshold;
+ double speedUpThreshold;
+ std::string defaultMeteringMode;
+ std::string defaultExposureMode;
+ std::string defaultConstraintMode;
+ double baseEv;
+ libcamera::utils::Duration defaultExposureTime;
+ double defaultAnalogueGain;
};
class Agc : public AgcAlgorithm
{
public:
Agc(Controller *controller);
- char const *Name() const override;
- void Read(boost::property_tree::ptree const &params) override;
+ char const *name() const override;
+ void read(boost::property_tree::ptree const &params) override;
// AGC handles "pausing" for itself.
- bool IsPaused() const override;
- void Pause() override;
- void Resume() override;
- unsigned int GetConvergenceFrames() const override;
- void SetEv(double ev) override;
- void SetFlickerPeriod(libcamera::utils::Duration flicker_period) override;
- void SetMaxShutter(libcamera::utils::Duration max_shutter) override;
- void SetFixedShutter(libcamera::utils::Duration fixed_shutter) override;
- void SetFixedAnalogueGain(double fixed_analogue_gain) override;
- void SetMeteringMode(std::string const &metering_mode_name) override;
- void SetExposureMode(std::string const &exposure_mode_name) override;
- void SetConstraintMode(std::string const &contraint_mode_name) override;
- void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
- void Prepare(Metadata *image_metadata) override;
- void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
+ bool isPaused() const override;
+ void pause() override;
+ void resume() override;
+ unsigned int getConvergenceFrames() const override;
+ void setEv(double ev) override;
+ void setFlickerPeriod(libcamera::utils::Duration flickerPeriod) override;
+ void setMaxShutter(libcamera::utils::Duration maxShutter) override;
+ void setFixedShutter(libcamera::utils::Duration fixedShutter) override;
+ void setFixedAnalogueGain(double fixedAnalogueGain) override;
+ void setMeteringMode(std::string const &meteringModeName) override;
+ void setExposureMode(std::string const &exposureModeName) override;
+ void setConstraintMode(std::string const &contraintModeName) override;
+ void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
+ void prepare(Metadata *imageMetadata) override;
+ void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
private:
- void updateLockStatus(DeviceStatus const &device_status);
+ void updateLockStatus(DeviceStatus const &deviceStatus);
AgcConfig config_;
void housekeepConfig();
- void fetchCurrentExposure(Metadata *image_metadata);
- void fetchAwbStatus(Metadata *image_metadata);
- void computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
- double &gain, double &target_Y);
+ void fetchCurrentExposure(Metadata *imageMetadata);
+ void fetchAwbStatus(Metadata *imageMetadata);
+ void computeGain(bcm2835_isp_stats *statistics, Metadata *imageMetadata,
+ double &gain, double &targetY);
void computeTargetExposure(double gain);
- bool applyDigitalGain(double gain, double target_Y);
+ bool applyDigitalGain(double gain, double targetY);
void filterExposure(bool desaturate);
void divideUpExposure();
- void writeAndFinish(Metadata *image_metadata, bool desaturate);
+ void writeAndFinish(Metadata *imageMetadata, bool desaturate);
libcamera::utils::Duration clipShutter(libcamera::utils::Duration shutter);
- AgcMeteringMode *metering_mode_;
- AgcExposureMode *exposure_mode_;
- AgcConstraintMode *constraint_mode_;
- uint64_t frame_count_;
+ AgcMeteringMode *meteringMode_;
+ AgcExposureMode *exposureMode_;
+ AgcConstraintMode *constraintMode_;
+ uint64_t frameCount_;
AwbStatus awb_;
struct ExposureValues {
ExposureValues();
libcamera::utils::Duration shutter;
- double analogue_gain;
- libcamera::utils::Duration total_exposure;
- libcamera::utils::Duration total_exposure_no_dg; // without digital gain
+ double analogueGain;
+ libcamera::utils::Duration totalExposure;
+ libcamera::utils::Duration totalExposureNoDG; // without digital gain
};
ExposureValues current_; // values for the current frame
ExposureValues target_; // calculate the values we want here
ExposureValues filtered_; // these values are filtered towards target
AgcStatus status_;
- int lock_count_;
- DeviceStatus last_device_status_;
- libcamera::utils::Duration last_target_exposure_;
- double last_sensitivity_; // sensitivity of the previous camera mode
+ int lockCount_;
+ DeviceStatus lastDeviceStatus_;
+ libcamera::utils::Duration lastTargetExposure_;
+ double lastSensitivity_; // sensitivity of the previous camera mode
// Below here the "settings" that applications can change.
- std::string metering_mode_name_;
- std::string exposure_mode_name_;
- std::string constraint_mode_name_;
+ std::string meteringModeName_;
+ std::string exposureModeName_;
+ std::string constraintModeName_;
double ev_;
- libcamera::utils::Duration flicker_period_;
- libcamera::utils::Duration max_shutter_;
- libcamera::utils::Duration fixed_shutter_;
- double fixed_analogue_gain_;
+ libcamera::utils::Duration flickerPeriod_;
+ libcamera::utils::Duration maxShutter_;
+ libcamera::utils::Duration fixedShutter_;
+ double fixedAnalogueGain_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/alsc.cpp b/src/ipa/raspberrypi/controller/rpi/alsc.cpp
index e575c14a..98b77154 100644
--- a/src/ipa/raspberrypi/controller/rpi/alsc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/alsc.cpp
@@ -26,31 +26,31 @@ LOG_DEFINE_CATEGORY(RPiAlsc)
static const int X = ALSC_CELLS_X;
static const int Y = ALSC_CELLS_Y;
static const int XY = X * Y;
-static const double INSUFFICIENT_DATA = -1.0;
+static const double InsufficientData = -1.0;
Alsc::Alsc(Controller *controller)
: Algorithm(controller)
{
- async_abort_ = async_start_ = async_started_ = async_finished_ = false;
- async_thread_ = std::thread(std::bind(&Alsc::asyncFunc, this));
+ asyncAbort_ = asyncStart_ = asyncStarted_ = asyncFinished_ = false;
+ asyncThread_ = std::thread(std::bind(&Alsc::asyncFunc, this));
}
Alsc::~Alsc()
{
{
std::lock_guard<std::mutex> lock(mutex_);
- async_abort_ = true;
+ asyncAbort_ = true;
}
- async_signal_.notify_one();
- async_thread_.join();
+ asyncSignal_.notify_one();
+ asyncThread_.join();
}
-char const *Alsc::Name() const
+char const *Alsc::name() const
{
return NAME;
}
-static void generate_lut(double *lut, boost::property_tree::ptree const &params)
+static void generateLut(double *lut, boost::property_tree::ptree const &params)
{
double cstrength = params.get<double>("corner_strength", 2.0);
if (cstrength <= 1.0)
@@ -73,34 +73,34 @@ static void generate_lut(double *lut, boost::property_tree::ptree const &params)
}
}
-static void read_lut(double *lut, boost::property_tree::ptree const &params)
+static void readLut(double *lut, boost::property_tree::ptree const &params)
{
int num = 0;
- const int max_num = XY;
+ const int maxNum = XY;
for (auto &p : params) {
- if (num == max_num)
+ if (num == maxNum)
throw std::runtime_error(
"Alsc: too many entries in LSC table");
lut[num++] = p.second.get_value<double>();
}
- if (num < max_num)
+ if (num < maxNum)
throw std::runtime_error("Alsc: too few entries in LSC table");
}
-static void read_calibrations(std::vector<AlscCalibration> &calibrations,
- boost::property_tree::ptree const &params,
- std::string const &name)
+static void readCalibrations(std::vector<AlscCalibration> &calibrations,
+ boost::property_tree::ptree const &params,
+ std::string const &name)
{
if (params.get_child_optional(name)) {
- double last_ct = 0;
+ double lastCt = 0;
for (auto &p : params.get_child(name)) {
double ct = p.second.get<double>("ct");
- if (ct <= last_ct)
+ if (ct <= lastCt)
throw std::runtime_error(
"Alsc: entries in " + name +
" must be in increasing ct order");
AlscCalibration calibration;
- calibration.ct = last_ct = ct;
+ calibration.ct = lastCt = ct;
boost::property_tree::ptree const &table =
p.second.get_child("table");
int num = 0;
@@ -124,249 +124,239 @@ static void read_calibrations(std::vector<AlscCalibration> &calibrations,
}
}
-void Alsc::Read(boost::property_tree::ptree const &params)
+void Alsc::read(boost::property_tree::ptree const &params)
{
- config_.frame_period = params.get<uint16_t>("frame_period", 12);
- config_.startup_frames = params.get<uint16_t>("startup_frames", 10);
+ config_.framePeriod = params.get<uint16_t>("frame_period", 12);
+ config_.startupFrames = params.get<uint16_t>("startup_frames", 10);
config_.speed = params.get<double>("speed", 0.05);
double sigma = params.get<double>("sigma", 0.01);
- config_.sigma_Cr = params.get<double>("sigma_Cr", sigma);
- config_.sigma_Cb = params.get<double>("sigma_Cb", sigma);
- config_.min_count = params.get<double>("min_count", 10.0);
- config_.min_G = params.get<uint16_t>("min_G", 50);
+ config_.sigmaCr = params.get<double>("sigma_Cr", sigma);
+ config_.sigmaCb = params.get<double>("sigma_Cb", sigma);
+ config_.minCount = params.get<double>("min_count", 10.0);
+ config_.minG = params.get<uint16_t>("min_G", 50);
config_.omega = params.get<double>("omega", 1.3);
- config_.n_iter = params.get<uint32_t>("n_iter", X + Y);
- config_.luminance_strength =
+ config_.nIter = params.get<uint32_t>("n_iter", X + Y);
+ config_.luminanceStrength =
params.get<double>("luminance_strength", 1.0);
for (int i = 0; i < XY; i++)
- config_.luminance_lut[i] = 1.0;
+ config_.luminanceLut[i] = 1.0;
if (params.get_child_optional("corner_strength"))
- generate_lut(config_.luminance_lut, params);
+ generateLut(config_.luminanceLut, params);
else if (params.get_child_optional("luminance_lut"))
- read_lut(config_.luminance_lut,
- params.get_child("luminance_lut"));
+ readLut(config_.luminanceLut,
+ params.get_child("luminance_lut"));
else
LOG(RPiAlsc, Warning)
<< "no luminance table - assume unity everywhere";
- read_calibrations(config_.calibrations_Cr, params, "calibrations_Cr");
- read_calibrations(config_.calibrations_Cb, params, "calibrations_Cb");
- config_.default_ct = params.get<double>("default_ct", 4500.0);
+ readCalibrations(config_.calibrationsCr, params, "calibrations_Cr");
+ readCalibrations(config_.calibrationsCb, params, "calibrations_Cb");
+ config_.defaultCt = params.get<double>("default_ct", 4500.0);
config_.threshold = params.get<double>("threshold", 1e-3);
- config_.lambda_bound = params.get<double>("lambda_bound", 0.05);
-}
-
-static double get_ct(Metadata *metadata, double default_ct);
-static void get_cal_table(double ct,
- std::vector<AlscCalibration> const &calibrations,
- double cal_table[XY]);
-static void resample_cal_table(double const cal_table_in[XY],
- CameraMode const &camera_mode,
- double cal_table_out[XY]);
-static void compensate_lambdas_for_cal(double const cal_table[XY],
- double const old_lambdas[XY],
- double new_lambdas[XY]);
-static void add_luminance_to_tables(double results[3][Y][X],
- double const lambda_r[XY], double lambda_g,
- double const lambda_b[XY],
- double const luminance_lut[XY],
- double luminance_strength);
-
-void Alsc::Initialise()
-{
- frame_count2_ = frame_count_ = frame_phase_ = 0;
- first_time_ = true;
- ct_ = config_.default_ct;
+ config_.lambdaBound = params.get<double>("lambda_bound", 0.05);
+}
+
+static double getCt(Metadata *metadata, double defaultCt);
+static void getCalTable(double ct, std::vector<AlscCalibration> const &calibrations,
+ double calTable[XY]);
+static void resampleCalTable(double const calTableIn[XY], CameraMode const &cameraMode,
+ double calTableOut[XY]);
+static void compensateLambdasForCal(double const calTable[XY], double const oldLambdas[XY],
+ double newLambdas[XY]);
+static void addLuminanceToTables(double results[3][Y][X], double const lambdaR[XY], double lambdaG,
+ double const lambdaB[XY], double const luminanceLut[XY],
+ double luminanceStrength);
+
+void Alsc::initialise()
+{
+ frameCount2_ = frameCount_ = framePhase_ = 0;
+ firstTime_ = true;
+ ct_ = config_.defaultCt;
// The lambdas are initialised in the SwitchMode.
}
void Alsc::waitForAysncThread()
{
- if (async_started_) {
- async_started_ = false;
+ if (asyncStarted_) {
+ asyncStarted_ = false;
std::unique_lock<std::mutex> lock(mutex_);
- sync_signal_.wait(lock, [&] {
- return async_finished_;
+ syncSignal_.wait(lock, [&] {
+ return asyncFinished_;
});
- async_finished_ = false;
+ asyncFinished_ = false;
}
}
-static bool compare_modes(CameraMode const &cm0, CameraMode const &cm1)
+static bool compareModes(CameraMode const &cm0, CameraMode const &cm1)
{
// Return true if the modes crop from the sensor significantly differently,
// or if the user transform has changed.
if (cm0.transform != cm1.transform)
return true;
- int left_diff = abs(cm0.crop_x - cm1.crop_x);
- int top_diff = abs(cm0.crop_y - cm1.crop_y);
- int right_diff = fabs(cm0.crop_x + cm0.scale_x * cm0.width -
- cm1.crop_x - cm1.scale_x * cm1.width);
- int bottom_diff = fabs(cm0.crop_y + cm0.scale_y * cm0.height -
- cm1.crop_y - cm1.scale_y * cm1.height);
+ int leftDiff = abs(cm0.cropX - cm1.cropX);
+ int topDiff = abs(cm0.cropY - cm1.cropY);
+ int rightDiff = fabs(cm0.cropX + cm0.scaleX * cm0.width -
+ cm1.cropX - cm1.scaleX * cm1.width);
+ int bottomDiff = fabs(cm0.cropY + cm0.scaleY * cm0.height -
+ cm1.cropY - cm1.scaleY * cm1.height);
// These thresholds are a rather arbitrary amount chosen to trigger
// when carrying on with the previously calculated tables might be
// worse than regenerating them (but without the adaptive algorithm).
- int threshold_x = cm0.sensor_width >> 4;
- int threshold_y = cm0.sensor_height >> 4;
- return left_diff > threshold_x || right_diff > threshold_x ||
- top_diff > threshold_y || bottom_diff > threshold_y;
+ int thresholdX = cm0.sensorWidth >> 4;
+ int thresholdY = cm0.sensorHeight >> 4;
+ return leftDiff > thresholdX || rightDiff > thresholdX ||
+ topDiff > thresholdY || bottomDiff > thresholdY;
}
-void Alsc::SwitchMode(CameraMode const &camera_mode,
+void Alsc::switchMode(CameraMode const &cameraMode,
[[maybe_unused]] Metadata *metadata)
{
// We're going to start over with the tables if there's any "significant"
// change.
- bool reset_tables = first_time_ || compare_modes(camera_mode_, camera_mode);
+ bool resetTables = firstTime_ || compareModes(cameraMode_, cameraMode);
// Believe the colour temperature from the AWB, if there is one.
- ct_ = get_ct(metadata, ct_);
+ ct_ = getCt(metadata, ct_);
// Ensure the other thread isn't running while we do this.
waitForAysncThread();
- camera_mode_ = camera_mode;
+ cameraMode_ = cameraMode;
// We must resample the luminance table like we do the others, but it's
// fixed so we can simply do it up front here.
- resample_cal_table(config_.luminance_lut, camera_mode_, luminance_table_);
+ resampleCalTable(config_.luminanceLut, cameraMode_, luminanceTable_);
- if (reset_tables) {
+ if (resetTables) {
// Upon every "table reset", arrange for something sensible to be
// generated. Construct the tables for the previous recorded colour
// temperature. In order to start over from scratch we initialise
// the lambdas, but the rest of this code then echoes the code in
// doAlsc, without the adaptive algorithm.
for (int i = 0; i < XY; i++)
- lambda_r_[i] = lambda_b_[i] = 1.0;
- double cal_table_r[XY], cal_table_b[XY], cal_table_tmp[XY];
- get_cal_table(ct_, config_.calibrations_Cr, cal_table_tmp);
- resample_cal_table(cal_table_tmp, camera_mode_, cal_table_r);
- get_cal_table(ct_, config_.calibrations_Cb, cal_table_tmp);
- resample_cal_table(cal_table_tmp, camera_mode_, cal_table_b);
- compensate_lambdas_for_cal(cal_table_r, lambda_r_,
- async_lambda_r_);
- compensate_lambdas_for_cal(cal_table_b, lambda_b_,
- async_lambda_b_);
- add_luminance_to_tables(sync_results_, async_lambda_r_, 1.0,
- async_lambda_b_, luminance_table_,
- config_.luminance_strength);
- memcpy(prev_sync_results_, sync_results_,
- sizeof(prev_sync_results_));
- frame_phase_ = config_.frame_period; // run the algo again asap
- first_time_ = false;
+ lambdaR_[i] = lambdaB_[i] = 1.0;
+ double calTableR[XY], calTableB[XY], calTableTmp[XY];
+ getCalTable(ct_, config_.calibrationsCr, calTableTmp);
+ resampleCalTable(calTableTmp, cameraMode_, calTableR);
+ getCalTable(ct_, config_.calibrationsCb, calTableTmp);
+ resampleCalTable(calTableTmp, cameraMode_, calTableB);
+ compensateLambdasForCal(calTableR, lambdaR_, asyncLambdaR_);
+ compensateLambdasForCal(calTableB, lambdaB_, asyncLambdaB_);
+ addLuminanceToTables(syncResults_, asyncLambdaR_, 1.0, asyncLambdaB_,
+ luminanceTable_, config_.luminanceStrength);
+ memcpy(prevSyncResults_, syncResults_, sizeof(prevSyncResults_));
+ framePhase_ = config_.framePeriod; // run the algo again asap
+ firstTime_ = false;
}
}
void Alsc::fetchAsyncResults()
{
LOG(RPiAlsc, Debug) << "Fetch ALSC results";
- async_finished_ = false;
- async_started_ = false;
- memcpy(sync_results_, async_results_, sizeof(sync_results_));
+ asyncFinished_ = false;
+ asyncStarted_ = false;
+ memcpy(syncResults_, asyncResults_, sizeof(syncResults_));
}
-double get_ct(Metadata *metadata, double default_ct)
+double getCt(Metadata *metadata, double defaultCt)
{
- AwbStatus awb_status;
- awb_status.temperature_K = default_ct; // in case nothing found
- if (metadata->Get("awb.status", awb_status) != 0)
+ AwbStatus awbStatus;
+ awbStatus.temperatureK = defaultCt; // in case nothing found
+ if (metadata->get("awb.status", awbStatus) != 0)
LOG(RPiAlsc, Debug) << "no AWB results found, using "
- << awb_status.temperature_K;
+ << awbStatus.temperatureK;
else
LOG(RPiAlsc, Debug) << "AWB results found, using "
- << awb_status.temperature_K;
- return awb_status.temperature_K;
+ << awbStatus.temperatureK;
+ return awbStatus.temperatureK;
}
-static void copy_stats(bcm2835_isp_stats_region regions[XY], StatisticsPtr &stats,
- AlscStatus const &status)
+static void copyStats(bcm2835_isp_stats_region regions[XY], StatisticsPtr &stats,
+ AlscStatus const &status)
{
- bcm2835_isp_stats_region *input_regions = stats->awb_stats;
- double *r_table = (double *)status.r;
- double *g_table = (double *)status.g;
- double *b_table = (double *)status.b;
+ bcm2835_isp_stats_region *inputRegions = stats->awb_stats;
+ double *rTable = (double *)status.r;
+ double *gTable = (double *)status.g;
+ double *bTable = (double *)status.b;
for (int i = 0; i < XY; i++) {
- regions[i].r_sum = input_regions[i].r_sum / r_table[i];
- regions[i].g_sum = input_regions[i].g_sum / g_table[i];
- regions[i].b_sum = input_regions[i].b_sum / b_table[i];
- regions[i].counted = input_regions[i].counted;
+ regions[i].r_sum = inputRegions[i].r_sum / rTable[i];
+ regions[i].g_sum = inputRegions[i].g_sum / gTable[i];
+ regions[i].b_sum = inputRegions[i].b_sum / bTable[i];
+ regions[i].counted = inputRegions[i].counted;
// (don't care about the uncounted value)
}
}
-void Alsc::restartAsync(StatisticsPtr &stats, Metadata *image_metadata)
+void Alsc::restartAsync(StatisticsPtr &stats, Metadata *imageMetadata)
{
LOG(RPiAlsc, Debug) << "Starting ALSC calculation";
// Get the current colour temperature. It's all we need from the
// metadata. Default to the last CT value (which could be the default).
- ct_ = get_ct(image_metadata, ct_);
+ ct_ = getCt(imageMetadata, ct_);
// We have to copy the statistics here, dividing out our best guess of
// the LSC table that the pipeline applied to them.
- AlscStatus alsc_status;
- if (image_metadata->Get("alsc.status", alsc_status) != 0) {
+ AlscStatus alscStatus;
+ if (imageMetadata->get("alsc.status", alscStatus) != 0) {
LOG(RPiAlsc, Warning)
<< "No ALSC status found for applied gains!";
for (int y = 0; y < Y; y++)
for (int x = 0; x < X; x++) {
- alsc_status.r[y][x] = 1.0;
- alsc_status.g[y][x] = 1.0;
- alsc_status.b[y][x] = 1.0;
+ alscStatus.r[y][x] = 1.0;
+ alscStatus.g[y][x] = 1.0;
+ alscStatus.b[y][x] = 1.0;
}
}
- copy_stats(statistics_, stats, alsc_status);
- frame_phase_ = 0;
- async_started_ = true;
+ copyStats(statistics_, stats, alscStatus);
+ framePhase_ = 0;
+ asyncStarted_ = true;
{
std::lock_guard<std::mutex> lock(mutex_);
- async_start_ = true;
+ asyncStart_ = true;
}
- async_signal_.notify_one();
+ asyncSignal_.notify_one();
}
-void Alsc::Prepare(Metadata *image_metadata)
+void Alsc::prepare(Metadata *imageMetadata)
{
// Count frames since we started, and since we last poked the async
// thread.
- if (frame_count_ < (int)config_.startup_frames)
- frame_count_++;
- double speed = frame_count_ < (int)config_.startup_frames
+ if (frameCount_ < (int)config_.startupFrames)
+ frameCount_++;
+ double speed = frameCount_ < (int)config_.startupFrames
? 1.0
: config_.speed;
LOG(RPiAlsc, Debug)
- << "frame_count " << frame_count_ << " speed " << speed;
+ << "frame count " << frameCount_ << " speed " << speed;
{
std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ && async_finished_)
+ if (asyncStarted_ && asyncFinished_)
fetchAsyncResults();
}
// Apply IIR filter to results and program into the pipeline.
- double *ptr = (double *)sync_results_,
- *pptr = (double *)prev_sync_results_;
- for (unsigned int i = 0;
- i < sizeof(sync_results_) / sizeof(double); i++)
+ double *ptr = (double *)syncResults_,
+ *pptr = (double *)prevSyncResults_;
+ for (unsigned int i = 0; i < sizeof(syncResults_) / sizeof(double); i++)
pptr[i] = speed * ptr[i] + (1.0 - speed) * pptr[i];
// Put output values into status metadata.
AlscStatus status;
- memcpy(status.r, prev_sync_results_[0], sizeof(status.r));
- memcpy(status.g, prev_sync_results_[1], sizeof(status.g));
- memcpy(status.b, prev_sync_results_[2], sizeof(status.b));
- image_metadata->Set("alsc.status", status);
+ memcpy(status.r, prevSyncResults_[0], sizeof(status.r));
+ memcpy(status.g, prevSyncResults_[1], sizeof(status.g));
+ memcpy(status.b, prevSyncResults_[2], sizeof(status.b));
+ imageMetadata->set("alsc.status", status);
}
-void Alsc::Process(StatisticsPtr &stats, Metadata *image_metadata)
+void Alsc::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
// Count frames since we started, and since we last poked the async
// thread.
- if (frame_phase_ < (int)config_.frame_period)
- frame_phase_++;
- if (frame_count2_ < (int)config_.startup_frames)
- frame_count2_++;
- LOG(RPiAlsc, Debug) << "frame_phase " << frame_phase_;
- if (frame_phase_ >= (int)config_.frame_period ||
- frame_count2_ < (int)config_.startup_frames) {
- if (async_started_ == false)
- restartAsync(stats, image_metadata);
+ if (framePhase_ < (int)config_.framePeriod)
+ framePhase_++;
+ if (frameCount2_ < (int)config_.startupFrames)
+ frameCount2_++;
+ LOG(RPiAlsc, Debug) << "frame_phase " << framePhase_;
+ if (framePhase_ >= (int)config_.framePeriod ||
+ frameCount2_ < (int)config_.startupFrames) {
+ if (asyncStarted_ == false)
+ restartAsync(stats, imageMetadata);
}
}
@@ -375,143 +365,140 @@ void Alsc::asyncFunc()
while (true) {
{
std::unique_lock<std::mutex> lock(mutex_);
- async_signal_.wait(lock, [&] {
- return async_start_ || async_abort_;
+ asyncSignal_.wait(lock, [&] {
+ return asyncStart_ || asyncAbort_;
});
- async_start_ = false;
- if (async_abort_)
+ asyncStart_ = false;
+ if (asyncAbort_)
break;
}
doAlsc();
{
std::lock_guard<std::mutex> lock(mutex_);
- async_finished_ = true;
+ asyncFinished_ = true;
}
- sync_signal_.notify_one();
+ syncSignal_.notify_one();
}
}
-void get_cal_table(double ct, std::vector<AlscCalibration> const &calibrations,
- double cal_table[XY])
+void getCalTable(double ct, std::vector<AlscCalibration> const &calibrations,
+ double calTable[XY])
{
if (calibrations.empty()) {
for (int i = 0; i < XY; i++)
- cal_table[i] = 1.0;
+ calTable[i] = 1.0;
LOG(RPiAlsc, Debug) << "no calibrations found";
} else if (ct <= calibrations.front().ct) {
- memcpy(cal_table, calibrations.front().table,
- XY * sizeof(double));
+ memcpy(calTable, calibrations.front().table, XY * sizeof(double));
LOG(RPiAlsc, Debug) << "using calibration for "
<< calibrations.front().ct;
} else if (ct >= calibrations.back().ct) {
- memcpy(cal_table, calibrations.back().table,
- XY * sizeof(double));
+ memcpy(calTable, calibrations.back().table, XY * sizeof(double));
LOG(RPiAlsc, Debug) << "using calibration for "
<< calibrations.back().ct;
} else {
int idx = 0;
while (ct > calibrations[idx + 1].ct)
idx++;
- double ct0 = calibrations[idx].ct,
- ct1 = calibrations[idx + 1].ct;
+ double ct0 = calibrations[idx].ct, ct1 = calibrations[idx + 1].ct;
LOG(RPiAlsc, Debug)
<< "ct is " << ct << ", interpolating between "
<< ct0 << " and " << ct1;
for (int i = 0; i < XY; i++)
- cal_table[i] =
+ calTable[i] =
(calibrations[idx].table[i] * (ct1 - ct) +
calibrations[idx + 1].table[i] * (ct - ct0)) /
(ct1 - ct0);
}
}
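The interpolation case above blends the two bracketing calibration tables linearly in colour temperature. A one-cell sketch of the same blend (illustrative numbers):

#include <iostream>

int main()
{
	double ct0 = 3000.0, ct1 = 5000.0; // bracketing calibration CTs
	double cell0 = 1.10, cell1 = 1.30; // gains for one cell in each table
	double ct = 4500.0;                // current colour temperature

	double cell = (cell0 * (ct1 - ct) + cell1 * (ct - ct0)) / (ct1 - ct0);
	std::cout << "interpolated cell gain at ct " << ct << " is " << cell << "\n";
	return 0;
}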
-void resample_cal_table(double const cal_table_in[XY],
- CameraMode const &camera_mode, double cal_table_out[XY])
+void resampleCalTable(double const calTableIn[XY],
+ CameraMode const &cameraMode, double calTableOut[XY])
{
// Precalculate and cache the x sampling locations and phases to save
// recomputing them on every row.
- int x_lo[X], x_hi[X];
+ int xLo[X], xHi[X];
double xf[X];
- double scale_x = camera_mode.sensor_width /
- (camera_mode.width * camera_mode.scale_x);
- double x_off = camera_mode.crop_x / (double)camera_mode.sensor_width;
- double x = .5 / scale_x + x_off * X - .5;
- double x_inc = 1 / scale_x;
- for (int i = 0; i < X; i++, x += x_inc) {
- x_lo[i] = floor(x);
- xf[i] = x - x_lo[i];
- x_hi[i] = std::min(x_lo[i] + 1, X - 1);
- x_lo[i] = std::max(x_lo[i], 0);
- if (!!(camera_mode.transform & libcamera::Transform::HFlip)) {
- x_lo[i] = X - 1 - x_lo[i];
- x_hi[i] = X - 1 - x_hi[i];
+ double scaleX = cameraMode.sensorWidth /
+ (cameraMode.width * cameraMode.scaleX);
+ double xOff = cameraMode.cropX / (double)cameraMode.sensorWidth;
+ double x = .5 / scaleX + xOff * X - .5;
+ double xInc = 1 / scaleX;
+ for (int i = 0; i < X; i++, x += xInc) {
+ xLo[i] = floor(x);
+ xf[i] = x - xLo[i];
+ xHi[i] = std::min(xLo[i] + 1, X - 1);
+ xLo[i] = std::max(xLo[i], 0);
+ if (!!(cameraMode.transform & libcamera::Transform::HFlip)) {
+ xLo[i] = X - 1 - xLo[i];
+ xHi[i] = X - 1 - xHi[i];
}
}
// Now march over the output table generating the new values.
- double scale_y = camera_mode.sensor_height /
- (camera_mode.height * camera_mode.scale_y);
- double y_off = camera_mode.crop_y / (double)camera_mode.sensor_height;
- double y = .5 / scale_y + y_off * Y - .5;
- double y_inc = 1 / scale_y;
- for (int j = 0; j < Y; j++, y += y_inc) {
- int y_lo = floor(y);
- double yf = y - y_lo;
- int y_hi = std::min(y_lo + 1, Y - 1);
- y_lo = std::max(y_lo, 0);
- if (!!(camera_mode.transform & libcamera::Transform::VFlip)) {
- y_lo = Y - 1 - y_lo;
- y_hi = Y - 1 - y_hi;
+ double scaleY = cameraMode.sensorHeight /
+ (cameraMode.height * cameraMode.scaleY);
+ double yOff = cameraMode.cropY / (double)cameraMode.sensorHeight;
+ double y = .5 / scaleY + yOff * Y - .5;
+ double yInc = 1 / scaleY;
+ for (int j = 0; j < Y; j++, y += yInc) {
+ int yLo = floor(y);
+ double yf = y - yLo;
+ int yHi = std::min(yLo + 1, Y - 1);
+ yLo = std::max(yLo, 0);
+ if (!!(cameraMode.transform & libcamera::Transform::VFlip)) {
+ yLo = Y - 1 - yLo;
+ yHi = Y - 1 - yHi;
}
- double const *row_above = cal_table_in + X * y_lo;
- double const *row_below = cal_table_in + X * y_hi;
+ double const *rowAbove = calTableIn + X * yLo;
+ double const *rowBelow = calTableIn + X * yHi;
for (int i = 0; i < X; i++) {
- double above = row_above[x_lo[i]] * (1 - xf[i]) +
- row_above[x_hi[i]] * xf[i];
- double below = row_below[x_lo[i]] * (1 - xf[i]) +
- row_below[x_hi[i]] * xf[i];
- *(cal_table_out++) = above * (1 - yf) + below * yf;
+ double above = rowAbove[xLo[i]] * (1 - xf[i]) +
+ rowAbove[xHi[i]] * xf[i];
+ double below = rowBelow[xLo[i]] * (1 - xf[i]) +
+ rowBelow[xHi[i]] * xf[i];
+ *(calTableOut++) = above * (1 - yf) + below * yf;
}
}
}
// Calculate chrominance statistics (R/G and B/G) for each region.
static_assert(XY == AWB_REGIONS, "ALSC/AWB statistics region mismatch");
-static void calculate_Cr_Cb(bcm2835_isp_stats_region *awb_region, double Cr[XY],
- double Cb[XY], uint32_t min_count, uint16_t min_G)
+static void calculateCrCb(bcm2835_isp_stats_region *awbRegion, double cr[XY],
+ double cb[XY], uint32_t minCount, uint16_t minG)
{
for (int i = 0; i < XY; i++) {
- bcm2835_isp_stats_region &zone = awb_region[i];
- if (zone.counted <= min_count ||
- zone.g_sum / zone.counted <= min_G) {
- Cr[i] = Cb[i] = INSUFFICIENT_DATA;
+ bcm2835_isp_stats_region &zone = awbRegion[i];
+ if (zone.counted <= minCount ||
+ zone.g_sum / zone.counted <= minG) {
+ cr[i] = cb[i] = InsufficientData;
continue;
}
- Cr[i] = zone.r_sum / (double)zone.g_sum;
- Cb[i] = zone.b_sum / (double)zone.g_sum;
+ cr[i] = zone.r_sum / (double)zone.g_sum;
+ cb[i] = zone.b_sum / (double)zone.g_sum;
}
}
-static void apply_cal_table(double const cal_table[XY], double C[XY])
+static void applyCalTable(double const calTable[XY], double C[XY])
{
for (int i = 0; i < XY; i++)
- if (C[i] != INSUFFICIENT_DATA)
- C[i] *= cal_table[i];
+ if (C[i] != InsufficientData)
+ C[i] *= calTable[i];
}
-void compensate_lambdas_for_cal(double const cal_table[XY],
- double const old_lambdas[XY],
- double new_lambdas[XY])
+void compensateLambdasForCal(double const calTable[XY],
+ double const oldLambdas[XY],
+ double newLambdas[XY])
{
- double min_new_lambda = std::numeric_limits<double>::max();
+ double minNewLambda = std::numeric_limits<double>::max();
for (int i = 0; i < XY; i++) {
- new_lambdas[i] = old_lambdas[i] * cal_table[i];
- min_new_lambda = std::min(min_new_lambda, new_lambdas[i]);
+ newLambdas[i] = oldLambdas[i] * calTable[i];
+ minNewLambda = std::min(minNewLambda, newLambdas[i]);
}
for (int i = 0; i < XY; i++)
- new_lambdas[i] /= min_new_lambda;
+ newLambdas[i] /= minNewLambda;
}
-[[maybe_unused]] static void print_cal_table(double const C[XY])
+[[maybe_unused]] static void printCalTable(double const C[XY])
{
printf("table: [\n");
for (int j = 0; j < Y; j++) {
@@ -527,31 +514,29 @@ void compensate_lambdas_for_cal(double const cal_table[XY],
// Compute weight out of 1.0 which reflects how similar we wish to make the
// colours of these two regions.
-static double compute_weight(double C_i, double C_j, double sigma)
+static double computeWeight(double Ci, double Cj, double sigma)
{
- if (C_i == INSUFFICIENT_DATA || C_j == INSUFFICIENT_DATA)
+ if (Ci == InsufficientData || Cj == InsufficientData)
return 0;
- double diff = (C_i - C_j) / sigma;
+ double diff = (Ci - Cj) / sigma;
return exp(-diff * diff / 2);
}
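The weight above is a Gaussian on the chrominance difference between neighbouring cells, dropping to zero when either cell had insufficient data. A small usage sketch (the sigma value is illustrative):

#include <cmath>
#include <iostream>

static const double InsufficientData = -1.0;

static double weight(double ci, double cj, double sigma)
{
	if (ci == InsufficientData || cj == InsufficientData)
		return 0;
	double diff = (ci - cj) / sigma;
	return std::exp(-diff * diff / 2);
}

int main()
{
	std::cout << weight(0.50, 0.51, 0.01) << "\n";             // ~0.61, similar cells
	std::cout << weight(0.50, 0.55, 0.01) << "\n";             // ~4e-6, dissimilar cells
	std::cout << weight(0.50, InsufficientData, 0.01) << "\n"; // 0, no data
	return 0;
}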
// Compute all weights.
-static void compute_W(double const C[XY], double sigma, double W[XY][4])
+static void computeW(double const C[XY], double sigma, double W[XY][4])
{
for (int i = 0; i < XY; i++) {
// Start with neighbour above and go clockwise.
- W[i][0] = i >= X ? compute_weight(C[i], C[i - X], sigma) : 0;
- W[i][1] = i % X < X - 1 ? compute_weight(C[i], C[i + 1], sigma)
- : 0;
- W[i][2] =
- i < XY - X ? compute_weight(C[i], C[i + X], sigma) : 0;
- W[i][3] = i % X ? compute_weight(C[i], C[i - 1], sigma) : 0;
+ W[i][0] = i >= X ? computeWeight(C[i], C[i - X], sigma) : 0;
+ W[i][1] = i % X < X - 1 ? computeWeight(C[i], C[i + 1], sigma) : 0;
+ W[i][2] = i < XY - X ? computeWeight(C[i], C[i + X], sigma) : 0;
+ W[i][3] = i % X ? computeWeight(C[i], C[i - 1], sigma) : 0;
}
}
// Compute M, the large but sparse matrix such that M * lambdas = 0.
-static void construct_M(double const C[XY], double const W[XY][4],
- double M[XY][4])
+static void constructM(double const C[XY], double const W[XY][4],
+ double M[XY][4])
{
double epsilon = 0.001;
for (int i = 0; i < XY; i++) {
@@ -560,108 +545,96 @@ static void construct_M(double const C[XY], double const W[XY][4],
int m = !!(i >= X) + !!(i % X < X - 1) + !!(i < XY - X) +
!!(i % X); // total number of neighbours
// we'll divide the diagonal out straight away
- double diagonal =
- (epsilon + W[i][0] + W[i][1] + W[i][2] + W[i][3]) *
- C[i];
- M[i][0] = i >= X ? (W[i][0] * C[i - X] + epsilon / m * C[i]) /
- diagonal
- : 0;
- M[i][1] = i % X < X - 1
- ? (W[i][1] * C[i + 1] + epsilon / m * C[i]) /
- diagonal
- : 0;
- M[i][2] = i < XY - X
- ? (W[i][2] * C[i + X] + epsilon / m * C[i]) /
- diagonal
- : 0;
- M[i][3] = i % X ? (W[i][3] * C[i - 1] + epsilon / m * C[i]) /
- diagonal
- : 0;
+ double diagonal = (epsilon + W[i][0] + W[i][1] + W[i][2] + W[i][3]) * C[i];
+ M[i][0] = i >= X ? (W[i][0] * C[i - X] + epsilon / m * C[i]) / diagonal : 0;
+ M[i][1] = i % X < X - 1 ? (W[i][1] * C[i + 1] + epsilon / m * C[i]) / diagonal : 0;
+ M[i][2] = i < XY - X ? (W[i][2] * C[i + X] + epsilon / m * C[i]) / diagonal : 0;
+ M[i][3] = i % X ? (W[i][3] * C[i - 1] + epsilon / m * C[i]) / diagonal : 0;
}
}
// In the compute_lambda_ functions, note that the matrix coefficients for the
// left/right neighbours are zero down the left/right edges, so we don't
// need to test the i value to exclude them.
-static double compute_lambda_bottom(int i, double const M[XY][4],
- double lambda[XY])
+static double computeLambdaBottom(int i, double const M[XY][4],
+ double lambda[XY])
{
return M[i][1] * lambda[i + 1] + M[i][2] * lambda[i + X] +
M[i][3] * lambda[i - 1];
}
-static double compute_lambda_bottom_start(int i, double const M[XY][4],
- double lambda[XY])
+static double computeLambdaBottomStart(int i, double const M[XY][4],
+ double lambda[XY])
{
return M[i][1] * lambda[i + 1] + M[i][2] * lambda[i + X];
}
-static double compute_lambda_interior(int i, double const M[XY][4],
- double lambda[XY])
+static double computeLambdaInterior(int i, double const M[XY][4],
+ double lambda[XY])
{
return M[i][0] * lambda[i - X] + M[i][1] * lambda[i + 1] +
M[i][2] * lambda[i + X] + M[i][3] * lambda[i - 1];
}
-static double compute_lambda_top(int i, double const M[XY][4],
- double lambda[XY])
+static double computeLambdaTop(int i, double const M[XY][4],
+ double lambda[XY])
{
return M[i][0] * lambda[i - X] + M[i][1] * lambda[i + 1] +
M[i][3] * lambda[i - 1];
}
-static double compute_lambda_top_end(int i, double const M[XY][4],
- double lambda[XY])
+static double computeLambdaTopEnd(int i, double const M[XY][4],
+ double lambda[XY])
{
return M[i][0] * lambda[i - X] + M[i][3] * lambda[i - 1];
}
// Gauss-Seidel iteration with over-relaxation.
-static double gauss_seidel2_SOR(double const M[XY][4], double omega,
- double lambda[XY], double lambda_bound)
+static double gaussSeidel2Sor(double const M[XY][4], double omega,
+ double lambda[XY], double lambdaBound)
{
- const double min = 1 - lambda_bound, max = 1 + lambda_bound;
- double old_lambda[XY];
+ const double min = 1 - lambdaBound, max = 1 + lambdaBound;
+ double oldLambda[XY];
int i;
for (i = 0; i < XY; i++)
- old_lambda[i] = lambda[i];
- lambda[0] = compute_lambda_bottom_start(0, M, lambda);
+ oldLambda[i] = lambda[i];
+ lambda[0] = computeLambdaBottomStart(0, M, lambda);
lambda[0] = std::clamp(lambda[0], min, max);
for (i = 1; i < X; i++) {
- lambda[i] = compute_lambda_bottom(i, M, lambda);
+ lambda[i] = computeLambdaBottom(i, M, lambda);
lambda[i] = std::clamp(lambda[i], min, max);
}
for (; i < XY - X; i++) {
- lambda[i] = compute_lambda_interior(i, M, lambda);
+ lambda[i] = computeLambdaInterior(i, M, lambda);
lambda[i] = std::clamp(lambda[i], min, max);
}
for (; i < XY - 1; i++) {
- lambda[i] = compute_lambda_top(i, M, lambda);
+ lambda[i] = computeLambdaTop(i, M, lambda);
lambda[i] = std::clamp(lambda[i], min, max);
}
- lambda[i] = compute_lambda_top_end(i, M, lambda);
+ lambda[i] = computeLambdaTopEnd(i, M, lambda);
lambda[i] = std::clamp(lambda[i], min, max);
// Also solve the system from bottom to top, to help spread the updates
// better.
- lambda[i] = compute_lambda_top_end(i, M, lambda);
+ lambda[i] = computeLambdaTopEnd(i, M, lambda);
lambda[i] = std::clamp(lambda[i], min, max);
for (i = XY - 2; i >= XY - X; i--) {
- lambda[i] = compute_lambda_top(i, M, lambda);
+ lambda[i] = computeLambdaTop(i, M, lambda);
lambda[i] = std::clamp(lambda[i], min, max);
}
for (; i >= X; i--) {
- lambda[i] = compute_lambda_interior(i, M, lambda);
+ lambda[i] = computeLambdaInterior(i, M, lambda);
lambda[i] = std::clamp(lambda[i], min, max);
}
for (; i >= 1; i--) {
- lambda[i] = compute_lambda_bottom(i, M, lambda);
+ lambda[i] = computeLambdaBottom(i, M, lambda);
lambda[i] = std::clamp(lambda[i], min, max);
}
- lambda[0] = compute_lambda_bottom_start(0, M, lambda);
+ lambda[0] = computeLambdaBottomStart(0, M, lambda);
lambda[0] = std::clamp(lambda[0], min, max);
- double max_diff = 0;
+ double maxDiff = 0;
for (i = 0; i < XY; i++) {
- lambda[i] = old_lambda[i] + (lambda[i] - old_lambda[i]) * omega;
- if (fabs(lambda[i] - old_lambda[i]) > fabs(max_diff))
- max_diff = lambda[i] - old_lambda[i];
+ lambda[i] = oldLambda[i] + (lambda[i] - oldLambda[i]) * omega;
+ if (fabs(lambda[i] - oldLambda[i]) > fabs(maxDiff))
+ maxDiff = lambda[i] - oldLambda[i];
}
- return max_diff;
+ return maxDiff;
}
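The final loop above applies over-relaxation: each lambda is pushed past the plain Gauss-Seidel result by the factor omega (1.3 by default in the read() above), and the largest step is returned for the convergence check. A toy single-value sketch (the real code also clamps the result to 1 +/- lambdaBound):

#include <iostream>

int main()
{
	double omega = 1.3;     // default from the ALSC config above
	double oldLambda = 1.00;
	double gsLambda = 1.02; // value after one Gauss-Seidel sweep

	double newLambda = oldLambda + (gsLambda - oldLambda) * omega;
	std::cout << "lambda " << newLambda << " (step "
		  << (newLambda - oldLambda) << ")\n";
	return 0;
}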
// Normalise the values so that the smallest value is 1.
@@ -683,105 +656,99 @@ static void reaverage(Span<double> data)
d *= ratio;
}
-static void run_matrix_iterations(double const C[XY], double lambda[XY],
- double const W[XY][4], double omega,
- int n_iter, double threshold, double lambda_bound)
+static void runMatrixIterations(double const C[XY], double lambda[XY],
+ double const W[XY][4], double omega,
+ int nIter, double threshold, double lambdaBound)
{
double M[XY][4];
- construct_M(C, W, M);
- double last_max_diff = std::numeric_limits<double>::max();
- for (int i = 0; i < n_iter; i++) {
- double max_diff = fabs(gauss_seidel2_SOR(M, omega, lambda, lambda_bound));
- if (max_diff < threshold) {
+ constructM(C, W, M);
+ double lastMaxDiff = std::numeric_limits<double>::max();
+ for (int i = 0; i < nIter; i++) {
+ double maxDiff = fabs(gaussSeidel2Sor(M, omega, lambda, lambdaBound));
+ if (maxDiff < threshold) {
LOG(RPiAlsc, Debug)
<< "Stop after " << i + 1 << " iterations";
break;
}
// this happens very occasionally (so make a note), though
// doesn't seem to matter
- if (max_diff > last_max_diff)
+ if (maxDiff > lastMaxDiff)
LOG(RPiAlsc, Debug)
- << "Iteration " << i << ": max_diff gone up "
- << last_max_diff << " to " << max_diff;
- last_max_diff = max_diff;
+ << "Iteration " << i << ": maxDiff gone up "
+ << lastMaxDiff << " to " << maxDiff;
+ lastMaxDiff = maxDiff;
}
// We're going to normalise the lambdas so the total average is 1.
reaverage({ lambda, XY });
}
-static void add_luminance_rb(double result[XY], double const lambda[XY],
- double const luminance_lut[XY],
- double luminance_strength)
+static void addLuminanceRb(double result[XY], double const lambda[XY],
+ double const luminanceLut[XY],
+ double luminanceStrength)
{
for (int i = 0; i < XY; i++)
- result[i] = lambda[i] *
- ((luminance_lut[i] - 1) * luminance_strength + 1);
+ result[i] = lambda[i] * ((luminanceLut[i] - 1) * luminanceStrength + 1);
}
-static void add_luminance_g(double result[XY], double lambda,
- double const luminance_lut[XY],
- double luminance_strength)
+static void addLuminanceG(double result[XY], double lambda,
+ double const luminanceLut[XY],
+ double luminanceStrength)
{
for (int i = 0; i < XY; i++)
- result[i] = lambda *
- ((luminance_lut[i] - 1) * luminance_strength + 1);
+ result[i] = lambda * ((luminanceLut[i] - 1) * luminanceStrength + 1);
}
-void add_luminance_to_tables(double results[3][Y][X], double const lambda_r[XY],
- double lambda_g, double const lambda_b[XY],
- double const luminance_lut[XY],
- double luminance_strength)
+void addLuminanceToTables(double results[3][Y][X], double const lambdaR[XY],
+ double lambdaG, double const lambdaB[XY],
+ double const luminanceLut[XY],
+ double luminanceStrength)
{
- add_luminance_rb((double *)results[0], lambda_r, luminance_lut,
- luminance_strength);
- add_luminance_g((double *)results[1], lambda_g, luminance_lut,
- luminance_strength);
- add_luminance_rb((double *)results[2], lambda_b, luminance_lut,
- luminance_strength);
+ addLuminanceRb((double *)results[0], lambdaR, luminanceLut, luminanceStrength);
+ addLuminanceG((double *)results[1], lambdaG, luminanceLut, luminanceStrength);
+ addLuminanceRb((double *)results[2], lambdaB, luminanceLut, luminanceStrength);
normalise((double *)results, 3 * XY);
}
void Alsc::doAlsc()
{
- double Cr[XY], Cb[XY], Wr[XY][4], Wb[XY][4], cal_table_r[XY],
- cal_table_b[XY], cal_table_tmp[XY];
+ double cr[XY], cb[XY], wr[XY][4], wb[XY][4], calTableR[XY], calTableB[XY], calTableTmp[XY];
// Calculate our R/B ("Cr"/"Cb") colour statistics, and assess which are
// usable.
- calculate_Cr_Cb(statistics_, Cr, Cb, config_.min_count, config_.min_G);
+ calculateCrCb(statistics_, cr, cb, config_.minCount, config_.minG);
// Fetch the new calibrations (if any) for this CT. Resample them in
// case the camera mode is not full-frame.
- get_cal_table(ct_, config_.calibrations_Cr, cal_table_tmp);
- resample_cal_table(cal_table_tmp, camera_mode_, cal_table_r);
- get_cal_table(ct_, config_.calibrations_Cb, cal_table_tmp);
- resample_cal_table(cal_table_tmp, camera_mode_, cal_table_b);
+ getCalTable(ct_, config_.calibrationsCr, calTableTmp);
+ resampleCalTable(calTableTmp, cameraMode_, calTableR);
+ getCalTable(ct_, config_.calibrationsCb, calTableTmp);
+ resampleCalTable(calTableTmp, cameraMode_, calTableB);
// You could print out the cal tables for this image here, if you're
// tuning the algorithm...
// Apply any calibration to the statistics, so the adaptive algorithm
// makes only the extra adjustments.
- apply_cal_table(cal_table_r, Cr);
- apply_cal_table(cal_table_b, Cb);
+ applyCalTable(calTableR, cr);
+ applyCalTable(calTableB, cb);
// Compute weights between zones.
- compute_W(Cr, config_.sigma_Cr, Wr);
- compute_W(Cb, config_.sigma_Cb, Wb);
+ computeW(cr, config_.sigmaCr, wr);
+ computeW(cb, config_.sigmaCb, wb);
// Run Gauss-Seidel iterations over the resulting matrix, for R and B.
- run_matrix_iterations(Cr, lambda_r_, Wr, config_.omega, config_.n_iter,
- config_.threshold, config_.lambda_bound);
- run_matrix_iterations(Cb, lambda_b_, Wb, config_.omega, config_.n_iter,
- config_.threshold, config_.lambda_bound);
+ runMatrixIterations(cr, lambdaR_, wr, config_.omega, config_.nIter,
+ config_.threshold, config_.lambdaBound);
+ runMatrixIterations(cb, lambdaB_, wb, config_.omega, config_.nIter,
+ config_.threshold, config_.lambdaBound);
// Fold the calibrated gains into our final lambda values. (Note that on
// the next run, we re-start with the lambda values that don't have the
// calibration gains included.)
- compensate_lambdas_for_cal(cal_table_r, lambda_r_, async_lambda_r_);
- compensate_lambdas_for_cal(cal_table_b, lambda_b_, async_lambda_b_);
+ compensateLambdasForCal(calTableR, lambdaR_, asyncLambdaR_);
+ compensateLambdasForCal(calTableB, lambdaB_, asyncLambdaB_);
// Fold in the luminance table at the appropriate strength.
- add_luminance_to_tables(async_results_, async_lambda_r_, 1.0,
- async_lambda_b_, luminance_table_,
- config_.luminance_strength);
+ addLuminanceToTables(asyncResults_, asyncLambdaR_, 1.0,
+ asyncLambdaB_, luminanceTable_,
+ config_.luminanceStrength);
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Alsc(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
diff --git a/src/ipa/raspberrypi/controller/rpi/alsc.hpp b/src/ipa/raspberrypi/controller/rpi/alsc.hpp
index d1dbe0d1..7a0949d1 100644
--- a/src/ipa/raspberrypi/controller/rpi/alsc.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/alsc.hpp
@@ -24,24 +24,24 @@ struct AlscCalibration {
struct AlscConfig {
// Only repeat the ALSC calculation every "this many" frames
- uint16_t frame_period;
+ uint16_t framePeriod;
// number of initial frames for which speed taken as 1.0 (maximum)
- uint16_t startup_frames;
+ uint16_t startupFrames;
// IIR filter speed applied to algorithm results
double speed;
- double sigma_Cr;
- double sigma_Cb;
- double min_count;
- uint16_t min_G;
+ double sigmaCr;
+ double sigmaCb;
+ double minCount;
+ uint16_t minG;
double omega;
- uint32_t n_iter;
- double luminance_lut[ALSC_CELLS_X * ALSC_CELLS_Y];
- double luminance_strength;
- std::vector<AlscCalibration> calibrations_Cr;
- std::vector<AlscCalibration> calibrations_Cb;
- double default_ct; // colour temperature if no metadata found
+ uint32_t nIter;
+ double luminanceLut[ALSC_CELLS_X * ALSC_CELLS_Y];
+ double luminanceStrength;
+ std::vector<AlscCalibration> calibrationsCr;
+ std::vector<AlscCalibration> calibrationsCb;
+ double defaultCt; // colour temperature if no metadata found
double threshold; // iteration termination threshold
- double lambda_bound; // upper/lower bound for lambda from a value of 1
+ double lambdaBound; // upper/lower bound for lambda from a value of 1
};
class Alsc : public Algorithm
@@ -49,58 +49,58 @@ class Alsc : public Algorithm
public:
Alsc(Controller *controller = NULL);
~Alsc();
- char const *Name() const override;
- void Initialise() override;
- void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
- void Read(boost::property_tree::ptree const &params) override;
- void Prepare(Metadata *image_metadata) override;
- void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
+ char const *name() const override;
+ void initialise() override;
+ void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
+ void read(boost::property_tree::ptree const &params) override;
+ void prepare(Metadata *imageMetadata) override;
+ void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
private:
// configuration is read-only, and available to both threads
AlscConfig config_;
- bool first_time_;
- CameraMode camera_mode_;
- double luminance_table_[ALSC_CELLS_X * ALSC_CELLS_Y];
- std::thread async_thread_;
+ bool firstTime_;
+ CameraMode cameraMode_;
+ double luminanceTable_[ALSC_CELLS_X * ALSC_CELLS_Y];
+ std::thread asyncThread_;
void asyncFunc(); // asynchronous thread function
std::mutex mutex_;
// condvar for async thread to wait on
- std::condition_variable async_signal_;
+ std::condition_variable asyncSignal_;
// condvar for synchronous thread to wait on
- std::condition_variable sync_signal_;
+ std::condition_variable syncSignal_;
// for sync thread to check if async thread finished (requires mutex)
- bool async_finished_;
+ bool asyncFinished_;
// for async thread to check if it's been told to run (requires mutex)
- bool async_start_;
+ bool asyncStart_;
// for async thread to check if it's been told to quit (requires mutex)
- bool async_abort_;
+ bool asyncAbort_;
// The following are only for the synchronous thread to use:
// for sync thread to note it has asked the async thread to run
- bool async_started_;
- // counts up to frame_period before restarting the async thread
- int frame_phase_;
- // counts up to startup_frames
- int frame_count_;
- // counts up to startup_frames for Process function
- int frame_count2_;
- double sync_results_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
- double prev_sync_results_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
+ bool asyncStarted_;
+ // counts up to framePeriod before restarting the async thread
+ int framePhase_;
+ // counts up to startupFrames
+ int frameCount_;
+ // counts up to startupFrames for Process function
+ int frameCount2_;
+ double syncResults_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
+ double prevSyncResults_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
void waitForAysncThread();
// The following are for the asynchronous thread to use, though the main
// thread can set/reset them if the async thread is known to be idle:
- void restartAsync(StatisticsPtr &stats, Metadata *image_metadata);
+ void restartAsync(StatisticsPtr &stats, Metadata *imageMetadata);
// copy out the results from the async thread so that it can be restarted
void fetchAsyncResults();
double ct_;
bcm2835_isp_stats_region statistics_[ALSC_CELLS_Y * ALSC_CELLS_X];
- double async_results_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
- double async_lambda_r_[ALSC_CELLS_X * ALSC_CELLS_Y];
- double async_lambda_b_[ALSC_CELLS_X * ALSC_CELLS_Y];
+ double asyncResults_[3][ALSC_CELLS_Y][ALSC_CELLS_X];
+ double asyncLambdaR_[ALSC_CELLS_X * ALSC_CELLS_Y];
+ double asyncLambdaB_[ALSC_CELLS_X * ALSC_CELLS_Y];
void doAlsc();
- double lambda_r_[ALSC_CELLS_X * ALSC_CELLS_Y];
- double lambda_b_[ALSC_CELLS_X * ALSC_CELLS_Y];
+ double lambdaR_[ALSC_CELLS_X * ALSC_CELLS_Y];
+ double lambdaB_[ALSC_CELLS_X * ALSC_CELLS_Y];
};
} // namespace RPiController
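The asyncSignal_/syncSignal_ condition variables and the asyncStart_/asyncFinished_/asyncAbort_ flags renamed above implement the hand-off between the camera thread and the ALSC worker thread; the same shape appears in Awb::asyncFunc() further down this patch. A stripped-down sketch of that hand-off, with assumed class and member names:

#include <condition_variable>
#include <mutex>

class AsyncWorker
{
public:
	void trigger() /* camera thread: ask the worker to run once */
	{
		{
			std::lock_guard<std::mutex> lock(mutex_);
			asyncStart_ = true;
		}
		asyncSignal_.notify_one();
	}

	void workerLoop() /* body of asyncThread_ */
	{
		while (true) {
			{
				std::unique_lock<std::mutex> lock(mutex_);
				asyncSignal_.wait(lock, [&] { return asyncStart_ || asyncAbort_; });
				asyncStart_ = false;
				if (asyncAbort_)
					break;
			}
			/* ... expensive work (doAlsc() / doAwb()) ... */
			{
				std::lock_guard<std::mutex> lock(mutex_);
				asyncFinished_ = true;
			}
			syncSignal_.notify_one();
		}
	}

private:
	std::mutex mutex_;
	std::condition_variable asyncSignal_, syncSignal_;
	bool asyncStart_ = false, asyncFinished_ = false, asyncAbort_ = false;
};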
diff --git a/src/ipa/raspberrypi/controller/rpi/awb.cpp b/src/ipa/raspberrypi/controller/rpi/awb.cpp
index d4c93447..07791e8b 100644
--- a/src/ipa/raspberrypi/controller/rpi/awb.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/awb.cpp
@@ -24,33 +24,33 @@ LOG_DEFINE_CATEGORY(RPiAwb)
// todo - the locking in this algorithm needs some tidying up as has been done
// elsewhere (ALSC and AGC).
-void AwbMode::Read(boost::property_tree::ptree const &params)
+void AwbMode::read(boost::property_tree::ptree const &params)
{
- ct_lo = params.get<double>("lo");
- ct_hi = params.get<double>("hi");
+ ctLo = params.get<double>("lo");
+ ctHi = params.get<double>("hi");
}
-void AwbPrior::Read(boost::property_tree::ptree const &params)
+void AwbPrior::read(boost::property_tree::ptree const &params)
{
lux = params.get<double>("lux");
- prior.Read(params.get_child("prior"));
+ prior.read(params.get_child("prior"));
}
-static void read_ct_curve(Pwl &ct_r, Pwl &ct_b,
- boost::property_tree::ptree const &params)
+static void readCtCurve(Pwl &ctR, Pwl &ctB,
+ boost::property_tree::ptree const &params)
{
int num = 0;
for (auto it = params.begin(); it != params.end(); it++) {
double ct = it->second.get_value<double>();
- assert(it == params.begin() || ct != ct_r.Domain().end);
+ assert(it == params.begin() || ct != ctR.domain().end);
if (++it == params.end())
throw std::runtime_error(
"AwbConfig: incomplete CT curve entry");
- ct_r.Append(ct, it->second.get_value<double>());
+ ctR.append(ct, it->second.get_value<double>());
if (++it == params.end())
throw std::runtime_error(
"AwbConfig: incomplete CT curve entry");
- ct_b.Append(ct, it->second.get_value<double>());
+ ctB.append(ct, it->second.get_value<double>());
num++;
}
if (num < 2)
@@ -58,22 +58,21 @@ static void read_ct_curve(Pwl &ct_r, Pwl &ct_b,
"AwbConfig: insufficient points in CT curve");
}
-void AwbConfig::Read(boost::property_tree::ptree const &params)
+void AwbConfig::read(boost::property_tree::ptree const &params)
{
bayes = params.get<int>("bayes", 1);
- frame_period = params.get<uint16_t>("frame_period", 10);
- startup_frames = params.get<uint16_t>("startup_frames", 10);
- convergence_frames = params.get<unsigned int>("convergence_frames", 3);
+ framePeriod = params.get<uint16_t>("framePeriod", 10);
+ startupFrames = params.get<uint16_t>("startupFrames", 10);
+ convergenceFrames = params.get<unsigned int>("convergence_frames", 3);
speed = params.get<double>("speed", 0.05);
if (params.get_child_optional("ct_curve"))
- read_ct_curve(ct_r, ct_b, params.get_child("ct_curve"));
+ readCtCurve(ctR, ctB, params.get_child("ct_curve"));
if (params.get_child_optional("priors")) {
for (auto &p : params.get_child("priors")) {
AwbPrior prior;
- prior.Read(p.second);
+ prior.read(p.second);
if (!priors.empty() && prior.lux <= priors.back().lux)
- throw std::runtime_error(
- "AwbConfig: Prior must be ordered in increasing lux value");
+ throw std::runtime_error("AwbConfig: Prior must be ordered in increasing lux value");
priors.push_back(prior);
}
if (priors.empty())
@@ -82,177 +81,170 @@ void AwbConfig::Read(boost::property_tree::ptree const &params)
}
if (params.get_child_optional("modes")) {
for (auto &p : params.get_child("modes")) {
- modes[p.first].Read(p.second);
- if (default_mode == nullptr)
- default_mode = &modes[p.first];
+ modes[p.first].read(p.second);
+ if (defaultMode == nullptr)
+ defaultMode = &modes[p.first];
}
- if (default_mode == nullptr)
- throw std::runtime_error(
- "AwbConfig: no AWB modes configured");
+ if (defaultMode == nullptr)
+ throw std::runtime_error("AwbConfig: no AWB modes configured");
}
- min_pixels = params.get<double>("min_pixels", 16.0);
- min_G = params.get<uint16_t>("min_G", 32);
- min_regions = params.get<uint32_t>("min_regions", 10);
- delta_limit = params.get<double>("delta_limit", 0.2);
- coarse_step = params.get<double>("coarse_step", 0.2);
- transverse_pos = params.get<double>("transverse_pos", 0.01);
- transverse_neg = params.get<double>("transverse_neg", 0.01);
- if (transverse_pos <= 0 || transverse_neg <= 0)
- throw std::runtime_error(
- "AwbConfig: transverse_pos/neg must be > 0");
- sensitivity_r = params.get<double>("sensitivity_r", 1.0);
- sensitivity_b = params.get<double>("sensitivity_b", 1.0);
+ minPixels = params.get<double>("min_pixels", 16.0);
+ minG = params.get<uint16_t>("min_G", 32);
+ minRegions = params.get<uint32_t>("min_regions", 10);
+ deltaLimit = params.get<double>("delta_limit", 0.2);
+ coarseStep = params.get<double>("coarse_step", 0.2);
+ transversePos = params.get<double>("transverse_pos", 0.01);
+ transverseNeg = params.get<double>("transverse_neg", 0.01);
+ if (transversePos <= 0 || transverseNeg <= 0)
+ throw std::runtime_error("AwbConfig: transverse_pos/neg must be > 0");
+ sensitivityR = params.get<double>("sensitivity_r", 1.0);
+ sensitivityB = params.get<double>("sensitivity_b", 1.0);
if (bayes) {
- if (ct_r.Empty() || ct_b.Empty() || priors.empty() ||
- default_mode == nullptr) {
+ if (ctR.empty() || ctB.empty() || priors.empty() ||
+ defaultMode == nullptr) {
LOG(RPiAwb, Warning)
<< "Bayesian AWB mis-configured - switch to Grey method";
bayes = false;
}
}
- fast = params.get<int>(
- "fast", bayes); // default to fast for Bayesian, otherwise slow
- whitepoint_r = params.get<double>("whitepoint_r", 0.0);
- whitepoint_b = params.get<double>("whitepoint_b", 0.0);
+ fast = params.get<int>("fast", bayes); // default to fast for Bayesian, otherwise slow
+ whitepointR = params.get<double>("whitepoint_r", 0.0);
+ whitepointB = params.get<double>("whitepoint_b", 0.0);
if (bayes == false)
- sensitivity_r = sensitivity_b =
- 1.0; // nor do sensitivities make any sense
+ sensitivityR = sensitivityB = 1.0; // nor do sensitivities make any sense
}
Awb::Awb(Controller *controller)
: AwbAlgorithm(controller)
{
- async_abort_ = async_start_ = async_started_ = async_finished_ = false;
+ asyncAbort_ = asyncStart_ = asyncStarted_ = asyncFinished_ = false;
mode_ = nullptr;
- manual_r_ = manual_b_ = 0.0;
- first_switch_mode_ = true;
- async_thread_ = std::thread(std::bind(&Awb::asyncFunc, this));
+ manualR_ = manualB_ = 0.0;
+ firstSwitchMode_ = true;
+ asyncThread_ = std::thread(std::bind(&Awb::asyncFunc, this));
}
Awb::~Awb()
{
{
std::lock_guard<std::mutex> lock(mutex_);
- async_abort_ = true;
+ asyncAbort_ = true;
}
- async_signal_.notify_one();
- async_thread_.join();
+ asyncSignal_.notify_one();
+ asyncThread_.join();
}
-char const *Awb::Name() const
+char const *Awb::name() const
{
return NAME;
}
-void Awb::Read(boost::property_tree::ptree const &params)
+void Awb::read(boost::property_tree::ptree const &params)
{
- config_.Read(params);
+ config_.read(params);
}
-void Awb::Initialise()
+void Awb::initialise()
{
- frame_count_ = frame_phase_ = 0;
+ frameCount_ = framePhase_ = 0;
// Put something sane into the status that we are filtering towards,
// just in case the first few frames don't have anything meaningful in
// them.
- if (!config_.ct_r.Empty() && !config_.ct_b.Empty()) {
- sync_results_.temperature_K = config_.ct_r.Domain().Clip(4000);
- sync_results_.gain_r =
- 1.0 / config_.ct_r.Eval(sync_results_.temperature_K);
- sync_results_.gain_g = 1.0;
- sync_results_.gain_b =
- 1.0 / config_.ct_b.Eval(sync_results_.temperature_K);
+ if (!config_.ctR.empty() && !config_.ctB.empty()) {
+ syncResults_.temperatureK = config_.ctR.domain().clip(4000);
+ syncResults_.gainR = 1.0 / config_.ctR.eval(syncResults_.temperatureK);
+ syncResults_.gainG = 1.0;
+ syncResults_.gainB = 1.0 / config_.ctB.eval(syncResults_.temperatureK);
} else {
// random values just to stop the world blowing up
- sync_results_.temperature_K = 4500;
- sync_results_.gain_r = sync_results_.gain_g =
- sync_results_.gain_b = 1.0;
+ syncResults_.temperatureK = 4500;
+ syncResults_.gainR = syncResults_.gainG = syncResults_.gainB = 1.0;
}
- prev_sync_results_ = sync_results_;
- async_results_ = sync_results_;
+ prevSyncResults_ = syncResults_;
+ asyncResults_ = syncResults_;
}
-bool Awb::IsPaused() const
+bool Awb::isPaused() const
{
return false;
}
-void Awb::Pause()
+void Awb::pause()
{
// "Pause" by fixing everything to the most recent values.
- manual_r_ = sync_results_.gain_r = prev_sync_results_.gain_r;
- manual_b_ = sync_results_.gain_b = prev_sync_results_.gain_b;
- sync_results_.gain_g = prev_sync_results_.gain_g;
- sync_results_.temperature_K = prev_sync_results_.temperature_K;
+ manualR_ = syncResults_.gainR = prevSyncResults_.gainR;
+ manualB_ = syncResults_.gainB = prevSyncResults_.gainB;
+ syncResults_.gainG = prevSyncResults_.gainG;
+ syncResults_.temperatureK = prevSyncResults_.temperatureK;
}
-void Awb::Resume()
+void Awb::resume()
{
- manual_r_ = 0.0;
- manual_b_ = 0.0;
+ manualR_ = 0.0;
+ manualB_ = 0.0;
}
-unsigned int Awb::GetConvergenceFrames() const
+unsigned int Awb::getConvergenceFrames() const
{
// If not in auto mode, there is no convergence
// to wait for, so no need to drop any frames - return zero.
if (!isAutoEnabled())
return 0;
else
- return config_.convergence_frames;
+ return config_.convergenceFrames;
}
-void Awb::SetMode(std::string const &mode_name)
+void Awb::setMode(std::string const &modeName)
{
- mode_name_ = mode_name;
+ modeName_ = modeName;
}
-void Awb::SetManualGains(double manual_r, double manual_b)
+void Awb::setManualGains(double manualR, double manualB)
{
// If any of these are 0.0, we switch back to auto.
- manual_r_ = manual_r;
- manual_b_ = manual_b;
- // If not in auto mode, set these values into the sync_results which
+ manualR_ = manualR;
+ manualB_ = manualB;
+ // If not in auto mode, set these values into the syncResults which
// means that Prepare() will adopt them immediately.
if (!isAutoEnabled()) {
- sync_results_.gain_r = prev_sync_results_.gain_r = manual_r_;
- sync_results_.gain_g = prev_sync_results_.gain_g = 1.0;
- sync_results_.gain_b = prev_sync_results_.gain_b = manual_b_;
+ syncResults_.gainR = prevSyncResults_.gainR = manualR_;
+ syncResults_.gainG = prevSyncResults_.gainG = 1.0;
+ syncResults_.gainB = prevSyncResults_.gainB = manualB_;
}
}
-void Awb::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
+void Awb::switchMode([[maybe_unused]] CameraMode const &cameraMode,
Metadata *metadata)
{
// On the first mode switch we'll have no meaningful colour
// temperature, so try to dead reckon one if in manual mode.
- if (!isAutoEnabled() && first_switch_mode_ && config_.bayes) {
- Pwl ct_r_inverse = config_.ct_r.Inverse();
- Pwl ct_b_inverse = config_.ct_b.Inverse();
- double ct_r = ct_r_inverse.Eval(ct_r_inverse.Domain().Clip(1 / manual_r_));
- double ct_b = ct_b_inverse.Eval(ct_b_inverse.Domain().Clip(1 / manual_b_));
- prev_sync_results_.temperature_K = (ct_r + ct_b) / 2;
- sync_results_.temperature_K = prev_sync_results_.temperature_K;
+ if (!isAutoEnabled() && firstSwitchMode_ && config_.bayes) {
+ Pwl ctRInverse = config_.ctR.inverse();
+ Pwl ctBInverse = config_.ctB.inverse();
+ double ctR = ctRInverse.eval(ctRInverse.domain().clip(1 / manualR_));
+ double ctB = ctBInverse.eval(ctBInverse.domain().clip(1 / manualB_));
+ prevSyncResults_.temperatureK = (ctR + ctB) / 2;
+ syncResults_.temperatureK = prevSyncResults_.temperatureK;
}
// Let other algorithms know the current white balance values.
- metadata->Set("awb.status", prev_sync_results_);
- first_switch_mode_ = false;
+ metadata->set("awb.status", prevSyncResults_);
+ firstSwitchMode_ = false;
}
bool Awb::isAutoEnabled() const
{
- return manual_r_ == 0.0 || manual_b_ == 0.0;
+ return manualR_ == 0.0 || manualB_ == 0.0;
}
void Awb::fetchAsyncResults()
{
LOG(RPiAwb, Debug) << "Fetch AWB results";
- async_finished_ = false;
- async_started_ = false;
+ asyncFinished_ = false;
+ asyncStarted_ = false;
// It's possible manual gains could be set even while the async
// thread was running, so only copy the results if still in auto mode.
if (isAutoEnabled())
- sync_results_ = async_results_;
+ syncResults_ = asyncResults_;
}
void Awb::restartAsync(StatisticsPtr &stats, double lux)
@@ -261,75 +253,74 @@ void Awb::restartAsync(StatisticsPtr &stats, double lux)
// this makes a new reference which belongs to the asynchronous thread
statistics_ = stats;
// store the mode as it could technically change
- auto m = config_.modes.find(mode_name_);
+ auto m = config_.modes.find(modeName_);
mode_ = m != config_.modes.end()
? &m->second
- : (mode_ == nullptr ? config_.default_mode : mode_);
+ : (mode_ == nullptr ? config_.defaultMode : mode_);
lux_ = lux;
- frame_phase_ = 0;
- async_started_ = true;
- size_t len = mode_name_.copy(async_results_.mode,
- sizeof(async_results_.mode) - 1);
- async_results_.mode[len] = '\0';
+ framePhase_ = 0;
+ asyncStarted_ = true;
+ size_t len = modeName_.copy(asyncResults_.mode,
+ sizeof(asyncResults_.mode) - 1);
+ asyncResults_.mode[len] = '\0';
{
std::lock_guard<std::mutex> lock(mutex_);
- async_start_ = true;
+ asyncStart_ = true;
}
- async_signal_.notify_one();
+ asyncSignal_.notify_one();
}
-void Awb::Prepare(Metadata *image_metadata)
+void Awb::prepare(Metadata *imageMetadata)
{
- if (frame_count_ < (int)config_.startup_frames)
- frame_count_++;
- double speed = frame_count_ < (int)config_.startup_frames
+ if (frameCount_ < (int)config_.startupFrames)
+ frameCount_++;
+ double speed = frameCount_ < (int)config_.startupFrames
? 1.0
: config_.speed;
LOG(RPiAwb, Debug)
- << "frame_count " << frame_count_ << " speed " << speed;
+ << "frame_count " << frameCount_ << " speed " << speed;
{
std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ && async_finished_)
+ if (asyncStarted_ && asyncFinished_)
fetchAsyncResults();
}
// Finally apply IIR filter to results and put into metadata.
- memcpy(prev_sync_results_.mode, sync_results_.mode,
- sizeof(prev_sync_results_.mode));
- prev_sync_results_.temperature_K =
- speed * sync_results_.temperature_K +
- (1.0 - speed) * prev_sync_results_.temperature_K;
- prev_sync_results_.gain_r = speed * sync_results_.gain_r +
- (1.0 - speed) * prev_sync_results_.gain_r;
- prev_sync_results_.gain_g = speed * sync_results_.gain_g +
- (1.0 - speed) * prev_sync_results_.gain_g;
- prev_sync_results_.gain_b = speed * sync_results_.gain_b +
- (1.0 - speed) * prev_sync_results_.gain_b;
- image_metadata->Set("awb.status", prev_sync_results_);
+ memcpy(prevSyncResults_.mode, syncResults_.mode,
+ sizeof(prevSyncResults_.mode));
+ prevSyncResults_.temperatureK = speed * syncResults_.temperatureK +
+ (1.0 - speed) * prevSyncResults_.temperatureK;
+ prevSyncResults_.gainR = speed * syncResults_.gainR +
+ (1.0 - speed) * prevSyncResults_.gainR;
+ prevSyncResults_.gainG = speed * syncResults_.gainG +
+ (1.0 - speed) * prevSyncResults_.gainG;
+ prevSyncResults_.gainB = speed * syncResults_.gainB +
+ (1.0 - speed) * prevSyncResults_.gainB;
+ imageMetadata->set("awb.status", prevSyncResults_);
LOG(RPiAwb, Debug)
- << "Using AWB gains r " << prev_sync_results_.gain_r << " g "
- << prev_sync_results_.gain_g << " b "
- << prev_sync_results_.gain_b;
+ << "Using AWB gains r " << prevSyncResults_.gainR << " g "
+ << prevSyncResults_.gainG << " b "
+ << prevSyncResults_.gainB;
}
-void Awb::Process(StatisticsPtr &stats, Metadata *image_metadata)
+void Awb::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
// Count frames since we last poked the async thread.
- if (frame_phase_ < (int)config_.frame_period)
- frame_phase_++;
- LOG(RPiAwb, Debug) << "frame_phase " << frame_phase_;
+ if (framePhase_ < (int)config_.framePeriod)
+ framePhase_++;
+ LOG(RPiAwb, Debug) << "frame_phase " << framePhase_;
// We do not restart the async thread if we're not in auto mode.
if (isAutoEnabled() &&
- (frame_phase_ >= (int)config_.frame_period ||
- frame_count_ < (int)config_.startup_frames)) {
+ (framePhase_ >= (int)config_.framePeriod ||
+ frameCount_ < (int)config_.startupFrames)) {
// Update any settings and any image metadata that we need.
- struct LuxStatus lux_status = {};
- lux_status.lux = 400; // in case no metadata
- if (image_metadata->Get("lux.status", lux_status) != 0)
+ struct LuxStatus luxStatus = {};
+ luxStatus.lux = 400; // in case no metadata
+ if (imageMetadata->get("lux.status", luxStatus) != 0)
LOG(RPiAwb, Debug) << "No lux metadata found";
- LOG(RPiAwb, Debug) << "Awb lux value is " << lux_status.lux;
+ LOG(RPiAwb, Debug) << "Awb lux value is " << luxStatus.lux;
- if (async_started_ == false)
- restartAsync(stats, lux_status.lux);
+ if (asyncStarted_ == false)
+ restartAsync(stats, luxStatus.lux);
}
}
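The prepare() hunk above keeps the existing first-order IIR filter: during the first startupFrames frames the speed is forced to 1.0 so the result is adopted immediately, after which each field is blended towards the latest asynchronous result. The per-field update is just the following (illustrative helper, not part of the patch):

static double iirStep(double prev, double target, double speed)
{
	return speed * target + (1.0 - speed) * prev;
}

/* e.g. prevSyncResults_.gainR = iirStep(prevSyncResults_.gainR,
 *                                       syncResults_.gainR, speed); */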
@@ -338,32 +329,32 @@ void Awb::asyncFunc()
while (true) {
{
std::unique_lock<std::mutex> lock(mutex_);
- async_signal_.wait(lock, [&] {
- return async_start_ || async_abort_;
+ asyncSignal_.wait(lock, [&] {
+ return asyncStart_ || asyncAbort_;
});
- async_start_ = false;
- if (async_abort_)
+ asyncStart_ = false;
+ if (asyncAbort_)
break;
}
doAwb();
{
std::lock_guard<std::mutex> lock(mutex_);
- async_finished_ = true;
+ asyncFinished_ = true;
}
- sync_signal_.notify_one();
+ syncSignal_.notify_one();
}
}
-static void generate_stats(std::vector<Awb::RGB> &zones,
- bcm2835_isp_stats_region *stats, double min_pixels,
- double min_G)
+static void generateStats(std::vector<Awb::RGB> &zones,
+ bcm2835_isp_stats_region *stats, double minPixels,
+ double minG)
{
for (int i = 0; i < AWB_STATS_SIZE_X * AWB_STATS_SIZE_Y; i++) {
Awb::RGB zone;
double counted = stats[i].counted;
- if (counted >= min_pixels) {
+ if (counted >= minPixels) {
zone.G = stats[i].g_sum / counted;
- if (zone.G >= min_G) {
+ if (zone.G >= minG) {
zone.R = stats[i].r_sum / counted;
zone.B = stats[i].b_sum / counted;
zones.push_back(zone);
@@ -377,32 +368,33 @@ void Awb::prepareStats()
zones_.clear();
// LSC has already been applied to the stats in this pipeline, so stop
// any LSC compensation. We also ignore config_.fast in this version.
- generate_stats(zones_, statistics_->awb_stats, config_.min_pixels,
- config_.min_G);
+ generateStats(zones_, statistics_->awb_stats, config_.minPixels,
+ config_.minG);
// we're done with these; we may as well relinquish our hold on the
// pointer.
statistics_.reset();
// apply sensitivities, so values appear to come from our "canonical"
// sensor.
- for (auto &zone : zones_)
- zone.R *= config_.sensitivity_r,
- zone.B *= config_.sensitivity_b;
+ for (auto &zone : zones_) {
+ zone.R *= config_.sensitivityR;
+ zone.B *= config_.sensitivityB;
+ }
}
-double Awb::computeDelta2Sum(double gain_r, double gain_b)
+double Awb::computeDelta2Sum(double gainR, double gainB)
{
// Compute the sum of the squared colour error (non-greyness) as it
// appears in the log likelihood equation.
- double delta2_sum = 0;
+ double delta2Sum = 0;
for (auto &z : zones_) {
- double delta_r = gain_r * z.R - 1 - config_.whitepoint_r;
- double delta_b = gain_b * z.B - 1 - config_.whitepoint_b;
- double delta2 = delta_r * delta_r + delta_b * delta_b;
- //LOG(RPiAwb, Debug) << "delta_r " << delta_r << " delta_b " << delta_b << " delta2 " << delta2;
- delta2 = std::min(delta2, config_.delta_limit);
- delta2_sum += delta2;
+ double deltaR = gainR * z.R - 1 - config_.whitepointR;
+ double deltaB = gainB * z.B - 1 - config_.whitepointB;
+ double delta2 = deltaR * deltaR + deltaB * deltaB;
+ //LOG(RPiAwb, Debug) << "deltaR " << deltaR << " deltaB " << deltaB << " delta2 " << delta2;
+ delta2 = std::min(delta2, config_.deltaLimit);
+ delta2Sum += delta2;
}
- return delta2_sum;
+ return delta2Sum;
}
Pwl Awb::interpolatePrior()
@@ -420,7 +412,7 @@ Pwl Awb::interpolatePrior()
idx++;
double lux0 = config_.priors[idx].lux,
lux1 = config_.priors[idx + 1].lux;
- return Pwl::Combine(config_.priors[idx].prior,
+ return Pwl::combine(config_.priors[idx].prior,
config_.priors[idx + 1].prior,
[&](double /*x*/, double y0, double y1) {
return y0 + (y1 - y0) *
@@ -429,62 +421,60 @@ Pwl Awb::interpolatePrior()
}
}
-static double interpolate_quadatric(Pwl::Point const &A, Pwl::Point const &B,
- Pwl::Point const &C)
+static double interpolateQuadatric(Pwl::Point const &a, Pwl::Point const &b,
+ Pwl::Point const &c)
{
// Given 3 points on a curve, find the extremum of the function in that
// interval by fitting a quadratic.
const double eps = 1e-3;
- Pwl::Point CA = C - A, BA = B - A;
- double denominator = 2 * (BA.y * CA.x - CA.y * BA.x);
+ Pwl::Point ca = c - a, ba = b - a;
+ double denominator = 2 * (ba.y * ca.x - ca.y * ba.x);
if (abs(denominator) > eps) {
- double numerator = BA.y * CA.x * CA.x - CA.y * BA.x * BA.x;
- double result = numerator / denominator + A.x;
- return std::max(A.x, std::min(C.x, result));
+ double numerator = ba.y * ca.x * ca.x - ca.y * ba.x * ba.x;
+ double result = numerator / denominator + a.x;
+ return std::max(a.x, std::min(c.x, result));
}
// has degenerated to a straight line segment
- return A.y < C.y - eps ? A.x : (C.y < A.y - eps ? C.x : B.x);
+ return a.y < c.y - eps ? a.x : (c.y < a.y - eps ? c.x : b.x);
}
double Awb::coarseSearch(Pwl const &prior)
{
points_.clear(); // assume doesn't deallocate memory
- size_t best_point = 0;
- double t = mode_->ct_lo;
- int span_r = 0, span_b = 0;
+ size_t bestPoint = 0;
+ double t = mode_->ctLo;
+ int spanR = 0, spanB = 0;
// Step down the CT curve evaluating log likelihood.
while (true) {
- double r = config_.ct_r.Eval(t, &span_r);
- double b = config_.ct_b.Eval(t, &span_b);
- double gain_r = 1 / r, gain_b = 1 / b;
- double delta2_sum = computeDelta2Sum(gain_r, gain_b);
- double prior_log_likelihood =
- prior.Eval(prior.Domain().Clip(t));
- double final_log_likelihood = delta2_sum - prior_log_likelihood;
+ double r = config_.ctR.eval(t, &spanR);
+ double b = config_.ctB.eval(t, &spanB);
+ double gainR = 1 / r, gainB = 1 / b;
+ double delta2Sum = computeDelta2Sum(gainR, gainB);
+ double priorLogLikelihood = prior.eval(prior.domain().clip(t));
+ double finalLogLikelihood = delta2Sum - priorLogLikelihood;
LOG(RPiAwb, Debug)
- << "t: " << t << " gain_r " << gain_r << " gain_b "
- << gain_b << " delta2_sum " << delta2_sum
- << " prior " << prior_log_likelihood << " final "
- << final_log_likelihood;
- points_.push_back(Pwl::Point(t, final_log_likelihood));
- if (points_.back().y < points_[best_point].y)
- best_point = points_.size() - 1;
- if (t == mode_->ct_hi)
+ << "t: " << t << " gain R " << gainR << " gain B "
+ << gainB << " delta2_sum " << delta2Sum
+ << " prior " << priorLogLikelihood << " final "
+ << finalLogLikelihood;
+ points_.push_back(Pwl::Point(t, finalLogLikelihood));
+ if (points_.back().y < points_[bestPoint].y)
+ bestPoint = points_.size() - 1;
+ if (t == mode_->ctHi)
break;
// for even steps along the r/b curve scale them by the current t
- t = std::min(t + t / 10 * config_.coarse_step,
- mode_->ct_hi);
+ t = std::min(t + t / 10 * config_.coarseStep, mode_->ctHi);
}
- t = points_[best_point].x;
+ t = points_[bestPoint].x;
LOG(RPiAwb, Debug) << "Coarse search found CT " << t;
// We have the best point of the search, but refine it with a quadratic
// interpolation around its neighbours.
if (points_.size() > 2) {
- unsigned long bp = std::min(best_point, points_.size() - 2);
- best_point = std::max(1UL, bp);
- t = interpolate_quadatric(points_[best_point - 1],
- points_[best_point],
- points_[best_point + 1]);
+ unsigned long bp = std::min(bestPoint, points_.size() - 2);
+ bestPoint = std::max(1UL, bp);
+ t = interpolateQuadatric(points_[bestPoint - 1],
+ points_[bestPoint],
+ points_[bestPoint + 1]);
LOG(RPiAwb, Debug)
<< "After quadratic refinement, coarse search has CT "
<< t;
@@ -494,80 +484,76 @@ double Awb::coarseSearch(Pwl const &prior)
void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
{
- int span_r = -1, span_b = -1;
- config_.ct_r.Eval(t, &span_r);
- config_.ct_b.Eval(t, &span_b);
- double step = t / 10 * config_.coarse_step * 0.1;
+ int spanR = -1, spanB = -1;
+ config_.ctR.eval(t, &spanR);
+ config_.ctB.eval(t, &spanB);
+ double step = t / 10 * config_.coarseStep * 0.1;
int nsteps = 5;
- double r_diff = config_.ct_r.Eval(t + nsteps * step, &span_r) -
- config_.ct_r.Eval(t - nsteps * step, &span_r);
- double b_diff = config_.ct_b.Eval(t + nsteps * step, &span_b) -
- config_.ct_b.Eval(t - nsteps * step, &span_b);
- Pwl::Point transverse(b_diff, -r_diff);
- if (transverse.Len2() < 1e-6)
+ double rDiff = config_.ctR.eval(t + nsteps * step, &spanR) -
+ config_.ctR.eval(t - nsteps * step, &spanR);
+ double bDiff = config_.ctB.eval(t + nsteps * step, &spanB) -
+ config_.ctB.eval(t - nsteps * step, &spanB);
+ Pwl::Point transverse(bDiff, -rDiff);
+ if (transverse.len2() < 1e-6)
return;
// unit vector orthogonal to the b vs. r function (pointing outwards
// with r and b increasing)
- transverse = transverse / transverse.Len();
- double best_log_likelihood = 0, best_t = 0, best_r = 0, best_b = 0;
- double transverse_range =
- config_.transverse_neg + config_.transverse_pos;
- const int MAX_NUM_DELTAS = 12;
+ transverse = transverse / transverse.len();
+ double bestLogLikelihood = 0, bestT = 0, bestR = 0, bestB = 0;
+ double transverseRange = config_.transverseNeg + config_.transversePos;
+ const int maxNumDeltas = 12;
// a transverse step approximately every 0.01 r/b units
- int num_deltas = floor(transverse_range * 100 + 0.5) + 1;
- num_deltas = num_deltas < 3 ? 3 :
- (num_deltas > MAX_NUM_DELTAS ? MAX_NUM_DELTAS : num_deltas);
+ int numDeltas = floor(transverseRange * 100 + 0.5) + 1;
+ numDeltas = numDeltas < 3 ? 3 : (numDeltas > maxNumDeltas ? maxNumDeltas : numDeltas);
// Step down CT curve. March a bit further if the transverse range is
// large.
- nsteps += num_deltas;
+ nsteps += numDeltas;
for (int i = -nsteps; i <= nsteps; i++) {
- double t_test = t + i * step;
- double prior_log_likelihood =
- prior.Eval(prior.Domain().Clip(t_test));
- double r_curve = config_.ct_r.Eval(t_test, &span_r);
- double b_curve = config_.ct_b.Eval(t_test, &span_b);
+ double tTest = t + i * step;
+ double priorLogLikelihood =
+ prior.eval(prior.domain().clip(tTest));
+ double rCurve = config_.ctR.eval(tTest, &spanR);
+ double bCurve = config_.ctB.eval(tTest, &spanB);
// x will be distance off the curve, y the log likelihood there
- Pwl::Point points[MAX_NUM_DELTAS];
- int best_point = 0;
+ Pwl::Point points[maxNumDeltas];
+ int bestPoint = 0;
// Take some measurements transversely *off* the CT curve.
- for (int j = 0; j < num_deltas; j++) {
- points[j].x = -config_.transverse_neg +
- (transverse_range * j) / (num_deltas - 1);
- Pwl::Point rb_test = Pwl::Point(r_curve, b_curve) +
- transverse * points[j].x;
- double r_test = rb_test.x, b_test = rb_test.y;
- double gain_r = 1 / r_test, gain_b = 1 / b_test;
- double delta2_sum = computeDelta2Sum(gain_r, gain_b);
- points[j].y = delta2_sum - prior_log_likelihood;
+ for (int j = 0; j < numDeltas; j++) {
+ points[j].x = -config_.transverseNeg +
+ (transverseRange * j) / (numDeltas - 1);
+ Pwl::Point rbTest = Pwl::Point(rCurve, bCurve) +
+ transverse * points[j].x;
+ double rTest = rbTest.x, bTest = rbTest.y;
+ double gainR = 1 / rTest, gainB = 1 / bTest;
+ double delta2Sum = computeDelta2Sum(gainR, gainB);
+ points[j].y = delta2Sum - priorLogLikelihood;
LOG(RPiAwb, Debug)
- << "At t " << t_test << " r " << r_test << " b "
- << b_test << ": " << points[j].y;
- if (points[j].y < points[best_point].y)
- best_point = j;
+ << "At t " << tTest << " r " << rTest << " b "
+ << bTest << ": " << points[j].y;
+ if (points[j].y < points[bestPoint].y)
+ bestPoint = j;
}
// We have NUM_DELTAS points transversely across the CT curve,
// now let's do a quadratic interpolation for the best result.
- best_point = std::max(1, std::min(best_point, num_deltas - 2));
- Pwl::Point rb_test =
- Pwl::Point(r_curve, b_curve) +
- transverse *
- interpolate_quadatric(points[best_point - 1],
- points[best_point],
- points[best_point + 1]);
- double r_test = rb_test.x, b_test = rb_test.y;
- double gain_r = 1 / r_test, gain_b = 1 / b_test;
- double delta2_sum = computeDelta2Sum(gain_r, gain_b);
- double final_log_likelihood = delta2_sum - prior_log_likelihood;
+ bestPoint = std::max(1, std::min(bestPoint, numDeltas - 2));
+ Pwl::Point rbTest = Pwl::Point(rCurve, bCurve) +
+ transverse * interpolateQuadatric(points[bestPoint - 1],
+ points[bestPoint],
+ points[bestPoint + 1]);
+ double rTest = rbTest.x, bTest = rbTest.y;
+ double gainR = 1 / rTest, gainB = 1 / bTest;
+ double delta2Sum = computeDelta2Sum(gainR, gainB);
+ double finalLogLikelihood = delta2Sum - priorLogLikelihood;
LOG(RPiAwb, Debug)
<< "Finally "
- << t_test << " r " << r_test << " b " << b_test << ": "
- << final_log_likelihood
- << (final_log_likelihood < best_log_likelihood ? " BEST" : "");
- if (best_t == 0 || final_log_likelihood < best_log_likelihood)
- best_log_likelihood = final_log_likelihood,
- best_t = t_test, best_r = r_test, best_b = b_test;
+ << tTest << " r " << rTest << " b " << bTest << ": "
+ << finalLogLikelihood
+ << (finalLogLikelihood < bestLogLikelihood ? " BEST" : "");
+ if (bestT == 0 || finalLogLikelihood < bestLogLikelihood)
+ bestLogLikelihood = finalLogLikelihood,
+ bestT = tTest, bestR = rTest, bestB = bTest;
}
- t = best_t, r = best_r, b = best_b;
+ t = bestT, r = bestR, b = bestB;
LOG(RPiAwb, Debug)
<< "Fine search found t " << t << " r " << r << " b " << b;
}
@@ -582,12 +568,12 @@ void Awb::awbBayes()
// valid... not entirely sure about this.
Pwl prior = interpolatePrior();
prior *= zones_.size() / (double)(AWB_STATS_SIZE_X * AWB_STATS_SIZE_Y);
- prior.Map([](double x, double y) {
+ prior.map([](double x, double y) {
LOG(RPiAwb, Debug) << "(" << x << "," << y << ")";
});
double t = coarseSearch(prior);
- double r = config_.ct_r.Eval(t);
- double b = config_.ct_b.Eval(t);
+ double r = config_.ctR.eval(t);
+ double b = config_.ctB.eval(t);
LOG(RPiAwb, Debug)
<< "After coarse search: r " << r << " b " << b << " (gains r "
<< 1 / r << " b " << 1 / b << ")";
@@ -604,10 +590,10 @@ void Awb::awbBayes()
// Write results out for the main thread to pick up. Remember to adjust
// the gains from the ones that the "canonical sensor" would require to
// the ones needed by *this* sensor.
- async_results_.temperature_K = t;
- async_results_.gain_r = 1.0 / r * config_.sensitivity_r;
- async_results_.gain_g = 1.0;
- async_results_.gain_b = 1.0 / b * config_.sensitivity_b;
+ asyncResults_.temperatureK = t;
+ asyncResults_.gainR = 1.0 / r * config_.sensitivityR;
+ asyncResults_.gainG = 1.0;
+ asyncResults_.gainB = 1.0 / b * config_.sensitivityB;
}
void Awb::awbGrey()
@@ -617,51 +603,51 @@ void Awb::awbGrey()
// that we can sort them to exclude the extreme gains. We could
// consider some variations, such as normalising all the zones first, or
// doing an L2 average etc.
- std::vector<RGB> &derivs_R(zones_);
- std::vector<RGB> derivs_B(derivs_R);
- std::sort(derivs_R.begin(), derivs_R.end(),
+ std::vector<RGB> &derivsR(zones_);
+ std::vector<RGB> derivsB(derivsR);
+ std::sort(derivsR.begin(), derivsR.end(),
[](RGB const &a, RGB const &b) {
return a.G * b.R < b.G * a.R;
});
- std::sort(derivs_B.begin(), derivs_B.end(),
+ std::sort(derivsB.begin(), derivsB.end(),
[](RGB const &a, RGB const &b) {
return a.G * b.B < b.G * a.B;
});
// Average the middle half of the values.
- int discard = derivs_R.size() / 4;
- RGB sum_R(0, 0, 0), sum_B(0, 0, 0);
- for (auto ri = derivs_R.begin() + discard,
- bi = derivs_B.begin() + discard;
- ri != derivs_R.end() - discard; ri++, bi++)
- sum_R += *ri, sum_B += *bi;
- double gain_r = sum_R.G / (sum_R.R + 1),
- gain_b = sum_B.G / (sum_B.B + 1);
- async_results_.temperature_K = 4500; // don't know what it is
- async_results_.gain_r = gain_r;
- async_results_.gain_g = 1.0;
- async_results_.gain_b = gain_b;
+ int discard = derivsR.size() / 4;
+ RGB sumR(0, 0, 0), sumB(0, 0, 0);
+ for (auto ri = derivsR.begin() + discard,
+ bi = derivsB.begin() + discard;
+ ri != derivsR.end() - discard; ri++, bi++)
+ sumR += *ri, sumB += *bi;
+ double gainR = sumR.G / (sumR.R + 1),
+ gainB = sumB.G / (sumB.B + 1);
+ asyncResults_.temperatureK = 4500; // don't know what it is
+ asyncResults_.gainR = gainR;
+ asyncResults_.gainG = 1.0;
+ asyncResults_.gainB = gainB;
}
void Awb::doAwb()
{
prepareStats();
LOG(RPiAwb, Debug) << "Valid zones: " << zones_.size();
- if (zones_.size() > config_.min_regions) {
+ if (zones_.size() > config_.minRegions) {
if (config_.bayes)
awbBayes();
else
awbGrey();
LOG(RPiAwb, Debug)
<< "CT found is "
- << async_results_.temperature_K
- << " with gains r " << async_results_.gain_r
- << " and b " << async_results_.gain_b;
+ << asyncResults_.temperatureK
+ << " with gains r " << asyncResults_.gainR
+ << " and b " << asyncResults_.gainB;
}
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Awb(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
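For reference, the grey-world fallback in awbGrey() above boils down to: make two orderings of the zones, one by R/G and one by B/G, discard the top and bottom quarters of each, and take the gains from the summed middle halves. A condensed sketch with assumed types (the real code reuses zones_ and the RGB operators defined in awb.hpp):

#include <algorithm>
#include <utility>
#include <vector>

struct RGB { double R, G, B; };

/* Returns { gainR, gainB }; gainG is taken as 1.0. */
static std::pair<double, double> greyWorldGains(std::vector<RGB> zones)
{
	if (zones.size() < 4)
		return { 1.0, 1.0 }; /* too few zones to discard quartiles */
	std::vector<RGB> byBlue = zones;
	std::sort(zones.begin(), zones.end(),
		  [](RGB const &a, RGB const &b) { return a.G * b.R < b.G * a.R; });
	std::sort(byBlue.begin(), byBlue.end(),
		  [](RGB const &a, RGB const &b) { return a.G * b.B < b.G * a.B; });
	size_t discard = zones.size() / 4;
	double sumR = 0, sumGr = 0, sumB = 0, sumGb = 0;
	for (size_t i = discard; i < zones.size() - discard; i++) {
		sumR += zones[i].R, sumGr += zones[i].G;
		sumB += byBlue[i].B, sumGb += byBlue[i].G;
	}
	/* The +1 mirrors the guard against a zero denominator above. */
	return { sumGr / (sumR + 1), sumGb / (sumB + 1) };
}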
diff --git a/src/ipa/raspberrypi/controller/rpi/awb.hpp b/src/ipa/raspberrypi/controller/rpi/awb.hpp
index ac3dca6f..021aafa9 100644
--- a/src/ipa/raspberrypi/controller/rpi/awb.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/awb.hpp
@@ -19,59 +19,59 @@ namespace RPiController {
// Control algorithm to perform AWB calculations.
struct AwbMode {
- void Read(boost::property_tree::ptree const &params);
- double ct_lo; // low CT value for search
- double ct_hi; // high CT value for search
+ void read(boost::property_tree::ptree const &params);
+ double ctLo; // low CT value for search
+ double ctHi; // high CT value for search
};
struct AwbPrior {
- void Read(boost::property_tree::ptree const &params);
+ void read(boost::property_tree::ptree const &params);
double lux; // lux level
Pwl prior; // maps CT to prior log likelihood for this lux level
};
struct AwbConfig {
- AwbConfig() : default_mode(nullptr) {}
- void Read(boost::property_tree::ptree const &params);
+ AwbConfig() : defaultMode(nullptr) {}
+ void read(boost::property_tree::ptree const &params);
// Only repeat the AWB calculation every "this many" frames
- uint16_t frame_period;
+ uint16_t framePeriod;
// number of initial frames for which speed is taken as 1.0 (maximum)
- uint16_t startup_frames;
- unsigned int convergence_frames; // approx number of frames to converge
+ uint16_t startupFrames;
+ unsigned int convergenceFrames; // approx number of frames to converge
double speed; // IIR filter speed applied to algorithm results
bool fast; // "fast" mode uses a 16x16 rather than 32x32 grid
- Pwl ct_r; // function maps CT to r (= R/G)
- Pwl ct_b; // function maps CT to b (= B/G)
+ Pwl ctR; // function maps CT to r (= R/G)
+ Pwl ctB; // function maps CT to b (= B/G)
// table of illuminant priors at different lux levels
std::vector<AwbPrior> priors;
// AWB "modes" (determines the search range)
std::map<std::string, AwbMode> modes;
- AwbMode *default_mode; // mode used if no mode selected
+ AwbMode *defaultMode; // mode used if no mode selected
// minimum proportion of pixels counted within AWB region for it to be
// "useful"
- double min_pixels;
+ double minPixels;
// minimum G value of those pixels, to be regarded as "useful"
- uint16_t min_G;
+ uint16_t minG;
// number of AWB regions that must be "useful" in order to do the AWB
// calculation
- uint32_t min_regions;
+ uint32_t minRegions;
// clamp on colour error term (so as not to penalise non-grey excessively)
- double delta_limit;
+ double deltaLimit;
// step size control in coarse search
- double coarse_step;
+ double coarseStep;
// how far to wander off CT curve towards "more purple"
- double transverse_pos;
+ double transversePos;
// how far to wander off CT curve towards "more green"
- double transverse_neg;
+ double transverseNeg;
// red sensitivity ratio (set to canonical sensor's R/G divided by this
// sensor's R/G)
- double sensitivity_r;
+ double sensitivityR;
// blue sensitivity ratio (set to canonical sensor's B/G divided by this
// sensor's B/G)
- double sensitivity_b;
+ double sensitivityB;
// The whitepoint (which we normally "aim" for) can be moved.
- double whitepoint_r;
- double whitepoint_b;
+ double whitepointR;
+ double whitepointB;
bool bayes; // use Bayesian algorithm
};
@@ -80,22 +80,22 @@ class Awb : public AwbAlgorithm
public:
Awb(Controller *controller = NULL);
~Awb();
- char const *Name() const override;
- void Initialise() override;
- void Read(boost::property_tree::ptree const &params) override;
+ char const *name() const override;
+ void initialise() override;
+ void read(boost::property_tree::ptree const &params) override;
// AWB handles "pausing" for itself.
- bool IsPaused() const override;
- void Pause() override;
- void Resume() override;
- unsigned int GetConvergenceFrames() const override;
- void SetMode(std::string const &name) override;
- void SetManualGains(double manual_r, double manual_b) override;
- void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
- void Prepare(Metadata *image_metadata) override;
- void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
+ bool isPaused() const override;
+ void pause() override;
+ void resume() override;
+ unsigned int getConvergenceFrames() const override;
+ void setMode(std::string const &name) override;
+ void setManualGains(double manualR, double manualB) override;
+ void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
+ void prepare(Metadata *imageMetadata) override;
+ void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
struct RGB {
- RGB(double _R = 0, double _G = 0, double _B = 0)
- : R(_R), G(_G), B(_B)
+ RGB(double r = 0, double g = 0, double b = 0)
+ : R(r), G(g), B(b)
{
}
double R, G, B;
@@ -110,29 +110,29 @@ private:
bool isAutoEnabled() const;
// configuration is read-only, and available to both threads
AwbConfig config_;
- std::thread async_thread_;
+ std::thread asyncThread_;
void asyncFunc(); // asynchronous thread function
std::mutex mutex_;
// condvar for async thread to wait on
- std::condition_variable async_signal_;
+ std::condition_variable asyncSignal_;
// condvar for synchronous thread to wait on
- std::condition_variable sync_signal_;
+ std::condition_variable syncSignal_;
// for sync thread to check if async thread finished (requires mutex)
- bool async_finished_;
+ bool asyncFinished_;
// for async thread to check if it's been told to run (requires mutex)
- bool async_start_;
+ bool asyncStart_;
// for async thread to check if it's been told to quit (requires mutex)
- bool async_abort_;
+ bool asyncAbort_;
// The following are only for the synchronous thread to use:
// for sync thread to note it has asked the async thread to run
- bool async_started_;
- // counts up to frame_period before restarting the async thread
- int frame_phase_;
- int frame_count_; // counts up to startup_frames
- AwbStatus sync_results_;
- AwbStatus prev_sync_results_;
- std::string mode_name_;
+ bool asyncStarted_;
+ // counts up to framePeriod before restarting the async thread
+ int framePhase_;
+ int frameCount_; // counts up to startup_frames
+ AwbStatus syncResults_;
+ AwbStatus prevSyncResults_;
+ std::string modeName_;
// The following are for the asynchronous thread to use, though the main
// thread can set/reset them if the async thread is known to be idle:
void restartAsync(StatisticsPtr &stats, double lux);
@@ -141,22 +141,22 @@ private:
StatisticsPtr statistics_;
AwbMode *mode_;
double lux_;
- AwbStatus async_results_;
+ AwbStatus asyncResults_;
void doAwb();
void awbBayes();
void awbGrey();
void prepareStats();
- double computeDelta2Sum(double gain_r, double gain_b);
+ double computeDelta2Sum(double gainR, double gainB);
Pwl interpolatePrior();
double coarseSearch(Pwl const &prior);
void fineSearch(double &t, double &r, double &b, Pwl const &prior);
std::vector<RGB> zones_;
std::vector<Pwl::Point> points_;
// manual r setting
- double manual_r_;
+ double manualR_;
// manual b setting
- double manual_b_;
- bool first_switch_mode_; // is this the first call to SwitchMode?
+ double manualB_;
+ bool firstSwitchMode_; // is this the first call to SwitchMode?
};
static inline Awb::RGB operator+(Awb::RGB const &a, Awb::RGB const &b)
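Both coarseSearch() and fineSearch() declared above refine their best sample with the three-point quadratic fit from interpolateQuadatric() in awb.cpp. Restated standalone (same maths as the function body earlier in this patch): with u = b - a and v = c - a, the parabola through the three points has its extremum at

	x* = a.x + (u.y * v.x * v.x - v.y * u.x * u.x)
	          / (2 * (u.y * v.x - v.y * u.x))

clamped to [a.x, c.x]; when the denominator is close to zero the points are (nearly) collinear and the function falls back to whichever endpoint is lower (or the middle point if they are level). Quick check: for a = (0, 1), b = (1, 0), c = (2, 1) this gives u = (1, -1), v = (2, 0) and x* = 0 + (-1*4 - 0*1) / (2 * (-1*2 - 0*1)) = 1, the minimum of the symmetric parabola.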
diff --git a/src/ipa/raspberrypi/controller/rpi/black_level.cpp b/src/ipa/raspberrypi/controller/rpi/black_level.cpp
index 6b3497f1..340da0f0 100644
--- a/src/ipa/raspberrypi/controller/rpi/black_level.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/black_level.cpp
@@ -26,38 +26,38 @@ BlackLevel::BlackLevel(Controller *controller)
{
}
-char const *BlackLevel::Name() const
+char const *BlackLevel::name() const
{
return NAME;
}
-void BlackLevel::Read(boost::property_tree::ptree const &params)
+void BlackLevel::read(boost::property_tree::ptree const &params)
{
- uint16_t black_level = params.get<uint16_t>(
+ uint16_t blackLevel = params.get<uint16_t>(
"black_level", 4096); // 64 in 10 bits scaled to 16 bits
- black_level_r_ = params.get<uint16_t>("black_level_r", black_level);
- black_level_g_ = params.get<uint16_t>("black_level_g", black_level);
- black_level_b_ = params.get<uint16_t>("black_level_b", black_level);
+ blackLevelR_ = params.get<uint16_t>("black_level_r", blackLevel);
+ blackLevelG_ = params.get<uint16_t>("black_level_g", blackLevel);
+ blackLevelB_ = params.get<uint16_t>("black_level_b", blackLevel);
LOG(RPiBlackLevel, Debug)
- << " Read black levels red " << black_level_r_
- << " green " << black_level_g_
- << " blue " << black_level_b_;
+ << " Read black levels red " << blackLevelR_
+ << " green " << blackLevelG_
+ << " blue " << blackLevelB_;
}
-void BlackLevel::Prepare(Metadata *image_metadata)
+void BlackLevel::prepare(Metadata *imageMetadata)
{
- // Possibly we should think about doing this in a switch_mode or
+ // Possibly we should think about doing this in a switchMode or
// something?
struct BlackLevelStatus status;
- status.black_level_r = black_level_r_;
- status.black_level_g = black_level_g_;
- status.black_level_b = black_level_b_;
- image_metadata->Set("black_level.status", status);
+ status.blackLevelR = blackLevelR_;
+ status.blackLevelG = blackLevelG_;
+ status.blackLevelB = blackLevelB_;
+ imageMetadata->set("black_level.status", status);
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return new BlackLevel(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
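The default of 4096 read above is the comment's "64 in 10 bits scaled to 16 bits": pipeline black levels are expressed on a 16-bit scale, so a 10-bit sensor value is shifted up by six bits. As a one-line check:

constexpr unsigned int blackLevel16 = 64u << (16 - 10);
static_assert(blackLevel16 == 4096, "64 on a 10-bit scale is 4096 on a 16-bit scale");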
diff --git a/src/ipa/raspberrypi/controller/rpi/black_level.hpp b/src/ipa/raspberrypi/controller/rpi/black_level.hpp
index 65ec4d0e..0d74f6a4 100644
--- a/src/ipa/raspberrypi/controller/rpi/black_level.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/black_level.hpp
@@ -17,14 +17,14 @@ class BlackLevel : public Algorithm
{
public:
BlackLevel(Controller *controller);
- char const *Name() const override;
- void Read(boost::property_tree::ptree const &params) override;
- void Prepare(Metadata *image_metadata) override;
+ char const *name() const override;
+ void read(boost::property_tree::ptree const &params) override;
+ void prepare(Metadata *imageMetadata) override;
private:
- double black_level_r_;
- double black_level_g_;
- double black_level_b_;
+ double blackLevelR_;
+ double blackLevelG_;
+ double blackLevelB_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/ccm.cpp b/src/ipa/raspberrypi/controller/rpi/ccm.cpp
index 821a4c7c..24d8e5bd 100644
--- a/src/ipa/raspberrypi/controller/rpi/ccm.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/ccm.cpp
@@ -37,7 +37,7 @@ Matrix::Matrix(double m0, double m1, double m2, double m3, double m4, double m5,
m[0][0] = m0, m[0][1] = m1, m[0][2] = m2, m[1][0] = m3, m[1][1] = m4,
m[1][2] = m5, m[2][0] = m6, m[2][1] = m7, m[2][2] = m8;
}
-void Matrix::Read(boost::property_tree::ptree const &params)
+void Matrix::read(boost::property_tree::ptree const &params)
{
double *ptr = (double *)m;
int n = 0;
@@ -53,47 +53,49 @@ void Matrix::Read(boost::property_tree::ptree const &params)
Ccm::Ccm(Controller *controller)
: CcmAlgorithm(controller), saturation_(1.0) {}
-char const *Ccm::Name() const
+char const *Ccm::name() const
{
return NAME;
}
-void Ccm::Read(boost::property_tree::ptree const &params)
+void Ccm::read(boost::property_tree::ptree const &params)
{
if (params.get_child_optional("saturation"))
- config_.saturation.Read(params.get_child("saturation"));
+ config_.saturation.read(params.get_child("saturation"));
for (auto &p : params.get_child("ccms")) {
- CtCcm ct_ccm;
- ct_ccm.ct = p.second.get<double>("ct");
- ct_ccm.ccm.Read(p.second.get_child("ccm"));
+ CtCcm ctCcm;
+ ctCcm.ct = p.second.get<double>("ct");
+ ctCcm.ccm.read(p.second.get_child("ccm"));
if (!config_.ccms.empty() &&
- ct_ccm.ct <= config_.ccms.back().ct)
+ ctCcm.ct <= config_.ccms.back().ct)
throw std::runtime_error(
"Ccm: CCM not in increasing colour temperature order");
- config_.ccms.push_back(std::move(ct_ccm));
+ config_.ccms.push_back(std::move(ctCcm));
}
if (config_.ccms.empty())
throw std::runtime_error("Ccm: no CCMs specified");
}
-void Ccm::SetSaturation(double saturation)
+void Ccm::setSaturation(double saturation)
{
saturation_ = saturation;
}
-void Ccm::Initialise() {}
+void Ccm::initialise()
+{
+}
template<typename T>
-static bool get_locked(Metadata *metadata, std::string const &tag, T &value)
+static bool getLocked(Metadata *metadata, std::string const &tag, T &value)
{
- T *ptr = metadata->GetLocked<T>(tag);
+ T *ptr = metadata->getLocked<T>(tag);
if (ptr == nullptr)
return false;
value = *ptr;
return true;
}
-Matrix calculate_ccm(std::vector<CtCcm> const &ccms, double ct)
+Matrix calculateCcm(std::vector<CtCcm> const &ccms, double ct)
{
if (ct <= ccms.front().ct)
return ccms.front().ccm;
@@ -109,7 +111,7 @@ Matrix calculate_ccm(std::vector<CtCcm> const &ccms, double ct)
}
}
-Matrix apply_saturation(Matrix const &ccm, double saturation)
+Matrix applySaturation(Matrix const &ccm, double saturation)
{
Matrix RGB2Y(0.299, 0.587, 0.114, -0.169, -0.331, 0.500, 0.500, -0.419,
-0.081);
@@ -119,51 +121,51 @@ Matrix apply_saturation(Matrix const &ccm, double saturation)
return Y2RGB * S * RGB2Y * ccm;
}
-void Ccm::Prepare(Metadata *image_metadata)
+void Ccm::prepare(Metadata *imageMetadata)
{
- bool awb_ok = false, lux_ok = false;
+ bool awbOk = false, luxOk = false;
struct AwbStatus awb = {};
- awb.temperature_K = 4000; // in case no metadata
+ awb.temperatureK = 4000; // in case no metadata
struct LuxStatus lux = {};
lux.lux = 400; // in case no metadata
{
// grab mutex just once to get everything
- std::lock_guard<Metadata> lock(*image_metadata);
- awb_ok = get_locked(image_metadata, "awb.status", awb);
- lux_ok = get_locked(image_metadata, "lux.status", lux);
+ std::lock_guard<Metadata> lock(*imageMetadata);
+ awbOk = getLocked(imageMetadata, "awb.status", awb);
+ luxOk = getLocked(imageMetadata, "lux.status", lux);
}
- if (!awb_ok)
+ if (!awbOk)
LOG(RPiCcm, Warning) << "no colour temperature found";
- if (!lux_ok)
+ if (!luxOk)
LOG(RPiCcm, Warning) << "no lux value found";
- Matrix ccm = calculate_ccm(config_.ccms, awb.temperature_K);
+ Matrix ccm = calculateCcm(config_.ccms, awb.temperatureK);
double saturation = saturation_;
- struct CcmStatus ccm_status;
- ccm_status.saturation = saturation;
- if (!config_.saturation.Empty())
- saturation *= config_.saturation.Eval(
- config_.saturation.Domain().Clip(lux.lux));
- ccm = apply_saturation(ccm, saturation);
+ struct CcmStatus ccmStatus;
+ ccmStatus.saturation = saturation;
+ if (!config_.saturation.empty())
+ saturation *= config_.saturation.eval(
+ config_.saturation.domain().clip(lux.lux));
+ ccm = applySaturation(ccm, saturation);
for (int j = 0; j < 3; j++)
for (int i = 0; i < 3; i++)
- ccm_status.matrix[j * 3 + i] =
+ ccmStatus.matrix[j * 3 + i] =
std::max(-8.0, std::min(7.9999, ccm.m[j][i]));
LOG(RPiCcm, Debug)
- << "colour temperature " << awb.temperature_K << "K";
+ << "colour temperature " << awb.temperatureK << "K";
LOG(RPiCcm, Debug)
- << "CCM: " << ccm_status.matrix[0] << " " << ccm_status.matrix[1]
- << " " << ccm_status.matrix[2] << " "
- << ccm_status.matrix[3] << " " << ccm_status.matrix[4]
- << " " << ccm_status.matrix[5] << " "
- << ccm_status.matrix[6] << " " << ccm_status.matrix[7]
- << " " << ccm_status.matrix[8];
- image_metadata->Set("ccm.status", ccm_status);
+ << "CCM: " << ccmStatus.matrix[0] << " " << ccmStatus.matrix[1]
+ << " " << ccmStatus.matrix[2] << " "
+ << ccmStatus.matrix[3] << " " << ccmStatus.matrix[4]
+ << " " << ccmStatus.matrix[5] << " "
+ << ccmStatus.matrix[6] << " " << ccmStatus.matrix[7]
+ << " " << ccmStatus.matrix[8];
+ imageMetadata->set("ccm.status", ccmStatus);
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Ccm(controller);
;
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
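applySaturation() above works in a luma/chroma space: RGB2Y converts the corrected RGB to Y'CbCr-style components, S applies the saturation factor (its definition falls in the elided part of the hunk, but to act as a saturation control it presumably scales only the two chroma rows), and Y2RGB converts back, all folded into the single matrix Y2RGB * S * RGB2Y * ccm. The per-pixel effect is equivalent to this sketch:

struct Ycbcr { double y, cb, cr; };

static Ycbcr saturate(Ycbcr in, double saturation)
{
	/* Luma untouched; chroma scaled. 0.0 gives greyscale, 1.0 leaves
	 * the colours unchanged, values above 1.0 boost them. */
	return { in.y, in.cb * saturation, in.cr * saturation };
}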
diff --git a/src/ipa/raspberrypi/controller/rpi/ccm.hpp b/src/ipa/raspberrypi/controller/rpi/ccm.hpp
index 330ed51f..4c4807b8 100644
--- a/src/ipa/raspberrypi/controller/rpi/ccm.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/ccm.hpp
@@ -20,7 +20,7 @@ struct Matrix {
double m6, double m7, double m8);
Matrix();
double m[3][3];
- void Read(boost::property_tree::ptree const &params);
+ void read(boost::property_tree::ptree const &params);
};
static inline Matrix operator*(double d, Matrix const &m)
{
@@ -61,11 +61,11 @@ class Ccm : public CcmAlgorithm
{
public:
Ccm(Controller *controller = NULL);
- char const *Name() const override;
- void Read(boost::property_tree::ptree const &params) override;
- void SetSaturation(double saturation) override;
- void Initialise() override;
- void Prepare(Metadata *image_metadata) override;
+ char const *name() const override;
+ void read(boost::property_tree::ptree const &params) override;
+ void setSaturation(double saturation) override;
+ void initialise() override;
+ void prepare(Metadata *imageMetadata) override;
private:
CcmConfig config_;
diff --git a/src/ipa/raspberrypi/controller/rpi/contrast.cpp b/src/ipa/raspberrypi/controller/rpi/contrast.cpp
index ae55aad5..16983757 100644
--- a/src/ipa/raspberrypi/controller/rpi/contrast.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/contrast.cpp
@@ -31,40 +31,40 @@ Contrast::Contrast(Controller *controller)
{
}
-char const *Contrast::Name() const
+char const *Contrast::name() const
{
return NAME;
}
-void Contrast::Read(boost::property_tree::ptree const &params)
+void Contrast::read(boost::property_tree::ptree const &params)
{
// enable adaptive enhancement by default
- config_.ce_enable = params.get<int>("ce_enable", 1);
+ config_.ceEnable = params.get<int>("ce_enable", 1);
// the point near the bottom of the histogram to move
- config_.lo_histogram = params.get<double>("lo_histogram", 0.01);
+ config_.loHistogram = params.get<double>("lo_histogram", 0.01);
// where in the range to try and move it to
- config_.lo_level = params.get<double>("lo_level", 0.015);
+ config_.loLevel = params.get<double>("lo_level", 0.015);
// but don't move by more than this
- config_.lo_max = params.get<double>("lo_max", 500);
+ config_.loMax = params.get<double>("lo_max", 500);
// equivalent values for the top of the histogram...
- config_.hi_histogram = params.get<double>("hi_histogram", 0.95);
- config_.hi_level = params.get<double>("hi_level", 0.95);
- config_.hi_max = params.get<double>("hi_max", 2000);
- config_.gamma_curve.Read(params.get_child("gamma_curve"));
+ config_.hiHistogram = params.get<double>("hi_histogram", 0.95);
+ config_.hiLevel = params.get<double>("hi_level", 0.95);
+ config_.hiMax = params.get<double>("hi_max", 2000);
+ config_.gammaCurve.read(params.get_child("gamma_curve"));
}
-void Contrast::SetBrightness(double brightness)
+void Contrast::setBrightness(double brightness)
{
brightness_ = brightness;
}
-void Contrast::SetContrast(double contrast)
+void Contrast::setContrast(double contrast)
{
contrast_ = contrast;
}
-static void fill_in_status(ContrastStatus &status, double brightness,
- double contrast, Pwl &gamma_curve)
+static void fillInStatus(ContrastStatus &status, double brightness,
+ double contrast, Pwl &gammaCurve)
{
status.brightness = brightness;
status.contrast = contrast;
@@ -73,104 +73,100 @@ static void fill_in_status(ContrastStatus &status, double brightness,
: (i < 24 ? (i - 16) * 2048 + 16384
: (i - 24) * 4096 + 32768);
status.points[i].x = x;
- status.points[i].y = std::min(65535.0, gamma_curve.Eval(x));
+ status.points[i].y = std::min(65535.0, gammaCurve.eval(x));
}
status.points[CONTRAST_NUM_POINTS - 1].x = 65535;
status.points[CONTRAST_NUM_POINTS - 1].y = 65535;
}
-void Contrast::Initialise()
+void Contrast::initialise()
{
// Fill in some default values as Prepare will run before Process gets
// called.
- fill_in_status(status_, brightness_, contrast_, config_.gamma_curve);
+ fillInStatus(status_, brightness_, contrast_, config_.gammaCurve);
}
-void Contrast::Prepare(Metadata *image_metadata)
+void Contrast::prepare(Metadata *imageMetadata)
{
std::unique_lock<std::mutex> lock(mutex_);
- image_metadata->Set("contrast.status", status_);
+ imageMetadata->set("contrast.status", status_);
}
-Pwl compute_stretch_curve(Histogram const &histogram,
- ContrastConfig const &config)
+Pwl computeStretchCurve(Histogram const &histogram,
+ ContrastConfig const &config)
{
Pwl enhance;
- enhance.Append(0, 0);
+ enhance.append(0, 0);
// If the start of the histogram is rather empty, try to pull it down a
// bit.
- double hist_lo = histogram.Quantile(config.lo_histogram) *
- (65536 / NUM_HISTOGRAM_BINS);
- double level_lo = config.lo_level * 65536;
+ double histLo = histogram.quantile(config.loHistogram) *
+ (65536 / NUM_HISTOGRAM_BINS);
+ double levelLo = config.loLevel * 65536;
LOG(RPiContrast, Debug)
- << "Move histogram point " << hist_lo << " to " << level_lo;
- hist_lo = std::max(
- level_lo,
- std::min(65535.0, std::min(hist_lo, level_lo + config.lo_max)));
+ << "Move histogram point " << histLo << " to " << levelLo;
+ histLo = std::max(levelLo,
+ std::min(65535.0, std::min(histLo, levelLo + config.loMax)));
LOG(RPiContrast, Debug)
- << "Final values " << hist_lo << " -> " << level_lo;
- enhance.Append(hist_lo, level_lo);
+ << "Final values " << histLo << " -> " << levelLo;
+ enhance.append(histLo, levelLo);
// Keep the mid-point (median) in the same place, though, to limit the
// apparent amount of global brightness shift.
- double mid = histogram.Quantile(0.5) * (65536 / NUM_HISTOGRAM_BINS);
- enhance.Append(mid, mid);
+ double mid = histogram.quantile(0.5) * (65536 / NUM_HISTOGRAM_BINS);
+ enhance.append(mid, mid);
	// If the top of the histogram is empty, try to pull the pixel values
// there up.
- double hist_hi = histogram.Quantile(config.hi_histogram) *
- (65536 / NUM_HISTOGRAM_BINS);
- double level_hi = config.hi_level * 65536;
+ double histHi = histogram.quantile(config.hiHistogram) *
+ (65536 / NUM_HISTOGRAM_BINS);
+ double levelHi = config.hiLevel * 65536;
LOG(RPiContrast, Debug)
- << "Move histogram point " << hist_hi << " to " << level_hi;
- hist_hi = std::min(
- level_hi,
- std::max(0.0, std::max(hist_hi, level_hi - config.hi_max)));
+ << "Move histogram point " << histHi << " to " << levelHi;
+ histHi = std::min(levelHi,
+ std::max(0.0, std::max(histHi, levelHi - config.hiMax)));
LOG(RPiContrast, Debug)
- << "Final values " << hist_hi << " -> " << level_hi;
- enhance.Append(hist_hi, level_hi);
- enhance.Append(65535, 65535);
+ << "Final values " << histHi << " -> " << levelHi;
+ enhance.append(histHi, levelHi);
+ enhance.append(65535, 65535);
return enhance;
}
-Pwl apply_manual_contrast(Pwl const &gamma_curve, double brightness,
- double contrast)
+Pwl applyManualContrast(Pwl const &gammaCurve, double brightness,
+ double contrast)
{
- Pwl new_gamma_curve;
+ Pwl newGammaCurve;
LOG(RPiContrast, Debug)
<< "Manual brightness " << brightness << " contrast " << contrast;
- gamma_curve.Map([&](double x, double y) {
- new_gamma_curve.Append(
+ gammaCurve.map([&](double x, double y) {
+ newGammaCurve.append(
x, std::max(0.0, std::min(65535.0,
(y - 32768) * contrast +
32768 + brightness)));
});
- return new_gamma_curve;
+ return newGammaCurve;
}
-void Contrast::Process(StatisticsPtr &stats,
- [[maybe_unused]] Metadata *image_metadata)
+void Contrast::process(StatisticsPtr &stats,
+ [[maybe_unused]] Metadata *imageMetadata)
{
Histogram histogram(stats->hist[0].g_hist, NUM_HISTOGRAM_BINS);
// We look at the histogram and adjust the gamma curve in the following
// ways: 1. Adjust the gamma curve so as to pull the start of the
// histogram down, and possibly push the end up.
- Pwl gamma_curve = config_.gamma_curve;
- if (config_.ce_enable) {
- if (config_.lo_max != 0 || config_.hi_max != 0)
- gamma_curve = compute_stretch_curve(histogram, config_)
- .Compose(gamma_curve);
+ Pwl gammaCurve = config_.gammaCurve;
+ if (config_.ceEnable) {
+ if (config_.loMax != 0 || config_.hiMax != 0)
+ gammaCurve = computeStretchCurve(histogram, config_).compose(gammaCurve);
// We could apply other adjustments (e.g. partial equalisation)
// based on the histogram...?
}
// 2. Finally apply any manually selected brightness/contrast
// adjustment.
if (brightness_ != 0 || contrast_ != 1.0)
- gamma_curve = apply_manual_contrast(gamma_curve, brightness_,
- contrast_);
+ gammaCurve = applyManualContrast(gammaCurve, brightness_, contrast_);
// And fill in the status for output. Use more points towards the bottom
// of the curve.
ContrastStatus status;
- fill_in_status(status, brightness_, contrast_, gamma_curve);
+ fillInStatus(status, brightness_, contrast_, gammaCurve);
{
std::unique_lock<std::mutex> lock(mutex_);
status_ = status;
@@ -178,8 +174,8 @@ void Contrast::Process(StatisticsPtr &stats,
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Contrast(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
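For reference, here is a minimal standalone sketch of what computeStretchCurve() and applyManualContrast() compute. stretchCurve() and manualAdjust() are local stand-ins (plain control points instead of the Pwl class); the quantiles and the lo_level value are assumed, while lo_max, hi_level and hi_max use the defaults read above.

#include <algorithm>
#include <cstdio>
#include <vector>

struct Point { double x, y; };

/*
 * Control points of the stretch curve: (0,0), (histLo,levelLo), (mid,mid),
 * (histHi,levelHi), (65535,65535). histLo/mid/histHi are histogram quantiles
 * already scaled to the 16-bit range; loLevel/hiLevel are fractions of full
 * scale; loMax/hiMax cap how far the low/high quantile may be moved.
 */
std::vector<Point> stretchCurve(double histLo, double mid, double histHi,
				double loLevel, double loMax,
				double hiLevel, double hiMax)
{
	double levelLo = loLevel * 65536, levelHi = hiLevel * 65536;
	histLo = std::max(levelLo, std::min(65535.0, std::min(histLo, levelLo + loMax)));
	histHi = std::min(levelHi, std::max(0.0, std::max(histHi, levelHi - hiMax)));
	return { { 0, 0 }, { histLo, levelLo }, { mid, mid },
		 { histHi, levelHi }, { 65535, 65535 } };
}

/* Manual adjustment applied to every point of the gamma curve afterwards. */
double manualAdjust(double y, double brightness, double contrast)
{
	return std::max(0.0, std::min(65535.0, (y - 32768) * contrast + 32768 + brightness));
}

int main()
{
	/* Illustrative quantile positions; brightness 0 and contrast 1.1. */
	for (const Point &p : stretchCurve(8000, 30000, 60000, 0.01, 500, 0.95, 2000))
		std::printf("maps %5.0f -> %5.0f (then %5.0f after contrast 1.1)\n",
			    p.x, p.y, manualAdjust(p.y, 0.0, 1.1));
	return 0;
}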
diff --git a/src/ipa/raspberrypi/controller/rpi/contrast.hpp b/src/ipa/raspberrypi/controller/rpi/contrast.hpp
index 85624539..5a6d530f 100644
--- a/src/ipa/raspberrypi/controller/rpi/contrast.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/contrast.hpp
@@ -17,27 +17,27 @@ namespace RPiController {
// Back End AWB.
struct ContrastConfig {
- bool ce_enable;
- double lo_histogram;
- double lo_level;
- double lo_max;
- double hi_histogram;
- double hi_level;
- double hi_max;
- Pwl gamma_curve;
+ bool ceEnable;
+ double loHistogram;
+ double loLevel;
+ double loMax;
+ double hiHistogram;
+ double hiLevel;
+ double hiMax;
+ Pwl gammaCurve;
};
class Contrast : public ContrastAlgorithm
{
public:
Contrast(Controller *controller = NULL);
- char const *Name() const override;
- void Read(boost::property_tree::ptree const &params) override;
- void SetBrightness(double brightness) override;
- void SetContrast(double contrast) override;
- void Initialise() override;
- void Prepare(Metadata *image_metadata) override;
- void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
+ char const *name() const override;
+ void read(boost::property_tree::ptree const &params) override;
+ void setBrightness(double brightness) override;
+ void setContrast(double contrast) override;
+ void initialise() override;
+ void prepare(Metadata *imageMetadata) override;
+ void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
private:
ContrastConfig config_;
diff --git a/src/ipa/raspberrypi/controller/rpi/dpc.cpp b/src/ipa/raspberrypi/controller/rpi/dpc.cpp
index 110f5056..42154cf3 100644
--- a/src/ipa/raspberrypi/controller/rpi/dpc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/dpc.cpp
@@ -24,30 +24,30 @@ Dpc::Dpc(Controller *controller)
{
}
-char const *Dpc::Name() const
+char const *Dpc::name() const
{
return NAME;
}
-void Dpc::Read(boost::property_tree::ptree const &params)
+void Dpc::read(boost::property_tree::ptree const &params)
{
config_.strength = params.get<int>("strength", 1);
if (config_.strength < 0 || config_.strength > 2)
throw std::runtime_error("Dpc: bad strength value");
}
-void Dpc::Prepare(Metadata *image_metadata)
+void Dpc::prepare(Metadata *imageMetadata)
{
- DpcStatus dpc_status = {};
+ DpcStatus dpcStatus = {};
// Should we vary this with lux level or analogue gain? TBD.
- dpc_status.strength = config_.strength;
- LOG(RPiDpc, Debug) << "strength " << dpc_status.strength;
- image_metadata->Set("dpc.status", dpc_status);
+ dpcStatus.strength = config_.strength;
+ LOG(RPiDpc, Debug) << "strength " << dpcStatus.strength;
+ imageMetadata->set("dpc.status", dpcStatus);
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Dpc(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
diff --git a/src/ipa/raspberrypi/controller/rpi/dpc.hpp b/src/ipa/raspberrypi/controller/rpi/dpc.hpp
index d90285c4..039310cc 100644
--- a/src/ipa/raspberrypi/controller/rpi/dpc.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/dpc.hpp
@@ -21,9 +21,9 @@ class Dpc : public Algorithm
{
public:
Dpc(Controller *controller);
- char const *Name() const override;
- void Read(boost::property_tree::ptree const &params) override;
- void Prepare(Metadata *image_metadata) override;
+ char const *name() const override;
+ void read(boost::property_tree::ptree const &params) override;
+ void prepare(Metadata *imageMetadata) override;
private:
DpcConfig config_;
diff --git a/src/ipa/raspberrypi/controller/rpi/focus.cpp b/src/ipa/raspberrypi/controller/rpi/focus.cpp
index a87ec802..90f36e58 100644
--- a/src/ipa/raspberrypi/controller/rpi/focus.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/focus.cpp
@@ -23,28 +23,28 @@ Focus::Focus(Controller *controller)
{
}
-char const *Focus::Name() const
+char const *Focus::name() const
{
return NAME;
}
-void Focus::Process(StatisticsPtr &stats, Metadata *image_metadata)
+void Focus::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
FocusStatus status;
unsigned int i;
for (i = 0; i < FOCUS_REGIONS; i++)
- status.focus_measures[i] = stats->focus_stats[i].contrast_val[1][1] / 1000;
+ status.focusMeasures[i] = stats->focus_stats[i].contrast_val[1][1] / 1000;
status.num = i;
- image_metadata->Set("focus.status", status);
+ imageMetadata->set("focus.status", status);
LOG(RPiFocus, Debug)
<< "Focus contrast measure: "
- << (status.focus_measures[5] + status.focus_measures[6]) / 10;
+ << (status.focusMeasures[5] + status.focusMeasures[6]) / 10;
}
/* Register algorithm with the system. */
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return new Focus(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
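The per-region measures filled in here feed the figure of merit that reportMetadata() later derives by averaging the two central regions of the default 4x3 grid (indices 5 and 6). A trivial sketch with made-up numbers:

#include <cstdio>

int main()
{
	/* Illustrative focus measures for the 4x3 grid of regions (row-major). */
	unsigned int focusMeasures[12] = { 10, 12, 14, 11,
					   13, 95, 92, 12,
					   11, 13, 12, 10 };
	/* Indices 5 and 6 are the two central positions of the middle row. */
	int focusFoM = (focusMeasures[5] + focusMeasures[6]) / 2;
	std::printf("focus FoM %d\n", focusFoM);
	return 0;
}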
diff --git a/src/ipa/raspberrypi/controller/rpi/focus.hpp b/src/ipa/raspberrypi/controller/rpi/focus.hpp
index 131b1d0f..a9207eb3 100644
--- a/src/ipa/raspberrypi/controller/rpi/focus.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/focus.hpp
@@ -21,8 +21,8 @@ class Focus : public Algorithm
{
public:
Focus(Controller *controller);
- char const *Name() const override;
- void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
+ char const *name() const override;
+ void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
};
} /* namespace RPiController */
diff --git a/src/ipa/raspberrypi/controller/rpi/geq.cpp b/src/ipa/raspberrypi/controller/rpi/geq.cpp
index 4530cb75..0da5efdf 100644
--- a/src/ipa/raspberrypi/controller/rpi/geq.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/geq.cpp
@@ -28,54 +28,52 @@ Geq::Geq(Controller *controller)
{
}
-char const *Geq::Name() const
+char const *Geq::name() const
{
return NAME;
}
-void Geq::Read(boost::property_tree::ptree const &params)
+void Geq::read(boost::property_tree::ptree const &params)
{
config_.offset = params.get<uint16_t>("offset", 0);
config_.slope = params.get<double>("slope", 0.0);
if (config_.slope < 0.0 || config_.slope >= 1.0)
throw std::runtime_error("Geq: bad slope value");
if (params.get_child_optional("strength"))
- config_.strength.Read(params.get_child("strength"));
+ config_.strength.read(params.get_child("strength"));
}
-void Geq::Prepare(Metadata *image_metadata)
+void Geq::prepare(Metadata *imageMetadata)
{
- LuxStatus lux_status = {};
- lux_status.lux = 400;
- if (image_metadata->Get("lux.status", lux_status))
+ LuxStatus luxStatus = {};
+ luxStatus.lux = 400;
+ if (imageMetadata->get("lux.status", luxStatus))
LOG(RPiGeq, Warning) << "no lux data found";
- DeviceStatus device_status;
- device_status.analogue_gain = 1.0; // in case not found
- if (image_metadata->Get("device.status", device_status))
+ DeviceStatus deviceStatus;
+ deviceStatus.analogueGain = 1.0; // in case not found
+ if (imageMetadata->get("device.status", deviceStatus))
LOG(RPiGeq, Warning)
<< "no device metadata - use analogue gain of 1x";
- GeqStatus geq_status = {};
- double strength =
- config_.strength.Empty()
+ GeqStatus geqStatus = {};
+ double strength = config_.strength.empty()
? 1.0
- : config_.strength.Eval(config_.strength.Domain().Clip(
- lux_status.lux));
- strength *= device_status.analogue_gain;
+ : config_.strength.eval(config_.strength.domain().clip(luxStatus.lux));
+ strength *= deviceStatus.analogueGain;
double offset = config_.offset * strength;
double slope = config_.slope * strength;
- geq_status.offset = std::min(65535.0, std::max(0.0, offset));
- geq_status.slope = std::min(.99999, std::max(0.0, slope));
+ geqStatus.offset = std::min(65535.0, std::max(0.0, offset));
+ geqStatus.slope = std::min(.99999, std::max(0.0, slope));
LOG(RPiGeq, Debug)
- << "offset " << geq_status.offset << " slope "
- << geq_status.slope << " (analogue gain "
- << device_status.analogue_gain << " lux "
- << lux_status.lux << ")";
- image_metadata->Set("geq.status", geq_status);
+ << "offset " << geqStatus.offset << " slope "
+ << geqStatus.slope << " (analogue gain "
+ << deviceStatus.analogueGain << " lux "
+ << luxStatus.lux << ")";
+ imageMetadata->set("geq.status", geqStatus);
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Geq(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
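The GEQ strength is an optional lux-dependent piecewise-linear value (1.0 when absent) multiplied by the analogue gain; the offset and slope are scaled by it and clamped as above. A minimal sketch, with a hypothetical two-point strengthFromLux() standing in for the tuning-file Pwl:

#include <algorithm>
#include <cstdio>

/* Interpolate strength over lux, clamping lux to the table's domain
 * (the real code evaluates a Pwl read from the tuning file). */
double strengthFromLux(double lux)
{
	const double lux0 = 0, lux1 = 800, s0 = 2.0, s1 = 1.0; /* illustrative */
	lux = std::clamp(lux, lux0, lux1);
	return s0 + (s1 - s0) * (lux - lux0) / (lux1 - lux0);
}

int main()
{
	double baseOffset = 16, baseSlope = 0.02;	/* "offset" and "slope" tuning values */
	double lux = 400, analogueGain = 4.0;		/* from lux.status and device.status */

	double strength = strengthFromLux(lux) * analogueGain;
	double offset = std::min(65535.0, std::max(0.0, baseOffset * strength));
	double slope = std::min(0.99999, std::max(0.0, baseSlope * strength));
	std::printf("geq offset %.1f slope %.5f\n", offset, slope);
	return 0;
}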
diff --git a/src/ipa/raspberrypi/controller/rpi/geq.hpp b/src/ipa/raspberrypi/controller/rpi/geq.hpp
index 8ba3046b..bdbc55b2 100644
--- a/src/ipa/raspberrypi/controller/rpi/geq.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/geq.hpp
@@ -23,9 +23,9 @@ class Geq : public Algorithm
{
public:
Geq(Controller *controller);
- char const *Name() const override;
- void Read(boost::property_tree::ptree const &params) override;
- void Prepare(Metadata *image_metadata) override;
+ char const *name() const override;
+ void read(boost::property_tree::ptree const &params) override;
+ void prepare(Metadata *imageMetadata) override;
private:
GeqConfig config_;
diff --git a/src/ipa/raspberrypi/controller/rpi/lux.cpp b/src/ipa/raspberrypi/controller/rpi/lux.cpp
index 4d145b6f..739a3d53 100644
--- a/src/ipa/raspberrypi/controller/rpi/lux.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/lux.cpp
@@ -31,74 +31,74 @@ Lux::Lux(Controller *controller)
status_.lux = 400;
}
-char const *Lux::Name() const
+char const *Lux::name() const
{
return NAME;
}
-void Lux::Read(boost::property_tree::ptree const &params)
+void Lux::read(boost::property_tree::ptree const &params)
{
- reference_shutter_speed_ =
+ referenceShutterSpeed_ =
params.get<double>("reference_shutter_speed") * 1.0us;
- reference_gain_ = params.get<double>("reference_gain");
- reference_aperture_ = params.get<double>("reference_aperture", 1.0);
- reference_Y_ = params.get<double>("reference_Y");
- reference_lux_ = params.get<double>("reference_lux");
- current_aperture_ = reference_aperture_;
+ referenceGain_ = params.get<double>("reference_gain");
+ referenceAperture_ = params.get<double>("reference_aperture", 1.0);
+ referenceY_ = params.get<double>("reference_Y");
+ referenceLux_ = params.get<double>("reference_lux");
+ currentAperture_ = referenceAperture_;
}
-void Lux::SetCurrentAperture(double aperture)
+void Lux::setCurrentAperture(double aperture)
{
- current_aperture_ = aperture;
+ currentAperture_ = aperture;
}
-void Lux::Prepare(Metadata *image_metadata)
+void Lux::prepare(Metadata *imageMetadata)
{
std::unique_lock<std::mutex> lock(mutex_);
- image_metadata->Set("lux.status", status_);
+ imageMetadata->set("lux.status", status_);
}
-void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
+void Lux::process(StatisticsPtr &stats, Metadata *imageMetadata)
{
- DeviceStatus device_status;
- if (image_metadata->Get("device.status", device_status) == 0) {
- double current_gain = device_status.analogue_gain;
- double current_aperture = device_status.aperture.value_or(current_aperture_);
+ DeviceStatus deviceStatus;
+ if (imageMetadata->get("device.status", deviceStatus) == 0) {
+ double currentGain = deviceStatus.analogueGain;
+ double currentAperture = deviceStatus.aperture.value_or(currentAperture_);
uint64_t sum = 0;
uint32_t num = 0;
uint32_t *bin = stats->hist[0].g_hist;
- const int num_bins = sizeof(stats->hist[0].g_hist) /
- sizeof(stats->hist[0].g_hist[0]);
- for (int i = 0; i < num_bins; i++)
+ const int numBins = sizeof(stats->hist[0].g_hist) /
+ sizeof(stats->hist[0].g_hist[0]);
+ for (int i = 0; i < numBins; i++)
sum += bin[i] * (uint64_t)i, num += bin[i];
// add .5 to reflect the mid-points of bins
- double current_Y = sum / (double)num + .5;
- double gain_ratio = reference_gain_ / current_gain;
- double shutter_speed_ratio =
- reference_shutter_speed_ / device_status.shutter_speed;
- double aperture_ratio = reference_aperture_ / current_aperture;
- double Y_ratio = current_Y * (65536 / num_bins) / reference_Y_;
- double estimated_lux = shutter_speed_ratio * gain_ratio *
- aperture_ratio * aperture_ratio *
- Y_ratio * reference_lux_;
+ double currentY = sum / (double)num + .5;
+ double gainRatio = referenceGain_ / currentGain;
+ double shutterSpeedRatio =
+ referenceShutterSpeed_ / deviceStatus.shutterSpeed;
+ double apertureRatio = referenceAperture_ / currentAperture;
+ double yRatio = currentY * (65536 / numBins) / referenceY_;
+ double estimatedLux = shutterSpeedRatio * gainRatio *
+ apertureRatio * apertureRatio *
+ yRatio * referenceLux_;
LuxStatus status;
- status.lux = estimated_lux;
- status.aperture = current_aperture;
- LOG(RPiLux, Debug) << ": estimated lux " << estimated_lux;
+ status.lux = estimatedLux;
+ status.aperture = currentAperture;
+ LOG(RPiLux, Debug) << ": estimated lux " << estimatedLux;
{
std::unique_lock<std::mutex> lock(mutex_);
status_ = status;
}
// Overwrite the metadata here as well, so that downstream
// algorithms get the latest value.
- image_metadata->Set("lux.status", status);
+ imageMetadata->set("lux.status", status);
} else
LOG(RPiLux, Warning) << ": no device metadata";
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Lux(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
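Lux::process() estimates lux by scaling the reference lux by the ratios of shutter time, analogue gain, aperture (squared) and mean green level Y between the current frame and the reference image. A standalone sketch with plain doubles in place of the Duration type and made-up reference/current values:

#include <cstdint>
#include <cstdio>

int main()
{
	const int numBins = 128;
	uint32_t hist[numBins] = {};	/* green-channel histogram from the ISP statistics */
	hist[20] = 1000; hist[40] = 3000; hist[60] = 1000;

	uint64_t sum = 0, num = 0;
	for (int i = 0; i < numBins; i++) {
		sum += (uint64_t)hist[i] * i;
		num += hist[i];
	}
	double currentY = sum / (double)num + 0.5;	/* +0.5 for the bin mid-point */

	/* Assumed reference image: 10ms shutter, gain 1.0, f/1.0, Y=12000/65536, 900 lux. */
	double refShutter = 10e-3, refGain = 1.0, refAperture = 1.0, refY = 12000, refLux = 900;
	double shutter = 20e-3, gain = 2.0, aperture = 1.0;	/* current frame */

	double estimatedLux = (refShutter / shutter) * (refGain / gain) *
			      (refAperture / aperture) * (refAperture / aperture) *
			      (currentY * (65536.0 / numBins) / refY) * refLux;
	std::printf("estimated lux %.1f\n", estimatedLux);
	return 0;
}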
diff --git a/src/ipa/raspberrypi/controller/rpi/lux.hpp b/src/ipa/raspberrypi/controller/rpi/lux.hpp
index 3ebd35d1..bd49a409 100644
--- a/src/ipa/raspberrypi/controller/rpi/lux.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/lux.hpp
@@ -21,21 +21,21 @@ class Lux : public Algorithm
{
public:
Lux(Controller *controller);
- char const *Name() const override;
- void Read(boost::property_tree::ptree const &params) override;
- void Prepare(Metadata *image_metadata) override;
- void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
- void SetCurrentAperture(double aperture);
+ char const *name() const override;
+ void read(boost::property_tree::ptree const &params) override;
+ void prepare(Metadata *imageMetadata) override;
+ void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
+ void setCurrentAperture(double aperture);
private:
// These values define the conditions of the reference image, against
// which we compare the new image.
- libcamera::utils::Duration reference_shutter_speed_;
- double reference_gain_;
- double reference_aperture_; // units of 1/f
- double reference_Y_; // out of 65536
- double reference_lux_;
- double current_aperture_;
+ libcamera::utils::Duration referenceShutterSpeed_;
+ double referenceGain_;
+ double referenceAperture_; // units of 1/f
+ double referenceY_; // out of 65536
+ double referenceLux_;
+ double currentAperture_;
LuxStatus status_;
std::mutex mutex_;
};
diff --git a/src/ipa/raspberrypi/controller/rpi/noise.cpp b/src/ipa/raspberrypi/controller/rpi/noise.cpp
index 63cad639..97b0fd05 100644
--- a/src/ipa/raspberrypi/controller/rpi/noise.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/noise.cpp
@@ -22,55 +22,55 @@ LOG_DEFINE_CATEGORY(RPiNoise)
#define NAME "rpi.noise"
Noise::Noise(Controller *controller)
- : Algorithm(controller), mode_factor_(1.0)
+ : Algorithm(controller), modeFactor_(1.0)
{
}
-char const *Noise::Name() const
+char const *Noise::name() const
{
return NAME;
}
-void Noise::SwitchMode(CameraMode const &camera_mode,
+void Noise::switchMode(CameraMode const &cameraMode,
[[maybe_unused]] Metadata *metadata)
{
// For example, we would expect a 2x2 binned mode to have a "noise
// factor" of sqrt(2x2) = 2. (can't be less than one, right?)
- mode_factor_ = std::max(1.0, camera_mode.noise_factor);
+ modeFactor_ = std::max(1.0, cameraMode.noiseFactor);
}
-void Noise::Read(boost::property_tree::ptree const &params)
+void Noise::read(boost::property_tree::ptree const &params)
{
- reference_constant_ = params.get<double>("reference_constant");
- reference_slope_ = params.get<double>("reference_slope");
+ referenceConstant_ = params.get<double>("reference_constant");
+ referenceSlope_ = params.get<double>("reference_slope");
}
-void Noise::Prepare(Metadata *image_metadata)
+void Noise::prepare(Metadata *imageMetadata)
{
- struct DeviceStatus device_status;
- device_status.analogue_gain = 1.0; // keep compiler calm
- if (image_metadata->Get("device.status", device_status) == 0) {
+ struct DeviceStatus deviceStatus;
+ deviceStatus.analogueGain = 1.0; // keep compiler calm
+ if (imageMetadata->get("device.status", deviceStatus) == 0) {
// There is a slight question as to exactly how the noise
// profile, specifically the constant part of it, scales. For
// now we assume it all scales the same, and we'll revisit this
// if it proves substantially wrong. NOTE: we may also want to
// make some adjustments based on the camera mode (such as
// binning), if we knew how to discover it...
- double factor = sqrt(device_status.analogue_gain) / mode_factor_;
+ double factor = sqrt(deviceStatus.analogueGain) / modeFactor_;
struct NoiseStatus status;
- status.noise_constant = reference_constant_ * factor;
- status.noise_slope = reference_slope_ * factor;
- image_metadata->Set("noise.status", status);
+ status.noiseConstant = referenceConstant_ * factor;
+ status.noiseSlope = referenceSlope_ * factor;
+ imageMetadata->set("noise.status", status);
LOG(RPiNoise, Debug)
- << "constant " << status.noise_constant
- << " slope " << status.noise_slope;
+ << "constant " << status.noiseConstant
+ << " slope " << status.noiseSlope;
} else
LOG(RPiNoise, Warning) << " no metadata";
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return new Noise(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
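Noise::prepare() scales the gain-1.0 reference profile by sqrt(analogue gain) divided by the mode factor (the camera mode's noise factor, clamped to at least 1 in switchMode()). A short sketch with illustrative numbers:

#include <algorithm>
#include <cmath>
#include <cstdio>

int main()
{
	/* Tuning-file reference profile at analogue gain 1.0 (illustrative). */
	double referenceConstant = 16.0, referenceSlope = 4.0;
	double analogueGain = 8.0;
	double noiseFactor = 2.0;	/* e.g. a 2x2 binned mode */

	double modeFactor = std::max(1.0, noiseFactor);
	double factor = std::sqrt(analogueGain) / modeFactor;
	std::printf("noise constant %.2f slope %.2f\n",
		    referenceConstant * factor, referenceSlope * factor);
	return 0;
}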
diff --git a/src/ipa/raspberrypi/controller/rpi/noise.hpp b/src/ipa/raspberrypi/controller/rpi/noise.hpp
index 1c9de5c8..ed6ffe91 100644
--- a/src/ipa/raspberrypi/controller/rpi/noise.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/noise.hpp
@@ -17,16 +17,16 @@ class Noise : public Algorithm
{
public:
Noise(Controller *controller);
- char const *Name() const override;
- void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
- void Read(boost::property_tree::ptree const &params) override;
- void Prepare(Metadata *image_metadata) override;
+ char const *name() const override;
+ void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
+ void read(boost::property_tree::ptree const &params) override;
+ void prepare(Metadata *imageMetadata) override;
private:
// the noise profile for analogue gain of 1.0
- double reference_constant_;
- double reference_slope_;
- double mode_factor_;
+ double referenceConstant_;
+ double referenceSlope_;
+ double modeFactor_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/sdn.cpp b/src/ipa/raspberrypi/controller/rpi/sdn.cpp
index 93845509..480da38d 100644
--- a/src/ipa/raspberrypi/controller/rpi/sdn.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/sdn.cpp
@@ -27,49 +27,51 @@ Sdn::Sdn(Controller *controller)
{
}
-char const *Sdn::Name() const
+char const *Sdn::name() const
{
return NAME;
}
-void Sdn::Read(boost::property_tree::ptree const &params)
+void Sdn::read(boost::property_tree::ptree const &params)
{
deviation_ = params.get<double>("deviation", 3.2);
strength_ = params.get<double>("strength", 0.75);
}
-void Sdn::Initialise() {}
+void Sdn::initialise()
+{
+}
-void Sdn::Prepare(Metadata *image_metadata)
+void Sdn::prepare(Metadata *imageMetadata)
{
- struct NoiseStatus noise_status = {};
- noise_status.noise_slope = 3.0; // in case no metadata
- if (image_metadata->Get("noise.status", noise_status) != 0)
+ struct NoiseStatus noiseStatus = {};
+ noiseStatus.noiseSlope = 3.0; // in case no metadata
+ if (imageMetadata->get("noise.status", noiseStatus) != 0)
LOG(RPiSdn, Warning) << "no noise profile found";
LOG(RPiSdn, Debug)
- << "Noise profile: constant " << noise_status.noise_constant
- << " slope " << noise_status.noise_slope;
+ << "Noise profile: constant " << noiseStatus.noiseConstant
+ << " slope " << noiseStatus.noiseSlope;
struct DenoiseStatus status;
- status.noise_constant = noise_status.noise_constant * deviation_;
- status.noise_slope = noise_status.noise_slope * deviation_;
+ status.noiseConstant = noiseStatus.noiseConstant * deviation_;
+ status.noiseSlope = noiseStatus.noiseSlope * deviation_;
status.strength = strength_;
status.mode = static_cast<std::underlying_type_t<DenoiseMode>>(mode_);
- image_metadata->Set("denoise.status", status);
+ imageMetadata->set("denoise.status", status);
LOG(RPiSdn, Debug)
- << "programmed constant " << status.noise_constant
- << " slope " << status.noise_slope
+ << "programmed constant " << status.noiseConstant
+ << " slope " << status.noiseSlope
<< " strength " << status.strength;
}
-void Sdn::SetMode(DenoiseMode mode)
+void Sdn::setMode(DenoiseMode mode)
{
// We only distinguish between off and all other modes.
mode_ = mode;
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return (Algorithm *)new Sdn(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
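Sdn::prepare() then multiplies whatever noise profile it finds by the configured deviation and passes its strength straight through. A sketch using the defaults from read() and an assumed noise profile:

#include <cstdio>

int main()
{
	/* Noise profile published by the Noise algorithm (illustrative values). */
	double noiseConstant = 4.0, noiseSlope = 1.5;
	/* SDN tuning parameters, shown here with their defaults from read(). */
	double deviation = 3.2, strength = 0.75;

	std::printf("denoise constant %.2f slope %.2f strength %.2f\n",
		    noiseConstant * deviation, noiseSlope * deviation, strength);
	return 0;
}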
diff --git a/src/ipa/raspberrypi/controller/rpi/sdn.hpp b/src/ipa/raspberrypi/controller/rpi/sdn.hpp
index 2371ce04..d9b18f29 100644
--- a/src/ipa/raspberrypi/controller/rpi/sdn.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/sdn.hpp
@@ -17,11 +17,11 @@ class Sdn : public DenoiseAlgorithm
{
public:
Sdn(Controller *controller = NULL);
- char const *Name() const override;
- void Read(boost::property_tree::ptree const &params) override;
- void Initialise() override;
- void Prepare(Metadata *image_metadata) override;
- void SetMode(DenoiseMode mode) override;
+ char const *name() const override;
+ void read(boost::property_tree::ptree const &params) override;
+ void initialise() override;
+ void prepare(Metadata *imageMetadata) override;
+ void setMode(DenoiseMode mode) override;
private:
double deviation_;
diff --git a/src/ipa/raspberrypi/controller/rpi/sharpen.cpp b/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
index 18825a43..3fe62bc8 100644
--- a/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
@@ -21,23 +21,23 @@ LOG_DEFINE_CATEGORY(RPiSharpen)
#define NAME "rpi.sharpen"
Sharpen::Sharpen(Controller *controller)
- : SharpenAlgorithm(controller), user_strength_(1.0)
+ : SharpenAlgorithm(controller), userStrength_(1.0)
{
}
-char const *Sharpen::Name() const
+char const *Sharpen::name() const
{
return NAME;
}
-void Sharpen::SwitchMode(CameraMode const &camera_mode,
+void Sharpen::switchMode(CameraMode const &cameraMode,
[[maybe_unused]] Metadata *metadata)
{
// can't be less than one, right?
- mode_factor_ = std::max(1.0, camera_mode.noise_factor);
+ modeFactor_ = std::max(1.0, cameraMode.noiseFactor);
}
-void Sharpen::Read(boost::property_tree::ptree const &params)
+void Sharpen::read(boost::property_tree::ptree const &params)
{
threshold_ = params.get<double>("threshold", 1.0);
strength_ = params.get<double>("strength", 1.0);
@@ -48,38 +48,38 @@ void Sharpen::Read(boost::property_tree::ptree const &params)
<< " limit " << limit_;
}
-void Sharpen::SetStrength(double strength)
+void Sharpen::setStrength(double strength)
{
// Note that this function is how an application sets the overall
// sharpening "strength". We call this the "user strength" field
// as there already is a strength_ field - being an internal gain
// parameter that gets passed to the ISP control code. Negative
// values are not allowed - coerce them to zero (no sharpening).
- user_strength_ = std::max(0.0, strength);
+ userStrength_ = std::max(0.0, strength);
}
-void Sharpen::Prepare(Metadata *image_metadata)
+void Sharpen::prepare(Metadata *imageMetadata)
{
- // The user_strength_ affects the algorithm's internal gain directly, but
+ // The userStrength_ affects the algorithm's internal gain directly, but
// we adjust the limit and threshold less aggressively. Using a sqrt
// function is an arbitrary but gentle way of accomplishing this.
- double user_strength_sqrt = sqrt(user_strength_);
+ double userStrengthSqrt = sqrt(userStrength_);
struct SharpenStatus status;
// Binned modes seem to need the sharpening toned down with this
- // pipeline, thus we use the mode_factor here. Also avoid
- // divide-by-zero with the user_strength_sqrt.
- status.threshold = threshold_ * mode_factor_ /
- std::max(0.01, user_strength_sqrt);
- status.strength = strength_ / mode_factor_ * user_strength_;
- status.limit = limit_ / mode_factor_ * user_strength_sqrt;
- // Finally, report any application-supplied parameters that were used.
- status.user_strength = user_strength_;
- image_metadata->Set("sharpen.status", status);
+ // pipeline, thus we use the modeFactor_ here. Also avoid
+ // divide-by-zero with the userStrengthSqrt.
+ status.threshold = threshold_ * modeFactor_ /
+ std::max(0.01, userStrengthSqrt);
+ status.strength = strength_ / modeFactor_ * userStrength_;
+ status.limit = limit_ / modeFactor_ * userStrengthSqrt;
+ /* Finally, report any application-supplied parameters that were used. */
+ status.userStrength = userStrength_;
+ imageMetadata->set("sharpen.status", status);
}
// Register algorithm with the system.
-static Algorithm *Create(Controller *controller)
+static Algorithm *create(Controller *controller)
{
return new Sharpen(controller);
}
-static RegisterAlgorithm reg(NAME, &Create);
+static RegisterAlgorithm reg(NAME, &create);
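Sharpen::prepare() maps the application strength onto the three ISP parameters: the gain scales linearly with userStrength, the limit and threshold move with sqrt(userStrength), and binned modes are toned down through modeFactor. A sketch with illustrative tuning values:

#include <algorithm>
#include <cmath>
#include <cstdio>

int main()
{
	/* Tuning values (illustrative) and the application-requested strength. */
	double threshold = 1.0, strength = 1.0, limit = 1.0;
	double modeFactor = 2.0;			/* binned mode */
	double userStrength = std::max(0.0, 1.5);	/* negative values coerced to zero */

	double userStrengthSqrt = std::sqrt(userStrength);
	double outThreshold = threshold * modeFactor / std::max(0.01, userStrengthSqrt);
	double outStrength = strength / modeFactor * userStrength;
	double outLimit = limit / modeFactor * userStrengthSqrt;
	std::printf("threshold %.3f strength %.3f limit %.3f\n",
		    outThreshold, outStrength, outLimit);
	return 0;
}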
diff --git a/src/ipa/raspberrypi/controller/rpi/sharpen.hpp b/src/ipa/raspberrypi/controller/rpi/sharpen.hpp
index 13a076a8..ced917f3 100644
--- a/src/ipa/raspberrypi/controller/rpi/sharpen.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/sharpen.hpp
@@ -17,18 +17,18 @@ class Sharpen : public SharpenAlgorithm
{
public:
Sharpen(Controller *controller);
- char const *Name() const override;
- void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
- void Read(boost::property_tree::ptree const &params) override;
- void SetStrength(double strength) override;
- void Prepare(Metadata *image_metadata) override;
+ char const *name() const override;
+ void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
+ void read(boost::property_tree::ptree const &params) override;
+ void setStrength(double strength) override;
+ void prepare(Metadata *imageMetadata) override;
private:
double threshold_;
double strength_;
double limit_;
- double mode_factor_;
- double user_strength_;
+ double modeFactor_;
+ double userStrength_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/sharpen_algorithm.hpp b/src/ipa/raspberrypi/controller/sharpen_algorithm.hpp
index ca800308..888f4569 100644
--- a/src/ipa/raspberrypi/controller/sharpen_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/sharpen_algorithm.hpp
@@ -15,7 +15,7 @@ class SharpenAlgorithm : public Algorithm
public:
SharpenAlgorithm(Controller *controller) : Algorithm(controller) {}
// A sharpness control algorithm must provide the following:
- virtual void SetStrength(double strength) = 0;
+ virtual void setStrength(double strength) = 0;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/sharpen_status.h b/src/ipa/raspberrypi/controller/sharpen_status.h
index 7501b191..2b049074 100644
--- a/src/ipa/raspberrypi/controller/sharpen_status.h
+++ b/src/ipa/raspberrypi/controller/sharpen_status.h
@@ -20,7 +20,7 @@ struct SharpenStatus {
// upper limit of the allowed sharpening response
double limit;
// The sharpening strength requested by the user or application.
- double user_strength;
+ double userStrength;
};
#ifdef __cplusplus
diff --git a/src/ipa/raspberrypi/md_parser.hpp b/src/ipa/raspberrypi/md_parser.hpp
index d32d0f54..298d5181 100644
--- a/src/ipa/raspberrypi/md_parser.hpp
+++ b/src/ipa/raspberrypi/md_parser.hpp
@@ -75,40 +75,40 @@ public:
};
MdParser()
- : reset_(true), bits_per_pixel_(0), num_lines_(0), line_length_bytes_(0)
+ : reset_(true), bitsPerPixel_(0), numLines_(0), lineLengthBytes_(0)
{
}
virtual ~MdParser() = default;
- void Reset()
+ void reset()
{
reset_ = true;
}
- void SetBitsPerPixel(int bpp)
+ void setBitsPerPixel(int bpp)
{
- bits_per_pixel_ = bpp;
+ bitsPerPixel_ = bpp;
}
- void SetNumLines(unsigned int num_lines)
+ void setNumLines(unsigned int numLines)
{
- num_lines_ = num_lines;
+ numLines_ = numLines;
}
- void SetLineLengthBytes(unsigned int num_bytes)
+ void setLineLengthBytes(unsigned int numBytes)
{
- line_length_bytes_ = num_bytes;
+ lineLengthBytes_ = numBytes;
}
- virtual Status Parse(libcamera::Span<const uint8_t> buffer,
+ virtual Status parse(libcamera::Span<const uint8_t> buffer,
RegisterMap &registers) = 0;
protected:
bool reset_;
- int bits_per_pixel_;
- unsigned int num_lines_;
- unsigned int line_length_bytes_;
+ int bitsPerPixel_;
+ unsigned int numLines_;
+ unsigned int lineLengthBytes_;
};
/*
@@ -123,7 +123,7 @@ class MdParserSmia final : public MdParser
public:
MdParserSmia(std::initializer_list<uint32_t> registerList);
- MdParser::Status Parse(libcamera::Span<const uint8_t> buffer,
+ MdParser::Status parse(libcamera::Span<const uint8_t> buffer,
RegisterMap &registers) override;
private:
@@ -133,18 +133,18 @@ private:
/*
* Note that error codes > 0 are regarded as non-fatal; codes < 0
* indicate a bad data buffer. Status codes are:
- * PARSE_OK - found all registers, much happiness
- * MISSING_REGS - some registers found; should this be a hard error?
+ * ParseOk - found all registers, much happiness
+ * MissingRegs - some registers found; should this be a hard error?
* The remaining codes are all hard errors.
*/
enum ParseStatus {
- PARSE_OK = 0,
- MISSING_REGS = 1,
- NO_LINE_START = -1,
- ILLEGAL_TAG = -2,
- BAD_DUMMY = -3,
- BAD_LINE_END = -4,
- BAD_PADDING = -5
+ ParseOk = 0,
+ MissingRegs = 1,
+ NoLineStart = -1,
+ IllegalTag = -2,
+ BadDummy = -3,
+ BadLineEnd = -4,
+ BadPadding = -5
};
ParseStatus findRegs(libcamera::Span<const uint8_t> buffer);
diff --git a/src/ipa/raspberrypi/md_parser_smia.cpp b/src/ipa/raspberrypi/md_parser_smia.cpp
index ea5eac41..10d677fb 100644
--- a/src/ipa/raspberrypi/md_parser_smia.cpp
+++ b/src/ipa/raspberrypi/md_parser_smia.cpp
@@ -20,12 +20,12 @@ using namespace libcamera;
* sensors, I think.
*/
-constexpr unsigned int LINE_START = 0x0a;
-constexpr unsigned int LINE_END_TAG = 0x07;
-constexpr unsigned int REG_HI_BITS = 0xaa;
-constexpr unsigned int REG_LOW_BITS = 0xa5;
-constexpr unsigned int REG_VALUE = 0x5a;
-constexpr unsigned int REG_SKIP = 0x55;
+constexpr unsigned int LineStart = 0x0a;
+constexpr unsigned int LineEndTag = 0x07;
+constexpr unsigned int RegHiBits = 0xaa;
+constexpr unsigned int RegLowBits = 0xa5;
+constexpr unsigned int RegValue = 0x5a;
+constexpr unsigned int RegSkip = 0x55;
MdParserSmia::MdParserSmia(std::initializer_list<uint32_t> registerList)
{
@@ -33,7 +33,7 @@ MdParserSmia::MdParserSmia(std::initializer_list<uint32_t> registerList)
offsets_[r] = {};
}
-MdParser::Status MdParserSmia::Parse(libcamera::Span<const uint8_t> buffer,
+MdParser::Status MdParserSmia::parse(libcamera::Span<const uint8_t> buffer,
RegisterMap &registers)
{
if (reset_) {
@@ -41,7 +41,7 @@ MdParser::Status MdParserSmia::Parse(libcamera::Span<const uint8_t> buffer,
* Search again through the metadata for all the registers
* requested.
*/
- ASSERT(bits_per_pixel_);
+ ASSERT(bitsPerPixel_);
for (const auto &kv : offsets_)
offsets_[kv.first] = {};
@@ -53,7 +53,7 @@ MdParser::Status MdParserSmia::Parse(libcamera::Span<const uint8_t> buffer,
*
* In either case, we retry parsing on the next frame.
*/
- if (ret != PARSE_OK)
+ if (ret != ParseOk)
return ERROR;
reset_ = false;
@@ -76,74 +76,74 @@ MdParserSmia::ParseStatus MdParserSmia::findRegs(libcamera::Span<const uint8_t>
{
ASSERT(offsets_.size());
- if (buffer[0] != LINE_START)
- return NO_LINE_START;
+ if (buffer[0] != LineStart)
+ return NoLineStart;
- unsigned int current_offset = 1; /* after the LINE_START */
- unsigned int current_line_start = 0, current_line = 0;
- unsigned int reg_num = 0, regs_done = 0;
+ unsigned int currentOffset = 1; /* after the LineStart */
+ unsigned int currentLineStart = 0, currentLine = 0;
+ unsigned int regNum = 0, regsDone = 0;
while (1) {
- int tag = buffer[current_offset++];
-
- if ((bits_per_pixel_ == 10 &&
- (current_offset + 1 - current_line_start) % 5 == 0) ||
- (bits_per_pixel_ == 12 &&
- (current_offset + 1 - current_line_start) % 3 == 0)) {
- if (buffer[current_offset++] != REG_SKIP)
- return BAD_DUMMY;
+ int tag = buffer[currentOffset++];
+
+ if ((bitsPerPixel_ == 10 &&
+ (currentOffset + 1 - currentLineStart) % 5 == 0) ||
+ (bitsPerPixel_ == 12 &&
+ (currentOffset + 1 - currentLineStart) % 3 == 0)) {
+ if (buffer[currentOffset++] != RegSkip)
+ return BadDummy;
}
- int data_byte = buffer[current_offset++];
+ int dataByte = buffer[currentOffset++];
- if (tag == LINE_END_TAG) {
- if (data_byte != LINE_END_TAG)
- return BAD_LINE_END;
+ if (tag == LineEndTag) {
+ if (dataByte != LineEndTag)
+ return BadLineEnd;
- if (num_lines_ && ++current_line == num_lines_)
- return MISSING_REGS;
+ if (numLines_ && ++currentLine == numLines_)
+ return MissingRegs;
- if (line_length_bytes_) {
- current_offset = current_line_start + line_length_bytes_;
+ if (lineLengthBytes_) {
+ currentOffset = currentLineStart + lineLengthBytes_;
/* Require whole line to be in the buffer (if buffer size set). */
if (buffer.size() &&
- current_offset + line_length_bytes_ > buffer.size())
- return MISSING_REGS;
+ currentOffset + lineLengthBytes_ > buffer.size())
+ return MissingRegs;
- if (buffer[current_offset] != LINE_START)
- return NO_LINE_START;
+ if (buffer[currentOffset] != LineStart)
+ return NoLineStart;
} else {
/* allow a zero line length to mean "hunt for the next line" */
- while (current_offset < buffer.size() &&
- buffer[current_offset] != LINE_START)
- current_offset++;
+ while (currentOffset < buffer.size() &&
+ buffer[currentOffset] != LineStart)
+ currentOffset++;
- if (current_offset == buffer.size())
- return NO_LINE_START;
+ if (currentOffset == buffer.size())
+ return NoLineStart;
}
- /* inc current_offset to after LINE_START */
- current_line_start = current_offset++;
+ /* inc currentOffset to after LineStart */
+ currentLineStart = currentOffset++;
} else {
- if (tag == REG_HI_BITS)
- reg_num = (reg_num & 0xff) | (data_byte << 8);
- else if (tag == REG_LOW_BITS)
- reg_num = (reg_num & 0xff00) | data_byte;
- else if (tag == REG_SKIP)
- reg_num++;
- else if (tag == REG_VALUE) {
- auto reg = offsets_.find(reg_num);
+ if (tag == RegHiBits)
+ regNum = (regNum & 0xff) | (dataByte << 8);
+ else if (tag == RegLowBits)
+ regNum = (regNum & 0xff00) | dataByte;
+ else if (tag == RegSkip)
+ regNum++;
+ else if (tag == RegValue) {
+ auto reg = offsets_.find(regNum);
if (reg != offsets_.end()) {
- offsets_[reg_num] = current_offset - 1;
+ offsets_[regNum] = currentOffset - 1;
- if (++regs_done == offsets_.size())
- return PARSE_OK;
+ if (++regsDone == offsets_.size())
+ return ParseOk;
}
- reg_num++;
+ regNum++;
} else
- return ILLEGAL_TAG;
+ return IllegalTag;
}
}
}
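findRegs() reads the SMIA embedded data as a stream of tag/value byte pairs: 0xaa and 0xa5 load the high and low byte of the current register address, 0x5a carries a register value, 0x55 skips a register, and 0x0a/0x07 mark line start and end. Below is a much-simplified standalone sketch of that decode (decodeLine() is not a name from the tree); it collects every register it sees and ignores the bits-per-pixel dummy bytes and the line-length handling:

#include <cstdint>
#include <cstdio>
#include <map>
#include <vector>

/* Minimal decode of one embedded-data line: returns register -> value. */
std::map<uint32_t, uint8_t> decodeLine(const std::vector<uint8_t> &buf)
{
	std::map<uint32_t, uint8_t> regs;
	if (buf.empty() || buf[0] != 0x0a)		/* LineStart */
		return regs;
	uint32_t regNum = 0;
	for (size_t i = 1; i + 1 < buf.size(); i += 2) {
		uint8_t tag = buf[i], data = buf[i + 1];
		if (tag == 0x07)			/* LineEndTag */
			break;
		else if (tag == 0xaa)			/* RegHiBits */
			regNum = (regNum & 0xff) | (data << 8);
		else if (tag == 0xa5)			/* RegLowBits */
			regNum = (regNum & 0xff00) | data;
		else if (tag == 0x55)			/* RegSkip */
			regNum++;
		else if (tag == 0x5a)			/* RegValue */
			regs[regNum++] = data;
		else
			break;				/* illegal tag */
	}
	return regs;
}

int main()
{
	/* Hypothetical line setting register 0x0160 to 0x03 and 0x0161 to 0xe8. */
	std::vector<uint8_t> line = { 0x0a, 0xaa, 0x01, 0xa5, 0x60,
				      0x5a, 0x03, 0x5a, 0xe8, 0x07, 0x07 };
	for (const auto &[reg, val] : decodeLine(line))
		std::printf("reg 0x%04x = 0x%02x\n", (unsigned)reg, (unsigned)val);
	return 0;
}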
diff --git a/src/ipa/raspberrypi/raspberrypi.cpp b/src/ipa/raspberrypi/raspberrypi.cpp
index c7492a77..f315ebcd 100644
--- a/src/ipa/raspberrypi/raspberrypi.cpp
+++ b/src/ipa/raspberrypi/raspberrypi.cpp
@@ -208,7 +208,7 @@ int IPARPi::init(const IPASettings &settings, IPAInitResult *result)
* that the kernel driver doesn't. We only do this the first time; we don't need
* to re-parse the metadata after a simple mode-switch for no reason.
*/
- helper_ = std::unique_ptr<RPiController::CamHelper>(RPiController::CamHelper::Create(settings.sensorModel));
+ helper_ = std::unique_ptr<RPiController::CamHelper>(RPiController::CamHelper::create(settings.sensorModel));
if (!helper_) {
LOG(IPARPI, Error) << "Could not create camera helper for "
<< settings.sensorModel;
@@ -220,8 +220,8 @@ int IPARPi::init(const IPASettings &settings, IPAInitResult *result)
* to setup the staggered writer class.
*/
int gainDelay, exposureDelay, vblankDelay, sensorMetadata;
- helper_->GetDelays(exposureDelay, gainDelay, vblankDelay);
- sensorMetadata = helper_->SensorEmbeddedDataPresent();
+ helper_->getDelays(exposureDelay, gainDelay, vblankDelay);
+ sensorMetadata = helper_->sensorEmbeddedDataPresent();
result->sensorConfig.gainDelay = gainDelay;
result->sensorConfig.exposureDelay = exposureDelay;
@@ -229,8 +229,8 @@ int IPARPi::init(const IPASettings &settings, IPAInitResult *result)
result->sensorConfig.sensorMetadata = sensorMetadata;
/* Load the tuning file for this sensor. */
- controller_.Read(settings.configurationFile.c_str());
- controller_.Initialise();
+ controller_.read(settings.configurationFile.c_str());
+ controller_.initialise();
/* Return the controls handled by the IPA */
ControlInfoMap::Map ctrlMap = ipaControls;
@@ -249,15 +249,15 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
queueRequest(controls);
}
- controller_.SwitchMode(mode_, &metadata);
+ controller_.switchMode(mode_, &metadata);
	/* switchMode() may supply updated exposure/gain values to use. */

AgcStatus agcStatus;
- agcStatus.shutter_time = 0.0s;
- agcStatus.analogue_gain = 0.0;
+ agcStatus.shutterTime = 0.0s;
+ agcStatus.analogueGain = 0.0;
- metadata.Get("agc.status", agcStatus);
- if (agcStatus.shutter_time && agcStatus.analogue_gain) {
+ metadata.get("agc.status", agcStatus);
+ if (agcStatus.shutterTime && agcStatus.analogueGain) {
ControlList ctrls(sensorCtrls_);
applyAGC(&agcStatus, ctrls);
startConfig->controls = std::move(ctrls);
@@ -271,8 +271,8 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
frameCount_ = 0;
checkCount_ = 0;
if (firstStart_) {
- dropFrameCount_ = helper_->HideFramesStartup();
- mistrustCount_ = helper_->MistrustFramesStartup();
+ dropFrameCount_ = helper_->hideFramesStartup();
+ mistrustCount_ = helper_->mistrustFramesStartup();
/*
* Query the AGC/AWB for how many frames they may take to
@@ -283,18 +283,18 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
*/
unsigned int agcConvergenceFrames = 0;
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
- controller_.GetAlgorithm("agc"));
+ controller_.getAlgorithm("agc"));
if (agc) {
- agcConvergenceFrames = agc->GetConvergenceFrames();
+ agcConvergenceFrames = agc->getConvergenceFrames();
if (agcConvergenceFrames)
agcConvergenceFrames += mistrustCount_;
}
unsigned int awbConvergenceFrames = 0;
RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
- controller_.GetAlgorithm("awb"));
+ controller_.getAlgorithm("awb"));
if (awb) {
- awbConvergenceFrames = awb->GetConvergenceFrames();
+ awbConvergenceFrames = awb->getConvergenceFrames();
if (awbConvergenceFrames)
awbConvergenceFrames += mistrustCount_;
}
@@ -302,12 +302,12 @@ void IPARPi::start(const ControlList &controls, StartConfig *startConfig)
dropFrameCount_ = std::max({ dropFrameCount_, agcConvergenceFrames, awbConvergenceFrames });
LOG(IPARPI, Debug) << "Drop " << dropFrameCount_ << " frames on startup";
} else {
- dropFrameCount_ = helper_->HideFramesModeSwitch();
- mistrustCount_ = helper_->MistrustFramesModeSwitch();
+ dropFrameCount_ = helper_->hideFramesModeSwitch();
+ mistrustCount_ = helper_->mistrustFramesModeSwitch();
}
startConfig->dropFrameCount = dropFrameCount_;
- const Duration maxSensorFrameDuration = mode_.max_frame_length * mode_.line_length;
+ const Duration maxSensorFrameDuration = mode_.maxFrameLength * mode_.lineLength;
startConfig->maxSensorFrameLengthMs = maxSensorFrameDuration.get<std::milli>();
firstStart_ = false;
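On first start the drop count is the largest of the frames the CamHelper wants hidden and the convergence frames AGC/AWB report, the latter counted after the mistrusted frames. A sketch with made-up counts:

#include <algorithm>
#include <cstdio>

int main()
{
	/* Illustrative values for what the CamHelper and the algorithms might report. */
	unsigned int hideFramesStartup = 2;	/* frames the sensor needs to settle */
	unsigned int mistrustCount = 1;		/* frames whose statistics are not trusted */
	unsigned int agcConvergence = 6, awbConvergence = 3;

	/* A convergence count only matters if the algorithm reports one, and it
	 * starts after the mistrusted frames. */
	unsigned int agcFrames = agcConvergence ? agcConvergence + mistrustCount : 0;
	unsigned int awbFrames = awbConvergence ? awbConvergence + mistrustCount : 0;
	unsigned int dropFrameCount = std::max({ hideFramesStartup, agcFrames, awbFrames });
	std::printf("drop %u frames on startup\n", dropFrameCount);
	return 0;
}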
@@ -319,17 +319,17 @@ void IPARPi::setMode(const IPACameraSensorInfo &sensorInfo)
mode_.bitdepth = sensorInfo.bitsPerPixel;
mode_.width = sensorInfo.outputSize.width;
mode_.height = sensorInfo.outputSize.height;
- mode_.sensor_width = sensorInfo.activeAreaSize.width;
- mode_.sensor_height = sensorInfo.activeAreaSize.height;
- mode_.crop_x = sensorInfo.analogCrop.x;
- mode_.crop_y = sensorInfo.analogCrop.y;
+ mode_.sensorWidth = sensorInfo.activeAreaSize.width;
+ mode_.sensorHeight = sensorInfo.activeAreaSize.height;
+ mode_.cropX = sensorInfo.analogCrop.x;
+ mode_.cropY = sensorInfo.analogCrop.y;
/*
	 * Calculate scaling parameters. The scale[XY] factors are determined
* by the ratio between the crop rectangle size and the output size.
*/
- mode_.scale_x = sensorInfo.analogCrop.width / sensorInfo.outputSize.width;
- mode_.scale_y = sensorInfo.analogCrop.height / sensorInfo.outputSize.height;
+ mode_.scaleX = sensorInfo.analogCrop.width / sensorInfo.outputSize.width;
+ mode_.scaleY = sensorInfo.analogCrop.height / sensorInfo.outputSize.height;
/*
* We're not told by the pipeline handler how scaling is split between
@@ -339,30 +339,30 @@ void IPARPi::setMode(const IPACameraSensorInfo &sensorInfo)
*
* \todo Get the pipeline handle to provide the full data
*/
- mode_.bin_x = std::min(2, static_cast<int>(mode_.scale_x));
- mode_.bin_y = std::min(2, static_cast<int>(mode_.scale_y));
+ mode_.binX = std::min(2, static_cast<int>(mode_.scaleX));
+ mode_.binY = std::min(2, static_cast<int>(mode_.scaleY));
/* The noise factor is the square root of the total binning factor. */
- mode_.noise_factor = sqrt(mode_.bin_x * mode_.bin_y);
+ mode_.noiseFactor = sqrt(mode_.binX * mode_.binY);
/*
* Calculate the line length as the ratio between the line length in
* pixels and the pixel rate.
*/
- mode_.line_length = sensorInfo.lineLength * (1.0s / sensorInfo.pixelRate);
+ mode_.lineLength = sensorInfo.lineLength * (1.0s / sensorInfo.pixelRate);
/*
* Set the frame length limits for the mode to ensure exposure and
* framerate calculations are clipped appropriately.
*/
- mode_.min_frame_length = sensorInfo.minFrameLength;
- mode_.max_frame_length = sensorInfo.maxFrameLength;
+ mode_.minFrameLength = sensorInfo.minFrameLength;
+ mode_.maxFrameLength = sensorInfo.maxFrameLength;
/*
* Some sensors may have different sensitivities in different modes;
* the CamHelper will know the correct value.
*/
- mode_.sensitivity = helper_->GetModeSensitivity(mode_);
+ mode_.sensitivity = helper_->getModeSensitivity(mode_);
}
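setMode() reduces the sensor info to the values the controller needs: crop-to-output scale factors, a binning estimate capped at 2, the noise factor as the square root of the total binning, and the line duration used to convert frame lengths in lines into times. A sketch with plausible but assumed numbers for a 2x2 binned mode:

#include <algorithm>
#include <cmath>
#include <cstdio>

int main()
{
	/* Illustrative sensor info: 1640x1232 output from a 3280x2464 analogue crop. */
	double cropWidth = 3280, cropHeight = 2464;
	double outWidth = 1640, outHeight = 1232;
	double lineLengthPixels = 3448, pixelRate = 182400000;
	unsigned int minFrameLength = 1300, maxFrameLength = 65535;	/* in lines */

	double scaleX = cropWidth / outWidth, scaleY = cropHeight / outHeight;
	/* Assume anything above 2x is scaling rather than binning. */
	int binX = std::min(2, (int)scaleX), binY = std::min(2, (int)scaleY);
	double noiseFactor = std::sqrt(binX * binY);
	double lineLength = lineLengthPixels / pixelRate;	/* seconds per line */

	std::printf("scale %.2fx%.2f bin %dx%d noiseFactor %.2f\n",
		    scaleX, scaleY, binX, binY, noiseFactor);
	std::printf("frame duration %.2f .. %.2f ms\n",
		    minFrameLength * lineLength * 1e3, maxFrameLength * lineLength * 1e3);
	return 0;
}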
int IPARPi::configure(const IPACameraSensorInfo &sensorInfo,
@@ -421,7 +421,7 @@ int IPARPi::configure(const IPACameraSensorInfo &sensorInfo,
}
/* Pass the camera mode to the CamHelper to setup algorithms. */
- helper_->SetCameraMode(mode_);
+ helper_->setCameraMode(mode_);
/*
* Initialise this ControlList correctly, even if empty, in case the IPA is
@@ -438,8 +438,8 @@ int IPARPi::configure(const IPACameraSensorInfo &sensorInfo,
/* Supply initial values for gain and exposure. */
AgcStatus agcStatus;
- agcStatus.shutter_time = defaultExposureTime;
- agcStatus.analogue_gain = defaultAnalogueGain;
+ agcStatus.shutterTime = defaultExposureTime;
+ agcStatus.analogueGain = defaultAnalogueGain;
applyAGC(&agcStatus, ctrls);
}
@@ -451,25 +451,25 @@ int IPARPi::configure(const IPACameraSensorInfo &sensorInfo,
* based on the current sensor mode.
*/
ControlInfoMap::Map ctrlMap = ipaControls;
- const Duration minSensorFrameDuration = mode_.min_frame_length * mode_.line_length;
- const Duration maxSensorFrameDuration = mode_.max_frame_length * mode_.line_length;
+ const Duration minSensorFrameDuration = mode_.minFrameLength * mode_.lineLength;
+ const Duration maxSensorFrameDuration = mode_.maxFrameLength * mode_.lineLength;
ctrlMap[&controls::FrameDurationLimits] =
ControlInfo(static_cast<int64_t>(minSensorFrameDuration.get<std::micro>()),
static_cast<int64_t>(maxSensorFrameDuration.get<std::micro>()));
ctrlMap[&controls::AnalogueGain] =
- ControlInfo(1.0f, static_cast<float>(helper_->Gain(maxSensorGainCode_)));
+ ControlInfo(1.0f, static_cast<float>(helper_->gain(maxSensorGainCode_)));
/*
* Calculate the max exposure limit from the frame duration limit as V4L2
* will limit the maximum control value based on the current VBLANK value.
*/
Duration maxShutter = Duration::max();
- helper_->GetVBlanking(maxShutter, minSensorFrameDuration, maxSensorFrameDuration);
+ helper_->getVBlanking(maxShutter, minSensorFrameDuration, maxSensorFrameDuration);
const uint32_t exposureMin = sensorCtrls_.at(V4L2_CID_EXPOSURE).min().get<int32_t>();
ctrlMap[&controls::ExposureTime] =
- ControlInfo(static_cast<int32_t>(helper_->Exposure(exposureMin).get<std::micro>()),
+ ControlInfo(static_cast<int32_t>(helper_->exposure(exposureMin).get<std::micro>()),
static_cast<int32_t>(maxShutter.get<std::micro>()));
result->controlInfo = ControlInfoMap(std::move(ctrlMap), controls::controls);
@@ -536,54 +536,54 @@ void IPARPi::reportMetadata()
* processed can be extracted and placed into the libcamera metadata
* buffer, where an application could query it.
*/
- DeviceStatus *deviceStatus = rpiMetadata_.GetLocked<DeviceStatus>("device.status");
+ DeviceStatus *deviceStatus = rpiMetadata_.getLocked<DeviceStatus>("device.status");
if (deviceStatus) {
libcameraMetadata_.set(controls::ExposureTime,
- deviceStatus->shutter_speed.get<std::micro>());
- libcameraMetadata_.set(controls::AnalogueGain, deviceStatus->analogue_gain);
+ deviceStatus->shutterSpeed.get<std::micro>());
+ libcameraMetadata_.set(controls::AnalogueGain, deviceStatus->analogueGain);
libcameraMetadata_.set(controls::FrameDuration,
- helper_->Exposure(deviceStatus->frame_length).get<std::micro>());
- if (deviceStatus->sensor_temperature)
- libcameraMetadata_.set(controls::SensorTemperature, *deviceStatus->sensor_temperature);
+ helper_->exposure(deviceStatus->frameLength).get<std::micro>());
+ if (deviceStatus->sensorTemperature)
+ libcameraMetadata_.set(controls::SensorTemperature, *deviceStatus->sensorTemperature);
}
- AgcStatus *agcStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
+ AgcStatus *agcStatus = rpiMetadata_.getLocked<AgcStatus>("agc.status");
if (agcStatus) {
libcameraMetadata_.set(controls::AeLocked, agcStatus->locked);
- libcameraMetadata_.set(controls::DigitalGain, agcStatus->digital_gain);
+ libcameraMetadata_.set(controls::DigitalGain, agcStatus->digitalGain);
}
- LuxStatus *luxStatus = rpiMetadata_.GetLocked<LuxStatus>("lux.status");
+ LuxStatus *luxStatus = rpiMetadata_.getLocked<LuxStatus>("lux.status");
if (luxStatus)
libcameraMetadata_.set(controls::Lux, luxStatus->lux);
- AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
+ AwbStatus *awbStatus = rpiMetadata_.getLocked<AwbStatus>("awb.status");
if (awbStatus) {
- libcameraMetadata_.set(controls::ColourGains, { static_cast<float>(awbStatus->gain_r),
- static_cast<float>(awbStatus->gain_b) });
- libcameraMetadata_.set(controls::ColourTemperature, awbStatus->temperature_K);
+ libcameraMetadata_.set(controls::ColourGains, { static_cast<float>(awbStatus->gainR),
+ static_cast<float>(awbStatus->gainB) });
+ libcameraMetadata_.set(controls::ColourTemperature, awbStatus->temperatureK);
}
- BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
+ BlackLevelStatus *blackLevelStatus = rpiMetadata_.getLocked<BlackLevelStatus>("black_level.status");
if (blackLevelStatus)
libcameraMetadata_.set(controls::SensorBlackLevels,
- { static_cast<int32_t>(blackLevelStatus->black_level_r),
- static_cast<int32_t>(blackLevelStatus->black_level_g),
- static_cast<int32_t>(blackLevelStatus->black_level_g),
- static_cast<int32_t>(blackLevelStatus->black_level_b) });
+ { static_cast<int32_t>(blackLevelStatus->blackLevelR),
+ static_cast<int32_t>(blackLevelStatus->blackLevelG),
+ static_cast<int32_t>(blackLevelStatus->blackLevelG),
+ static_cast<int32_t>(blackLevelStatus->blackLevelB) });
- FocusStatus *focusStatus = rpiMetadata_.GetLocked<FocusStatus>("focus.status");
+ FocusStatus *focusStatus = rpiMetadata_.getLocked<FocusStatus>("focus.status");
if (focusStatus && focusStatus->num == 12) {
/*
* We get a 4x3 grid of regions by default. Calculate the average
* FoM over the central two positions to give an overall scene FoM.
* This can change later if it is not deemed suitable.
*/
- int32_t focusFoM = (focusStatus->focus_measures[5] + focusStatus->focus_measures[6]) / 2;
+ int32_t focusFoM = (focusStatus->focusMeasures[5] + focusStatus->focusMeasures[6]) / 2;
libcameraMetadata_.set(controls::FocusFoM, focusFoM);
}
- CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
+ CcmStatus *ccmStatus = rpiMetadata_.getLocked<CcmStatus>("ccm.status");
if (ccmStatus) {
float m[9];
for (unsigned int i = 0; i < 9; i++)
@@ -695,7 +695,7 @@ void IPARPi::queueRequest(const ControlList &controls)
switch (ctrl.first) {
case controls::AE_ENABLE: {
- RPiController::Algorithm *agc = controller_.GetAlgorithm("agc");
+ RPiController::Algorithm *agc = controller_.getAlgorithm("agc");
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set AE_ENABLE - no AGC algorithm";
@@ -703,9 +703,9 @@ void IPARPi::queueRequest(const ControlList &controls)
}
if (ctrl.second.get<bool>() == false)
- agc->Pause();
+ agc->pause();
else
- agc->Resume();
+ agc->resume();
libcameraMetadata_.set(controls::AeEnable, ctrl.second.get<bool>());
break;
@@ -713,7 +713,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::EXPOSURE_TIME: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
- controller_.GetAlgorithm("agc"));
+ controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set EXPOSURE_TIME - no AGC algorithm";
@@ -721,7 +721,7 @@ void IPARPi::queueRequest(const ControlList &controls)
}
/* The control provides units of microseconds. */
- agc->SetFixedShutter(ctrl.second.get<int32_t>() * 1.0us);
+ agc->setFixedShutter(ctrl.second.get<int32_t>() * 1.0us);
libcameraMetadata_.set(controls::ExposureTime, ctrl.second.get<int32_t>());
break;
@@ -729,14 +729,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::ANALOGUE_GAIN: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
- controller_.GetAlgorithm("agc"));
+ controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set ANALOGUE_GAIN - no AGC algorithm";
break;
}
- agc->SetFixedAnalogueGain(ctrl.second.get<float>());
+ agc->setFixedAnalogueGain(ctrl.second.get<float>());
libcameraMetadata_.set(controls::AnalogueGain,
ctrl.second.get<float>());
@@ -745,7 +745,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_METERING_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
- controller_.GetAlgorithm("agc"));
+ controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set AE_METERING_MODE - no AGC algorithm";
@@ -754,7 +754,7 @@ void IPARPi::queueRequest(const ControlList &controls)
int32_t idx = ctrl.second.get<int32_t>();
if (MeteringModeTable.count(idx)) {
- agc->SetMeteringMode(MeteringModeTable.at(idx));
+ agc->setMeteringMode(MeteringModeTable.at(idx));
libcameraMetadata_.set(controls::AeMeteringMode, idx);
} else {
LOG(IPARPI, Error) << "Metering mode " << idx
@@ -765,7 +765,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_CONSTRAINT_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
- controller_.GetAlgorithm("agc"));
+ controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set AE_CONSTRAINT_MODE - no AGC algorithm";
@@ -774,7 +774,7 @@ void IPARPi::queueRequest(const ControlList &controls)
int32_t idx = ctrl.second.get<int32_t>();
if (ConstraintModeTable.count(idx)) {
- agc->SetConstraintMode(ConstraintModeTable.at(idx));
+ agc->setConstraintMode(ConstraintModeTable.at(idx));
libcameraMetadata_.set(controls::AeConstraintMode, idx);
} else {
LOG(IPARPI, Error) << "Constraint mode " << idx
@@ -785,7 +785,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AE_EXPOSURE_MODE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
- controller_.GetAlgorithm("agc"));
+ controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set AE_EXPOSURE_MODE - no AGC algorithm";
@@ -794,7 +794,7 @@ void IPARPi::queueRequest(const ControlList &controls)
int32_t idx = ctrl.second.get<int32_t>();
if (ExposureModeTable.count(idx)) {
- agc->SetExposureMode(ExposureModeTable.at(idx));
+ agc->setExposureMode(ExposureModeTable.at(idx));
libcameraMetadata_.set(controls::AeExposureMode, idx);
} else {
LOG(IPARPI, Error) << "Exposure mode " << idx
@@ -805,7 +805,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::EXPOSURE_VALUE: {
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
- controller_.GetAlgorithm("agc"));
+ controller_.getAlgorithm("agc"));
if (!agc) {
LOG(IPARPI, Warning)
<< "Could not set EXPOSURE_VALUE - no AGC algorithm";
@@ -817,14 +817,14 @@ void IPARPi::queueRequest(const ControlList &controls)
* So convert to 2^EV
*/
double ev = pow(2.0, ctrl.second.get<float>());
- agc->SetEv(ev);
+ agc->setEv(ev);
libcameraMetadata_.set(controls::ExposureValue,
ctrl.second.get<float>());
break;
}
case controls::AWB_ENABLE: {
- RPiController::Algorithm *awb = controller_.GetAlgorithm("awb");
+ RPiController::Algorithm *awb = controller_.getAlgorithm("awb");
if (!awb) {
LOG(IPARPI, Warning)
<< "Could not set AWB_ENABLE - no AWB algorithm";
@@ -832,9 +832,9 @@ void IPARPi::queueRequest(const ControlList &controls)
}
if (ctrl.second.get<bool>() == false)
- awb->Pause();
+ awb->pause();
else
- awb->Resume();
+ awb->resume();
libcameraMetadata_.set(controls::AwbEnable,
ctrl.second.get<bool>());
@@ -843,7 +843,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::AWB_MODE: {
RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
- controller_.GetAlgorithm("awb"));
+ controller_.getAlgorithm("awb"));
if (!awb) {
LOG(IPARPI, Warning)
<< "Could not set AWB_MODE - no AWB algorithm";
@@ -852,7 +852,7 @@ void IPARPi::queueRequest(const ControlList &controls)
int32_t idx = ctrl.second.get<int32_t>();
if (AwbModeTable.count(idx)) {
- awb->SetMode(AwbModeTable.at(idx));
+ awb->setMode(AwbModeTable.at(idx));
libcameraMetadata_.set(controls::AwbMode, idx);
} else {
LOG(IPARPI, Error) << "AWB mode " << idx
@@ -864,14 +864,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::COLOUR_GAINS: {
auto gains = ctrl.second.get<Span<const float>>();
RPiController::AwbAlgorithm *awb = dynamic_cast<RPiController::AwbAlgorithm *>(
- controller_.GetAlgorithm("awb"));
+ controller_.getAlgorithm("awb"));
if (!awb) {
LOG(IPARPI, Warning)
<< "Could not set COLOUR_GAINS - no AWB algorithm";
break;
}
- awb->SetManualGains(gains[0], gains[1]);
+ awb->setManualGains(gains[0], gains[1]);
if (gains[0] != 0.0f && gains[1] != 0.0f)
/* A gain of 0.0f will switch back to auto mode. */
libcameraMetadata_.set(controls::ColourGains,
@@ -881,14 +881,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::BRIGHTNESS: {
RPiController::ContrastAlgorithm *contrast = dynamic_cast<RPiController::ContrastAlgorithm *>(
- controller_.GetAlgorithm("contrast"));
+ controller_.getAlgorithm("contrast"));
if (!contrast) {
LOG(IPARPI, Warning)
<< "Could not set BRIGHTNESS - no contrast algorithm";
break;
}
- contrast->SetBrightness(ctrl.second.get<float>() * 65536);
+ contrast->setBrightness(ctrl.second.get<float>() * 65536);
libcameraMetadata_.set(controls::Brightness,
ctrl.second.get<float>());
break;
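The 65536 multiplier above appears to map the Brightness control, a float nominally in the [-1.0, 1.0] range, onto the 16-bit scale used internally by the contrast algorithm. A tiny worked example of that scaling (values are illustrative only):

#include <cstdio>

int main()
{
	/* Brightness control values (nominally -1.0 .. 1.0) scaled by 65536. */
	for (float b : { -1.0f, -0.5f, 0.0f, 0.5f, 1.0f })
		std::printf("Brightness %+.1f -> offset %d\n",
			    b, static_cast<int>(b * 65536));
	return 0;
}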
@@ -896,14 +896,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::CONTRAST: {
RPiController::ContrastAlgorithm *contrast = dynamic_cast<RPiController::ContrastAlgorithm *>(
- controller_.GetAlgorithm("contrast"));
+ controller_.getAlgorithm("contrast"));
if (!contrast) {
LOG(IPARPI, Warning)
<< "Could not set CONTRAST - no contrast algorithm";
break;
}
- contrast->SetContrast(ctrl.second.get<float>());
+ contrast->setContrast(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Contrast,
ctrl.second.get<float>());
break;
@@ -911,14 +911,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::SATURATION: {
RPiController::CcmAlgorithm *ccm = dynamic_cast<RPiController::CcmAlgorithm *>(
- controller_.GetAlgorithm("ccm"));
+ controller_.getAlgorithm("ccm"));
if (!ccm) {
LOG(IPARPI, Warning)
<< "Could not set SATURATION - no ccm algorithm";
break;
}
- ccm->SetSaturation(ctrl.second.get<float>());
+ ccm->setSaturation(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Saturation,
ctrl.second.get<float>());
break;
@@ -926,14 +926,14 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::SHARPNESS: {
RPiController::SharpenAlgorithm *sharpen = dynamic_cast<RPiController::SharpenAlgorithm *>(
- controller_.GetAlgorithm("sharpen"));
+ controller_.getAlgorithm("sharpen"));
if (!sharpen) {
LOG(IPARPI, Warning)
<< "Could not set SHARPNESS - no sharpen algorithm";
break;
}
- sharpen->SetStrength(ctrl.second.get<float>());
+ sharpen->setStrength(ctrl.second.get<float>());
libcameraMetadata_.set(controls::Sharpness,
ctrl.second.get<float>());
break;
@@ -952,7 +952,7 @@ void IPARPi::queueRequest(const ControlList &controls)
case controls::NOISE_REDUCTION_MODE: {
RPiController::DenoiseAlgorithm *sdn = dynamic_cast<RPiController::DenoiseAlgorithm *>(
- controller_.GetAlgorithm("SDN"));
+ controller_.getAlgorithm("SDN"));
if (!sdn) {
LOG(IPARPI, Warning)
<< "Could not set NOISE_REDUCTION_MODE - no SDN algorithm";
@@ -962,7 +962,7 @@ void IPARPi::queueRequest(const ControlList &controls)
int32_t idx = ctrl.second.get<int32_t>();
auto mode = DenoiseModeTable.find(idx);
if (mode != DenoiseModeTable.end()) {
- sdn->SetMode(mode->second);
+ sdn->setMode(mode->second);
/*
* \todo If the colour denoise is not going to run due to an
@@ -1014,7 +1014,7 @@ void IPARPi::prepareISP(const ISPConfig &data)
* This may overwrite the DeviceStatus using values from the sensor
* metadata, and may also do additional custom processing.
*/
- helper_->Prepare(embeddedBuffer, rpiMetadata_);
+ helper_->prepare(embeddedBuffer, rpiMetadata_);
/* Done with embedded data now, return to pipeline handler asap. */
if (data.embeddedBufferPresent)
@@ -1030,7 +1030,7 @@ void IPARPi::prepareISP(const ISPConfig &data)
* current frame, or any other bits of metadata that were added
* in helper_->Prepare().
*/
- rpiMetadata_.Merge(lastMetadata);
+ rpiMetadata_.merge(lastMetadata);
processPending_ = false;
return;
}
@@ -1040,48 +1040,48 @@ void IPARPi::prepareISP(const ISPConfig &data)
ControlList ctrls(ispCtrls_);
- controller_.Prepare(&rpiMetadata_);
+ controller_.prepare(&rpiMetadata_);
/* Lock the metadata buffer to avoid constant locks/unlocks. */
std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
- AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
+ AwbStatus *awbStatus = rpiMetadata_.getLocked<AwbStatus>("awb.status");
if (awbStatus)
applyAWB(awbStatus, ctrls);
- CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
+ CcmStatus *ccmStatus = rpiMetadata_.getLocked<CcmStatus>("ccm.status");
if (ccmStatus)
applyCCM(ccmStatus, ctrls);
- AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
+ AgcStatus *dgStatus = rpiMetadata_.getLocked<AgcStatus>("agc.status");
if (dgStatus)
applyDG(dgStatus, ctrls);
- AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
+ AlscStatus *lsStatus = rpiMetadata_.getLocked<AlscStatus>("alsc.status");
if (lsStatus)
applyLS(lsStatus, ctrls);
- ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
+ ContrastStatus *contrastStatus = rpiMetadata_.getLocked<ContrastStatus>("contrast.status");
if (contrastStatus)
applyGamma(contrastStatus, ctrls);
- BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
+ BlackLevelStatus *blackLevelStatus = rpiMetadata_.getLocked<BlackLevelStatus>("black_level.status");
if (blackLevelStatus)
applyBlackLevel(blackLevelStatus, ctrls);
- GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
+ GeqStatus *geqStatus = rpiMetadata_.getLocked<GeqStatus>("geq.status");
if (geqStatus)
applyGEQ(geqStatus, ctrls);
- DenoiseStatus *denoiseStatus = rpiMetadata_.GetLocked<DenoiseStatus>("denoise.status");
+ DenoiseStatus *denoiseStatus = rpiMetadata_.getLocked<DenoiseStatus>("denoise.status");
if (denoiseStatus)
applyDenoise(denoiseStatus, ctrls);
- SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
+ SharpenStatus *sharpenStatus = rpiMetadata_.getLocked<SharpenStatus>("sharpen.status");
if (sharpenStatus)
applySharpen(sharpenStatus, ctrls);
- DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
+ DpcStatus *dpcStatus = rpiMetadata_.getLocked<DpcStatus>("dpc.status");
if (dpcStatus)
applyDPC(dpcStatus, ctrls);
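The run of getLocked() calls above follows the Metadata locking pattern: lock the container once via std::unique_lock, then pull raw pointers to each status struct; the pointers stay valid only while the lock is held. The simplified stand-in class below is not the real RPiController::Metadata (which lives in controller/metadata.hpp); it is only meant to illustrate that usage pattern:

#include <any>
#include <iostream>
#include <map>
#include <mutex>
#include <string>

/* Simplified stand-in for the Metadata container, for illustration only. */
class MiniMetadata
{
public:
	void lock() { mutex_.lock(); }
	void unlock() { mutex_.unlock(); }

	template<typename T>
	void set(const std::string &tag, const T &value)
	{
		std::lock_guard<std::mutex> l(mutex_);
		data_[tag] = value;
	}

	/* Caller must already hold the lock; pointer is valid while locked. */
	template<typename T>
	T *getLocked(const std::string &tag)
	{
		auto it = data_.find(tag);
		if (it == data_.end())
			return nullptr;
		return std::any_cast<T>(&it->second);
	}

private:
	std::mutex mutex_;
	std::map<std::string, std::any> data_;
};

struct DemoStatus {
	double gain;
};

int main()
{
	MiniMetadata metadata;
	metadata.set("demo.status", DemoStatus{ 2.5 });

	/* Lock once, then pull out as many status blocks as needed. */
	std::unique_lock<MiniMetadata> lock(metadata);
	if (DemoStatus *status = metadata.getLocked<DemoStatus>("demo.status"))
		std::cout << "demo gain " << status->gain << "\n";
	return 0;
}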
@@ -1097,13 +1097,13 @@ void IPARPi::fillDeviceStatus(const ControlList &sensorControls)
int32_t gainCode = sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
int32_t vblank = sensorControls.get(V4L2_CID_VBLANK).get<int32_t>();
- deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
- deviceStatus.analogue_gain = helper_->Gain(gainCode);
- deviceStatus.frame_length = mode_.height + vblank;
+ deviceStatus.shutterSpeed = helper_->exposure(exposureLines);
+ deviceStatus.analogueGain = helper_->gain(gainCode);
+ deviceStatus.frameLength = mode_.height + vblank;
LOG(IPARPI, Debug) << "Metadata - " << deviceStatus;
- rpiMetadata_.Set("device.status", deviceStatus);
+ rpiMetadata_.set("device.status", deviceStatus);
}
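For reference, frameLength here is measured in sensor lines (active height plus vertical blanking), so multiplying by the line duration gives the frame duration. A quick arithmetic sketch with made-up sensor numbers:

#include <cstdio>

int main()
{
	/* Hypothetical sensor mode numbers, purely for illustration. */
	const unsigned int height = 1080;  /* active lines */
	const unsigned int vblank = 720;   /* blanking lines */
	const double lineLengthUs = 29.6;  /* line duration in microseconds */

	const unsigned int frameLength = height + vblank;
	const double frameDurationMs = frameLength * lineLengthUs / 1000.0;

	std::printf("frameLength = %u lines, frame duration ~= %.2f ms (%.1f fps)\n",
		    frameLength, frameDurationMs, 1000.0 / frameDurationMs);
	return 0;
}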
void IPARPi::processStats(unsigned int bufferId)
@@ -1117,11 +1117,11 @@ void IPARPi::processStats(unsigned int bufferId)
Span<uint8_t> mem = it->second.planes()[0];
bcm2835_isp_stats *stats = reinterpret_cast<bcm2835_isp_stats *>(mem.data());
RPiController::StatisticsPtr statistics = std::make_shared<bcm2835_isp_stats>(*stats);
- helper_->Process(statistics, rpiMetadata_);
- controller_.Process(statistics, &rpiMetadata_);
+ helper_->process(statistics, rpiMetadata_);
+ controller_.process(statistics, &rpiMetadata_);
struct AgcStatus agcStatus;
- if (rpiMetadata_.Get("agc.status", agcStatus) == 0) {
+ if (rpiMetadata_.get("agc.status", agcStatus) == 0) {
ControlList ctrls(sensorCtrls_);
applyAGC(&agcStatus, ctrls);
@@ -1131,19 +1131,19 @@ void IPARPi::processStats(unsigned int bufferId)
void IPARPi::applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls)
{
- LOG(IPARPI, Debug) << "Applying WB R: " << awbStatus->gain_r << " B: "
- << awbStatus->gain_b;
+ LOG(IPARPI, Debug) << "Applying WB R: " << awbStatus->gainR << " B: "
+ << awbStatus->gainB;
ctrls.set(V4L2_CID_RED_BALANCE,
- static_cast<int32_t>(awbStatus->gain_r * 1000));
+ static_cast<int32_t>(awbStatus->gainR * 1000));
ctrls.set(V4L2_CID_BLUE_BALANCE,
- static_cast<int32_t>(awbStatus->gain_b * 1000));
+ static_cast<int32_t>(awbStatus->gainB * 1000));
}
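The V4L2 balance controls used above are integer controls, so the floating-point AWB gains are converted to fixed point with a factor of 1000 (the same 1/1000 scaling is used for the digital gain further down). A trivial sketch of the conversion, with hypothetical gains:

#include <cstdint>
#include <cstdio>

int main()
{
	/* Hypothetical AWB gains; the driver-facing controls take x1000 fixed point. */
	const double gainR = 1.8, gainB = 2.1;
	std::printf("RED_BALANCE = %d, BLUE_BALANCE = %d\n",
		    static_cast<int>(gainR * 1000),
		    static_cast<int>(gainB * 1000));
	return 0;
}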
void IPARPi::applyFrameDurations(Duration minFrameDuration, Duration maxFrameDuration)
{
- const Duration minSensorFrameDuration = mode_.min_frame_length * mode_.line_length;
- const Duration maxSensorFrameDuration = mode_.max_frame_length * mode_.line_length;
+ const Duration minSensorFrameDuration = mode_.minFrameLength * mode_.lineLength;
+ const Duration maxSensorFrameDuration = mode_.maxFrameLength * mode_.lineLength;
/*
* This will only be applied once AGC recalculations occur.
@@ -1164,20 +1164,20 @@ void IPARPi::applyFrameDurations(Duration minFrameDuration, Duration maxFrameDur
/*
* Calculate the maximum exposure time possible for the AGC to use.
- * GetVBlanking() will update maxShutter with the largest exposure
+ * getVBlanking() will update maxShutter with the largest exposure
* value possible.
*/
Duration maxShutter = Duration::max();
- helper_->GetVBlanking(maxShutter, minFrameDuration_, maxFrameDuration_);
+ helper_->getVBlanking(maxShutter, minFrameDuration_, maxFrameDuration_);
RPiController::AgcAlgorithm *agc = dynamic_cast<RPiController::AgcAlgorithm *>(
- controller_.GetAlgorithm("agc"));
- agc->SetMaxShutter(maxShutter);
+ controller_.getAlgorithm("agc"));
+ agc->setMaxShutter(maxShutter);
}
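The middle of applyFrameDurations() is elided by the hunk above, so the following is not the function's actual body; purely as an illustration of the general idea of fitting a requested frame duration into the range a sensor mode can support, one might clamp as follows (numbers invented for the example):

#include <algorithm>
#include <cstdio>

int main()
{
	/* Illustrative sensor limits, in microseconds. */
	const double minSensorFrameDuration = 16667.0;  /* ~60 fps */
	const double maxSensorFrameDuration = 250000.0; /* ~4 fps */

	/* A requested duration outside the range gets clamped into it. */
	for (double requested : { 5000.0, 33333.0, 500000.0 }) {
		double applied = std::clamp(requested, minSensorFrameDuration,
					    maxSensorFrameDuration);
		std::printf("requested %.0f us -> applied %.0f us\n",
			    requested, applied);
	}
	return 0;
}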
void IPARPi::applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls)
{
- int32_t gainCode = helper_->GainCode(agcStatus->analogue_gain);
+ int32_t gainCode = helper_->gainCode(agcStatus->analogueGain);
/*
* Ensure anything larger than the max gain code will not be passed to
@@ -1186,15 +1186,15 @@ void IPARPi::applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls)
*/
gainCode = std::min<int32_t>(gainCode, maxSensorGainCode_);
- /* GetVBlanking might clip exposure time to the fps limits. */
- Duration exposure = agcStatus->shutter_time;
- int32_t vblanking = helper_->GetVBlanking(exposure, minFrameDuration_, maxFrameDuration_);
- int32_t exposureLines = helper_->ExposureLines(exposure);
+ /* getVBlanking might clip exposure time to the fps limits. */
+ Duration exposure = agcStatus->shutterTime;
+ int32_t vblanking = helper_->getVBlanking(exposure, minFrameDuration_, maxFrameDuration_);
+ int32_t exposureLines = helper_->exposureLines(exposure);
LOG(IPARPI, Debug) << "Applying AGC Exposure: " << exposure
<< " (Shutter lines: " << exposureLines << ", AGC requested "
- << agcStatus->shutter_time << ") Gain: "
- << agcStatus->analogue_gain << " (Gain Code: "
+ << agcStatus->shutterTime << ") Gain: "
+ << agcStatus->analogueGain << " (Gain Code: "
<< gainCode << ")";
/*
@@ -1210,7 +1210,7 @@ void IPARPi::applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls)
void IPARPi::applyDG(const struct AgcStatus *dgStatus, ControlList &ctrls)
{
ctrls.set(V4L2_CID_DIGITAL_GAIN,
- static_cast<int32_t>(dgStatus->digital_gain * 1000));
+ static_cast<int32_t>(dgStatus->digitalGain * 1000));
}
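Two things happen in applyAGC() above: the analogue gain is turned into a sensor gain code and clamped to maxSensorGainCode_, and the exposure time is turned into an integer number of lines by the camera helper. The snippet below reproduces only the clamping and the lines arithmetic with invented numbers; the real conversions are sensor specific and live in the cam_helper classes:

#include <algorithm>
#include <cstdint>
#include <cstdio>

int main()
{
	/* Invented values for illustration. */
	const int32_t maxSensorGainCode = 978;
	const double lineLengthUs = 29.6;

	int32_t gainCode = 1400; /* e.g. what an oversized requested gain maps to */
	gainCode = std::min<int32_t>(gainCode, maxSensorGainCode);

	const double exposureUs = 10000.0; /* 10 ms requested exposure */
	const int exposureLines = static_cast<int>(exposureUs / lineLengthUs);

	std::printf("gain code %d, exposure %.1f ms -> %d lines\n",
		    static_cast<int>(gainCode), exposureUs / 1000.0, exposureLines);
	return 0;
}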
void IPARPi::applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls)
@@ -1250,9 +1250,9 @@ void IPARPi::applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, Co
bcm2835_isp_black_level blackLevel;
blackLevel.enabled = 1;
- blackLevel.black_level_r = blackLevelStatus->black_level_r;
- blackLevel.black_level_g = blackLevelStatus->black_level_g;
- blackLevel.black_level_b = blackLevelStatus->black_level_b;
+ blackLevel.black_level_r = blackLevelStatus->blackLevelR;
+ blackLevel.black_level_g = blackLevelStatus->blackLevelG;
+ blackLevel.black_level_b = blackLevelStatus->blackLevelB;
ControlValue c(Span<const uint8_t>{ reinterpret_cast<uint8_t *>(&blackLevel),
sizeof(blackLevel) });
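The bcm2835 ISP takes these extended controls as opaque structs, which is why the status values are copied into a bcm2835_isp_black_level and handed over as a span of bytes. A standalone analogue of that packing step (C++20), using std::span and a made-up struct rather than the kernel header:

#include <cstdint>
#include <cstdio>
#include <span>

struct DemoBlackLevel {
	uint16_t r, g, b;
};

int main()
{
	DemoBlackLevel bl{ 4096, 4096, 4096 };

	/* View the struct as raw bytes, as done for the ISP control above. */
	std::span<const uint8_t> bytes{ reinterpret_cast<const uint8_t *>(&bl),
					sizeof(bl) };
	std::printf("payload of %zu bytes, first byte 0x%02x\n",
		    bytes.size(), static_cast<unsigned>(bytes[0]));
	return 0;
}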
@@ -1281,8 +1281,8 @@ void IPARPi::applyDenoise(const struct DenoiseStatus *denoiseStatus, ControlList
DenoiseMode mode = static_cast<DenoiseMode>(denoiseStatus->mode);
denoise.enabled = mode != DenoiseMode::Off;
- denoise.constant = denoiseStatus->noise_constant;
- denoise.slope.num = 1000 * denoiseStatus->noise_slope;
+ denoise.constant = denoiseStatus->noiseConstant;
+ denoise.slope.num = 1000 * denoiseStatus->noiseSlope;
denoise.slope.den = 1000;
denoise.strength.num = 1000 * denoiseStatus->strength;
denoise.strength.den = 1000;
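The slope and strength fields above are rationals (num/den), so the floating-point denoise parameters are encoded as value * 1000 over a denominator of 1000; the multiplication truncates toward zero when stored in the integer numerator. A tiny sketch of that encoding with a hypothetical slope value:

#include <cstdint>
#include <cstdio>

struct Rational {
	uint32_t num;
	uint32_t den;
};

int main()
{
	/* Hypothetical denoise slope from the algorithm output. */
	const double noiseSlope = 3.25;

	Rational slope;
	slope.num = static_cast<uint32_t>(1000 * noiseSlope); /* 3250 */
	slope.den = 1000;

	std::printf("slope = %u/%u ~= %.3f\n", slope.num, slope.den,
		    static_cast<double>(slope.num) / slope.den);
	return 0;
}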