Diffstat (limited to 'src/ipa/raspberrypi/controller')
-rw-r--r--  src/ipa/raspberrypi/controller/agc_algorithm.hpp | 8
-rw-r--r--  src/ipa/raspberrypi/controller/agc_status.h | 12
-rw-r--r--  src/ipa/raspberrypi/controller/algorithm.hpp | 6
-rw-r--r--  src/ipa/raspberrypi/controller/awb_algorithm.hpp | 1
-rw-r--r--  src/ipa/raspberrypi/controller/camera_mode.h | 8
-rw-r--r--  src/ipa/raspberrypi/controller/controller.cpp | 19
-rw-r--r--  src/ipa/raspberrypi/controller/denoise_algorithm.hpp | 23
-rw-r--r--  src/ipa/raspberrypi/controller/denoise_status.h | 24
-rw-r--r--  src/ipa/raspberrypi/controller/device_status.cpp | 21
-rw-r--r--  src/ipa/raspberrypi/controller/device_status.h | 39
-rw-r--r--  src/ipa/raspberrypi/controller/logging.hpp | 30
-rw-r--r--  src/ipa/raspberrypi/controller/metadata.hpp | 70
-rw-r--r--  src/ipa/raspberrypi/controller/pwl.cpp | 30
-rw-r--r--  src/ipa/raspberrypi/controller/pwl.hpp | 3
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/agc.cpp | 526
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/agc.hpp | 50
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/alsc.cpp | 78
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/awb.cpp | 213
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/awb.hpp | 17
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/black_level.cpp | 11
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/ccm.cpp | 26
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/ccm.hpp | 3
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/contrast.cpp | 29
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/contrast.hpp | 5
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/dpc.cpp | 8
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/focus.cpp | 2
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/geq.cpp | 22
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/lux.cpp | 30
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/lux.hpp | 7
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/noise.cpp | 14
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/noise.hpp | 2
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/sdn.cpp | 38
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/sdn.hpp | 5
-rw-r--r--  src/ipa/raspberrypi/controller/rpi/sharpen.cpp | 11
-rw-r--r--  src/ipa/raspberrypi/controller/sdn_status.h | 23
35 files changed, 874 insertions, 540 deletions
diff --git a/src/ipa/raspberrypi/controller/agc_algorithm.hpp b/src/ipa/raspberrypi/controller/agc_algorithm.hpp
index b4ea54fb..61595ea2 100644
--- a/src/ipa/raspberrypi/controller/agc_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/agc_algorithm.hpp
@@ -6,6 +6,8 @@
*/
#pragma once
+#include <libcamera/base/utils.h>
+
#include "algorithm.hpp"
namespace RPiController {
@@ -15,9 +17,11 @@ class AgcAlgorithm : public Algorithm
public:
AgcAlgorithm(Controller *controller) : Algorithm(controller) {}
// An AGC algorithm must provide the following:
+ virtual unsigned int GetConvergenceFrames() const = 0;
virtual void SetEv(double ev) = 0;
- virtual void SetFlickerPeriod(double flicker_period) = 0;
- virtual void SetFixedShutter(double fixed_shutter) = 0; // microseconds
+ virtual void SetFlickerPeriod(libcamera::utils::Duration flicker_period) = 0;
+ virtual void SetFixedShutter(libcamera::utils::Duration fixed_shutter) = 0;
+ virtual void SetMaxShutter(libcamera::utils::Duration max_shutter) = 0;
virtual void SetFixedAnalogueGain(double fixed_analogue_gain) = 0;
virtual void SetMeteringMode(std::string const &metering_mode_name) = 0;
virtual void SetExposureMode(std::string const &exposure_mode_name) = 0;
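The AGC interface above now takes libcamera::utils::Duration instead of raw microsecond doubles, and gains GetConvergenceFrames() and SetMaxShutter(). A caller-side sketch, not part of the patch (the helper name and the numeric values are illustrative; it assumes Duration converts implicitly from std::chrono durations, as the 0s initialisers later in this series rely on):

#include <chrono>

#include "agc_algorithm.hpp"

using namespace std::chrono_literals;

// Illustrative only: push settings into an AGC implementation and ask how
// many startup frames the pipeline should drop while it converges.
static unsigned int configureAgc(RPiController::AgcAlgorithm &agc)
{
	agc.SetMaxShutter(33ms);       // cap exposure at roughly one 30fps frame
	agc.SetFlickerPeriod(10ms);    // avoid 100Hz mains flicker
	agc.SetFixedShutter(0s);       // zero leaves the shutter on auto
	agc.SetFixedAnalogueGain(0.0); // zero leaves the gain on auto
	return agc.GetConvergenceFrames();
}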
diff --git a/src/ipa/raspberrypi/controller/agc_status.h b/src/ipa/raspberrypi/controller/agc_status.h
index 10381c90..20cb1b62 100644
--- a/src/ipa/raspberrypi/controller/agc_status.h
+++ b/src/ipa/raspberrypi/controller/agc_status.h
@@ -6,6 +6,8 @@
*/
#pragma once
+#include <libcamera/base/utils.h>
+
// The AGC algorithm should post the following structure into the image's
// "agc.status" metadata.
@@ -18,17 +20,17 @@ extern "C" {
// ignored until then.
struct AgcStatus {
- double total_exposure_value; // value for all exposure and gain for this image
- double target_exposure_value; // (unfiltered) target total exposure AGC is aiming for
- double shutter_time;
+ libcamera::utils::Duration total_exposure_value; // value for all exposure and gain for this image
+ libcamera::utils::Duration target_exposure_value; // (unfiltered) target total exposure AGC is aiming for
+ libcamera::utils::Duration shutter_time;
double analogue_gain;
char exposure_mode[32];
char constraint_mode[32];
char metering_mode[32];
double ev;
- double flicker_period;
+ libcamera::utils::Duration flicker_period;
int floating_region_enable;
- double fixed_shutter;
+ libcamera::utils::Duration fixed_shutter;
double fixed_analogue_gain;
double digital_gain;
int locked;
diff --git a/src/ipa/raspberrypi/controller/algorithm.hpp b/src/ipa/raspberrypi/controller/algorithm.hpp
index 6196b2f9..5123c87b 100644
--- a/src/ipa/raspberrypi/controller/algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/algorithm.hpp
@@ -12,9 +12,7 @@
#include <string>
#include <memory>
#include <map>
-#include <atomic>
-#include "logging.hpp"
#include "controller.hpp"
#include <boost/property_tree/ptree.hpp>
@@ -30,7 +28,7 @@ public:
: controller_(controller), paused_(false)
{
}
- virtual ~Algorithm() {}
+ virtual ~Algorithm() = default;
virtual char const *Name() const = 0;
virtual bool IsPaused() const { return paused_; }
virtual void Pause() { paused_ = true; }
@@ -47,7 +45,7 @@ public:
private:
Controller *controller_;
- std::atomic<bool> paused_;
+ bool paused_;
};
// This code is for automatic registration of Front End algorithms with the
diff --git a/src/ipa/raspberrypi/controller/awb_algorithm.hpp b/src/ipa/raspberrypi/controller/awb_algorithm.hpp
index 5be0c9f4..96f88afc 100644
--- a/src/ipa/raspberrypi/controller/awb_algorithm.hpp
+++ b/src/ipa/raspberrypi/controller/awb_algorithm.hpp
@@ -15,6 +15,7 @@ class AwbAlgorithm : public Algorithm
public:
AwbAlgorithm(Controller *controller) : Algorithm(controller) {}
// An AWB algorithm must provide the following:
+ virtual unsigned int GetConvergenceFrames() const = 0;
virtual void SetMode(std::string const &mode_name) = 0;
virtual void SetManualGains(double manual_r, double manual_b) = 0;
};
diff --git a/src/ipa/raspberrypi/controller/camera_mode.h b/src/ipa/raspberrypi/controller/camera_mode.h
index 920f11be..65888230 100644
--- a/src/ipa/raspberrypi/controller/camera_mode.h
+++ b/src/ipa/raspberrypi/controller/camera_mode.h
@@ -8,6 +8,8 @@
#include <libcamera/transform.h>
+#include <libcamera/base/utils.h>
+
// Description of a "camera mode", holding enough information for control
// algorithms to adapt their behaviour to the different modes of the camera,
// including binning, scaling, cropping etc.
@@ -33,10 +35,12 @@ struct CameraMode {
double scale_x, scale_y;
// scaling of the noise compared to the native sensor mode
double noise_factor;
- // line time in nanoseconds
- double line_length;
+ // line time
+ libcamera::utils::Duration line_length;
// any camera transform *not* reflected already in the camera tuning
libcamera::Transform transform;
+ // minimum and maximum frame lengths in units of lines
+ uint32_t min_frame_length, max_frame_length;
};
#ifdef __cplusplus
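With line_length now a Duration and the new frame-length limits expressed in lines, a mode's achievable frame durations follow by simple multiplication. A small worked sketch with made-up values:

#include <chrono>
#include <iostream>

#include "camera_mode.h"

using namespace std::chrono_literals;
using namespace libcamera; /* for the Duration operator<< overload */

int main()
{
	CameraMode mode{};
	mode.line_length = 14.5us;    // made-up line time
	mode.min_frame_length = 1096; // in lines
	mode.max_frame_length = 32767;

	// Frame duration limits are just frame length (in lines) * line time.
	utils::Duration min_frame_duration = mode.min_frame_length * mode.line_length;
	utils::Duration max_frame_duration = mode.max_frame_length * mode.line_length;
	std::cout << "frame duration range " << min_frame_duration
		  << " to " << max_frame_duration << std::endl;
	return 0;
}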
diff --git a/src/ipa/raspberrypi/controller/controller.cpp b/src/ipa/raspberrypi/controller/controller.cpp
index 22461cc4..d3433ad2 100644
--- a/src/ipa/raspberrypi/controller/controller.cpp
+++ b/src/ipa/raspberrypi/controller/controller.cpp
@@ -5,6 +5,8 @@
* controller.cpp - ISP controller
*/
+#include <libcamera/base/log.h>
+
#include "algorithm.hpp"
#include "controller.hpp"
@@ -12,6 +14,9 @@
#include <boost/property_tree/ptree.hpp>
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiController)
Controller::Controller()
: switch_mode_called_(false) {}
@@ -27,7 +32,6 @@ Controller::~Controller() {}
void Controller::Read(char const *filename)
{
- RPI_LOG("Controller starting");
boost::property_tree::ptree root;
boost::property_tree::read_json(filename, root);
for (auto const &key_and_value : root) {
@@ -36,10 +40,9 @@ void Controller::Read(char const *filename)
algo->Read(key_and_value.second);
algorithms_.push_back(AlgorithmPtr(algo));
} else
- RPI_LOG("WARNING: No algorithm found for \""
- << key_and_value.first << "\"");
+ LOG(RPiController, Warning)
+ << "No algorithm found for \"" << key_and_value.first << "\"";
}
- RPI_LOG("Controller finished");
}
Algorithm *Controller::CreateAlgorithm(char const *name)
@@ -50,39 +53,31 @@ Algorithm *Controller::CreateAlgorithm(char const *name)
void Controller::Initialise()
{
- RPI_LOG("Controller starting");
for (auto &algo : algorithms_)
algo->Initialise();
- RPI_LOG("Controller finished");
}
void Controller::SwitchMode(CameraMode const &camera_mode, Metadata *metadata)
{
- RPI_LOG("Controller starting");
for (auto &algo : algorithms_)
algo->SwitchMode(camera_mode, metadata);
switch_mode_called_ = true;
- RPI_LOG("Controller finished");
}
void Controller::Prepare(Metadata *image_metadata)
{
- RPI_LOG("Controller::Prepare starting");
assert(switch_mode_called_);
for (auto &algo : algorithms_)
if (!algo->IsPaused())
algo->Prepare(image_metadata);
- RPI_LOG("Controller::Prepare finished");
}
void Controller::Process(StatisticsPtr stats, Metadata *image_metadata)
{
- RPI_LOG("Controller::Process starting");
assert(switch_mode_called_);
for (auto &algo : algorithms_)
if (!algo->IsPaused())
algo->Process(stats, image_metadata);
- RPI_LOG("Controller::Process finished");
}
Metadata &Controller::GetGlobalMetadata()
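The RPI_LOG/RPI_WARN macros give way to libcamera's category-based logger: each source file defines its own category and logs at an explicit severity, with verbosity selectable at run time through the LIBCAMERA_LOG_LEVELS environment variable. The pattern reduced to a standalone sketch (the category name RPiExample and the helper function are made up):

#include <string>

#include <libcamera/base/log.h>

using namespace libcamera;

// One category per source file, registered with the libcamera logger.
LOG_DEFINE_CATEGORY(RPiExample)

void reportMissingAlgorithm(const std::string &name)
{
	LOG(RPiExample, Warning) << "No algorithm found for \"" << name << "\"";
	LOG(RPiExample, Debug) << "continuing without \"" << name << "\"";
}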
diff --git a/src/ipa/raspberrypi/controller/denoise_algorithm.hpp b/src/ipa/raspberrypi/controller/denoise_algorithm.hpp
new file mode 100644
index 00000000..39fcd7e9
--- /dev/null
+++ b/src/ipa/raspberrypi/controller/denoise_algorithm.hpp
@@ -0,0 +1,23 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2021, Raspberry Pi (Trading) Limited
+ *
+ * denoise.hpp - Denoise control algorithm interface
+ */
+#pragma once
+
+#include "algorithm.hpp"
+
+namespace RPiController {
+
+enum class DenoiseMode { Off, ColourOff, ColourFast, ColourHighQuality };
+
+class DenoiseAlgorithm : public Algorithm
+{
+public:
+ DenoiseAlgorithm(Controller *controller) : Algorithm(controller) {}
+ // A Denoise algorithm must provide the following:
+ virtual void SetMode(DenoiseMode mode) = 0;
+};
+
+} // namespace RPiController
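A concrete denoise algorithm only needs to implement Name() and SetMode() on top of the usual Algorithm hooks. A minimal skeleton, purely illustrative and not the rpi.sdn implementation changed later in this patch:

#include "denoise_algorithm.hpp"

namespace RPiController {

// Skeleton implementation, for illustration only.
class ExampleDenoise : public DenoiseAlgorithm
{
public:
	ExampleDenoise(Controller *controller)
		: DenoiseAlgorithm(controller), mode_(DenoiseMode::ColourFast) {}
	char const *Name() const override { return "example.denoise"; }
	void SetMode(DenoiseMode mode) override { mode_ = mode; }
	void Prepare([[maybe_unused]] Metadata *image_metadata) override
	{
		// A real implementation would post a DenoiseStatus here,
		// honouring mode_.
	}

private:
	DenoiseMode mode_;
};

} // namespace RPiController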
diff --git a/src/ipa/raspberrypi/controller/denoise_status.h b/src/ipa/raspberrypi/controller/denoise_status.h
new file mode 100644
index 00000000..67a3c361
--- /dev/null
+++ b/src/ipa/raspberrypi/controller/denoise_status.h
@@ -0,0 +1,24 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
+ *
+ * denoise_status.h - Denoise control algorithm status
+ */
+#pragma once
+
+// This stores the parameters required for Denoise.
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+struct DenoiseStatus {
+ double noise_constant;
+ double noise_slope;
+ double strength;
+ unsigned int mode;
+};
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/src/ipa/raspberrypi/controller/device_status.cpp b/src/ipa/raspberrypi/controller/device_status.cpp
new file mode 100644
index 00000000..f052ea8b
--- /dev/null
+++ b/src/ipa/raspberrypi/controller/device_status.cpp
@@ -0,0 +1,21 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2021, Raspberry Pi (Trading) Limited
+ *
+ * device_status.cpp - device (image sensor) status
+ */
+#include "device_status.h"
+
+using namespace libcamera; /* for the Duration operator<< overload */
+
+std::ostream &operator<<(std::ostream &out, const DeviceStatus &d)
+{
+ out << "Exposure: " << d.shutter_speed
+ << " Frame length: " << d.frame_length
+ << " Gain: " << d.analogue_gain
+ << " Aperture: " << d.aperture
+ << " Lens: " << d.lens_position
+ << " Flash: " << d.flash_intensity;
+
+ return out;
+}
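Since DeviceStatus now carries Durations and has a stream operator, a whole status can be logged in one statement. A usage sketch with illustrative values:

#include <chrono>
#include <iostream>

#include "device_status.h"

using namespace std::chrono_literals;

int main()
{
	DeviceStatus status;         // the new constructor zeroes every field
	status.shutter_speed = 10ms; // stored as a libcamera::utils::Duration
	status.frame_length = 1100;  // lines
	status.analogue_gain = 2.0;

	std::cout << status << std::endl; // uses the operator<< added above
	return 0;
}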
diff --git a/src/ipa/raspberrypi/controller/device_status.h b/src/ipa/raspberrypi/controller/device_status.h
index aa08608b..c4a5d9c8 100644
--- a/src/ipa/raspberrypi/controller/device_status.h
+++ b/src/ipa/raspberrypi/controller/device_status.h
@@ -1,30 +1,39 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
*
* device_status.h - device (image sensor) status
*/
#pragma once
-// Definition of "device metadata" which stores things like shutter time and
-// analogue gain that downstream control algorithms will want to know.
+#include <iostream>
-#ifdef __cplusplus
-extern "C" {
-#endif
+#include <libcamera/base/utils.h>
+
+/*
+ * Definition of "device metadata" which stores things like shutter time and
+ * analogue gain that downstream control algorithms will want to know.
+ */
struct DeviceStatus {
- // time shutter is open, in microseconds
- double shutter_speed;
+ DeviceStatus()
+ : shutter_speed(std::chrono::seconds(0)), frame_length(0),
+ analogue_gain(0.0), lens_position(0.0), aperture(0.0),
+ flash_intensity(0.0)
+ {
+ }
+
+ friend std::ostream &operator<<(std::ostream &out, const DeviceStatus &d);
+
+ /* time shutter is open */
+ libcamera::utils::Duration shutter_speed;
+ /* frame length given in number of lines */
+ uint32_t frame_length;
double analogue_gain;
- // 1.0/distance-in-metres, or 0 if unknown
+ /* 1.0/distance-in-metres, or 0 if unknown */
double lens_position;
- // 1/f so that brightness quadruples when this doubles, or 0 if unknown
+ /* 1/f so that brightness quadruples when this doubles, or 0 if unknown */
double aperture;
- // proportional to brightness with 0 = no flash, 1 = maximum flash
+ /* proportional to brightness with 0 = no flash, 1 = maximum flash */
double flash_intensity;
};
-
-#ifdef __cplusplus
-}
-#endif
diff --git a/src/ipa/raspberrypi/controller/logging.hpp b/src/ipa/raspberrypi/controller/logging.hpp
deleted file mode 100644
index f0d306b6..00000000
--- a/src/ipa/raspberrypi/controller/logging.hpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Limited
- *
- * logging.hpp - logging macros
- */
-#pragma once
-
-#include <iostream>
-
-#ifndef RPI_LOGGING_ENABLE
-#define RPI_LOGGING_ENABLE 0
-#endif
-
-#ifndef RPI_WARNING_ENABLE
-#define RPI_WARNING_ENABLE 1
-#endif
-
-#define RPI_LOG(stuff) \
- do { \
- if (RPI_LOGGING_ENABLE) \
- std::cout << __FUNCTION__ << ": " << stuff << "\n"; \
- } while (0)
-
-#define RPI_WARN(stuff) \
- do { \
- if (RPI_WARNING_ENABLE) \
- std::cout << __FUNCTION__ << " ***WARNING*** " \
- << stuff << "\n"; \
- } while (0)
diff --git a/src/ipa/raspberrypi/controller/metadata.hpp b/src/ipa/raspberrypi/controller/metadata.hpp
index f3a8dfab..fd6aac88 100644
--- a/src/ipa/raspberrypi/controller/metadata.hpp
+++ b/src/ipa/raspberrypi/controller/metadata.hpp
@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
*
* metadata.hpp - general metadata class
*/
@@ -8,68 +8,104 @@
// A simple class for carrying arbitrary metadata, for example about an image.
-#include <string>
-#include <mutex>
+#include <any>
#include <map>
#include <memory>
-
-#include <boost/any.hpp>
+#include <mutex>
+#include <string>
namespace RPiController {
class Metadata
{
public:
- template<typename T> void Set(std::string const &tag, T const &value)
+ Metadata() = default;
+
+ Metadata(Metadata const &other)
{
- std::lock_guard<std::mutex> lock(mutex_);
+ std::scoped_lock other_lock(other.mutex_);
+ data_ = other.data_;
+ }
+
+ Metadata(Metadata &&other)
+ {
+ std::scoped_lock other_lock(other.mutex_);
+ data_ = std::move(other.data_);
+ other.data_.clear();
+ }
+
+ template<typename T>
+ void Set(std::string const &tag, T const &value)
+ {
+ std::scoped_lock lock(mutex_);
data_[tag] = value;
}
- template<typename T> int Get(std::string const &tag, T &value) const
+
+ template<typename T>
+ int Get(std::string const &tag, T &value) const
{
- std::lock_guard<std::mutex> lock(mutex_);
+ std::scoped_lock lock(mutex_);
auto it = data_.find(tag);
if (it == data_.end())
return -1;
- value = boost::any_cast<T>(it->second);
+ value = std::any_cast<T>(it->second);
return 0;
}
+
void Clear()
{
- std::lock_guard<std::mutex> lock(mutex_);
+ std::scoped_lock lock(mutex_);
data_.clear();
}
+
Metadata &operator=(Metadata const &other)
{
- std::lock_guard<std::mutex> lock(mutex_);
- std::lock_guard<std::mutex> other_lock(other.mutex_);
+ std::scoped_lock lock(mutex_, other.mutex_);
data_ = other.data_;
return *this;
}
- template<typename T> T *GetLocked(std::string const &tag)
+
+ Metadata &operator=(Metadata &&other)
+ {
+ std::scoped_lock lock(mutex_, other.mutex_);
+ data_ = std::move(other.data_);
+ other.data_.clear();
+ return *this;
+ }
+
+ void Merge(Metadata &other)
+ {
+ std::scoped_lock lock(mutex_, other.mutex_);
+ data_.merge(other.data_);
+ }
+
+ template<typename T>
+ T *GetLocked(std::string const &tag)
{
// This allows in-place access to the Metadata contents,
// for which you should be holding the lock.
auto it = data_.find(tag);
if (it == data_.end())
return nullptr;
- return boost::any_cast<T>(&it->second);
+ return std::any_cast<T>(&it->second);
}
+
template<typename T>
void SetLocked(std::string const &tag, T const &value)
{
// Use this only if you're holding the lock yourself.
data_[tag] = value;
}
+
// Note: use of (lowercase) lock and unlock means you can create scoped
// locks with the standard lock classes.
- // e.g. std::lock_guard<PisP::Metadata> lock(metadata)
+ // e.g. std::lock_guard<RPiController::Metadata> lock(metadata)
void lock() { mutex_.lock(); }
void unlock() { mutex_.unlock(); }
private:
mutable std::mutex mutex_;
- std::map<std::string, boost::any> data_;
+ std::map<std::string, std::any> data_;
};
typedef std::shared_ptr<Metadata> MetadataPtr;
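The metadata store switches to std::any and std::scoped_lock and gains move operations plus Merge(). Typical usage, including the lowercase lock()/unlock() that lets Metadata act as a Lockable type, shown with made-up tag names:

#include <iostream>
#include <mutex>

#include "metadata.hpp"

int main()
{
	RPiController::Metadata metadata;

	// Thread-safe set/get; Get() returns -1 when the tag is missing.
	metadata.Set("example.gain", 2.5);
	double gain = 1.0;
	if (metadata.Get("example.gain", gain) == 0)
		std::cout << "gain " << gain << std::endl;

	// In-place access requires holding the lock yourself.
	{
		std::lock_guard<RPiController::Metadata> lock(metadata);
		double *p = metadata.GetLocked<double>("example.gain");
		if (p)
			*p *= 2.0;
	}

	// Merge() moves entries across without overwriting tags that
	// already exist (std::map::merge semantics).
	RPiController::Metadata other;
	other.Set("example.offset", 16.0);
	metadata.Merge(other);
	return 0;
}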
diff --git a/src/ipa/raspberrypi/controller/pwl.cpp b/src/ipa/raspberrypi/controller/pwl.cpp
index aa134a1f..130c820b 100644
--- a/src/ipa/raspberrypi/controller/pwl.cpp
+++ b/src/ipa/raspberrypi/controller/pwl.cpp
@@ -114,6 +114,36 @@ Pwl::PerpType Pwl::Invert(Point const &xy, Point &perp, int &span,
return PerpType::None;
}
+Pwl Pwl::Inverse(bool *true_inverse, const double eps) const
+{
+ bool appended = false, prepended = false, neither = false;
+ Pwl inverse;
+
+ for (Point const &p : points_) {
+ if (inverse.Empty())
+ inverse.Append(p.y, p.x, eps);
+ else if (std::abs(inverse.points_.back().x - p.y) <= eps ||
+ std::abs(inverse.points_.front().x - p.y) <= eps)
+ /* do nothing */;
+ else if (p.y > inverse.points_.back().x) {
+ inverse.Append(p.y, p.x, eps);
+ appended = true;
+ } else if (p.y < inverse.points_.front().x) {
+ inverse.Prepend(p.y, p.x, eps);
+ prepended = true;
+ } else
+ neither = true;
+ }
+
+ // This is not a proper inverse if we found ourselves putting points
+ // onto both ends of the inverse, or if there were points that couldn't
+ // go on either.
+ if (true_inverse)
+ *true_inverse = !(neither || (appended && prepended));
+
+ return inverse;
+}
+
Pwl Pwl::Compose(Pwl const &other, const double eps) const
{
double this_x = points_[0].x, this_y = points_[0].y;
diff --git a/src/ipa/raspberrypi/controller/pwl.hpp b/src/ipa/raspberrypi/controller/pwl.hpp
index 4f168551..484672f6 100644
--- a/src/ipa/raspberrypi/controller/pwl.hpp
+++ b/src/ipa/raspberrypi/controller/pwl.hpp
@@ -80,6 +80,9 @@ public:
};
PerpType Invert(Point const &xy, Point &perp, int &span,
const double eps = 1e-6) const;
+ // Compute the inverse function. Indicate if it is a proper (true)
+ // inverse, or only a best effort (e.g. input was non-monotonic).
+ Pwl Inverse(bool *true_inverse = nullptr, const double eps = 1e-6) const;
// Compose two Pwls together, doing "this" first and "other" after.
Pwl Compose(Pwl const &other, const double eps = 1e-6) const;
// Apply function to (x,y) values at every control point.
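Inverse() reflects the control points about y = x, and the appended/prepended/neither bookkeeping flags when the input was not monotonic, in which case the result is only a best effort. A standalone illustration of that monotonicity criterion, deliberately written without the Pwl class:

#include <algorithm>
#include <iostream>
#include <vector>

struct Point {
	double x, y;
};

// A piecewise-linear function given by control points has a true inverse
// exactly when its y values are monotonic, so that swapping (x, y) yields
// another single-valued function. This captures the intent of the
// appended/prepended/neither bookkeeping in Pwl::Inverse() above.
static bool hasTrueInverse(const std::vector<Point> &points)
{
	bool increasing = std::is_sorted(points.begin(), points.end(),
					 [](Point const &a, Point const &b) { return a.y < b.y; });
	bool decreasing = std::is_sorted(points.begin(), points.end(),
					 [](Point const &a, Point const &b) { return a.y > b.y; });
	return increasing || decreasing;
}

int main()
{
	std::vector<Point> gamma = { { 0.0, 0.0 }, { 0.5, 0.7 }, { 1.0, 1.0 } };
	std::vector<Point> bump = { { 0.0, 0.0 }, { 0.5, 1.0 }, { 1.0, 0.0 } };
	std::cout << hasTrueInverse(gamma) << " " << hasTrueInverse(bump) << std::endl; // 1 0
	return 0;
}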
diff --git a/src/ipa/raspberrypi/controller/rpi/agc.cpp b/src/ipa/raspberrypi/controller/rpi/agc.cpp
index df4d3647..f57783f8 100644
--- a/src/ipa/raspberrypi/controller/rpi/agc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/agc.cpp
@@ -9,16 +9,21 @@
#include "linux/bcm2835-isp.h"
+#include <libcamera/base/log.h>
+
#include "../awb_status.h"
#include "../device_status.h"
#include "../histogram.hpp"
-#include "../logging.hpp"
#include "../lux_status.h"
#include "../metadata.hpp"
#include "agc.hpp"
using namespace RPiController;
+using namespace libcamera;
+using libcamera::utils::Duration;
+
+LOG_DEFINE_CATEGORY(RPiAgc)
#define NAME "rpi.agc"
@@ -51,19 +56,26 @@ read_metering_modes(std::map<std::string, AgcMeteringMode> &metering_modes,
return first;
}
-static int read_double_list(std::vector<double> &list,
- boost::property_tree::ptree const &params)
+static int read_list(std::vector<double> &list,
+ boost::property_tree::ptree const &params)
{
for (auto &p : params)
list.push_back(p.second.get_value<double>());
return list.size();
}
+static int read_list(std::vector<Duration> &list,
+ boost::property_tree::ptree const &params)
+{
+ for (auto &p : params)
+ list.push_back(p.second.get_value<double>() * 1us);
+ return list.size();
+}
+
void AgcExposureMode::Read(boost::property_tree::ptree const &params)
{
- int num_shutters =
- read_double_list(shutter, params.get_child("shutter"));
- int num_ags = read_double_list(gain, params.get_child("gain"));
+ int num_shutters = read_list(shutter, params.get_child("shutter"));
+ int num_ags = read_list(gain, params.get_child("gain"));
if (num_shutters < 2 || num_ags < 2)
throw std::runtime_error(
"AgcConfig: must have at least two entries in exposure profile");
@@ -128,7 +140,7 @@ static std::string read_constraint_modes(
void AgcConfig::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("AgcConfig");
+ LOG(RPiAgc, Debug) << "AgcConfig";
default_metering_mode = read_metering_modes(
metering_modes, params.get_child("metering_modes"));
default_exposure_mode = read_exposure_modes(
@@ -138,25 +150,28 @@ void AgcConfig::Read(boost::property_tree::ptree const &params)
Y_target.Read(params.get_child("y_target"));
speed = params.get<double>("speed", 0.2);
startup_frames = params.get<uint16_t>("startup_frames", 10);
+ convergence_frames = params.get<unsigned int>("convergence_frames", 6);
fast_reduce_threshold =
params.get<double>("fast_reduce_threshold", 0.4);
base_ev = params.get<double>("base_ev", 1.0);
+ // Start with quite a low value as ramping up is easier than ramping down.
+ default_exposure_time = params.get<double>("default_exposure_time", 1000) * 1us;
+ default_analogue_gain = params.get<double>("default_analogue_gain", 1.0);
}
Agc::Agc(Controller *controller)
: AgcAlgorithm(controller), metering_mode_(nullptr),
exposure_mode_(nullptr), constraint_mode_(nullptr),
- frame_count_(0), lock_count_(0)
+ frame_count_(0), lock_count_(0),
+ last_target_exposure_(0s),
+ ev_(1.0), flicker_period_(0s),
+ max_shutter_(0s), fixed_shutter_(0s), fixed_analogue_gain_(0.0)
{
- ev_ = status_.ev = 1.0;
- flicker_period_ = status_.flicker_period = 0.0;
- fixed_shutter_ = status_.fixed_shutter = 0;
- fixed_analogue_gain_ = status_.fixed_analogue_gain = 0.0;
- // set to zero initially, so we can tell it's not been calculated
- status_.total_exposure_value = 0.0;
- status_.target_exposure_value = 0.0;
- status_.locked = false;
- output_status_ = status_;
+ memset(&awb_, 0, sizeof(awb_));
+ // Setting status_.total_exposure_value to zero initially tells us
+ // it's not been calculated yet (i.e. Process hasn't yet run).
+ memset(&status_, 0, sizeof(status_));
+ status_.ev = ev_;
}
char const *Agc::Name() const
@@ -166,7 +181,7 @@ char const *Agc::Name() const
void Agc::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("Agc");
+ LOG(RPiAgc, Debug) << "Agc";
config_.Read(params);
// Set the config's defaults (which are the first ones it read) as our
// current modes, until someone changes them. (they're all known to
@@ -177,122 +192,155 @@ void Agc::Read(boost::property_tree::ptree const &params)
exposure_mode_ = &config_.exposure_modes[exposure_mode_name_];
constraint_mode_name_ = config_.default_constraint_mode;
constraint_mode_ = &config_.constraint_modes[constraint_mode_name_];
+ // Set up the "last shutter/gain" values, in case AGC starts "disabled".
+ status_.shutter_time = config_.default_exposure_time;
+ status_.analogue_gain = config_.default_analogue_gain;
+}
+
+bool Agc::IsPaused() const
+{
+ return false;
+}
+
+void Agc::Pause()
+{
+ fixed_shutter_ = status_.shutter_time;
+ fixed_analogue_gain_ = status_.analogue_gain;
+}
+
+void Agc::Resume()
+{
+ fixed_shutter_ = 0s;
+ fixed_analogue_gain_ = 0;
+}
+
+unsigned int Agc::GetConvergenceFrames() const
+{
+ // If shutter and gain have been explicitly set, there is no
+ // convergence to happen, so no need to drop any frames - return zero.
+ if (fixed_shutter_ && fixed_analogue_gain_)
+ return 0;
+ else
+ return config_.convergence_frames;
}
void Agc::SetEv(double ev)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
ev_ = ev;
}
-void Agc::SetFlickerPeriod(double flicker_period)
+void Agc::SetFlickerPeriod(Duration flicker_period)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
flicker_period_ = flicker_period;
}
-void Agc::SetFixedShutter(double fixed_shutter)
+void Agc::SetMaxShutter(Duration max_shutter)
+{
+ max_shutter_ = max_shutter;
+}
+
+void Agc::SetFixedShutter(Duration fixed_shutter)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
fixed_shutter_ = fixed_shutter;
+ // Set this in case someone calls Pause() straight after.
+ status_.shutter_time = clipShutter(fixed_shutter_);
}
void Agc::SetFixedAnalogueGain(double fixed_analogue_gain)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
fixed_analogue_gain_ = fixed_analogue_gain;
+ // Set this in case someone calls Pause() straight after.
+ status_.analogue_gain = fixed_analogue_gain;
}
void Agc::SetMeteringMode(std::string const &metering_mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
metering_mode_name_ = metering_mode_name;
}
void Agc::SetExposureMode(std::string const &exposure_mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
exposure_mode_name_ = exposure_mode_name;
}
void Agc::SetConstraintMode(std::string const &constraint_mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
constraint_mode_name_ = constraint_mode_name;
}
void Agc::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
Metadata *metadata)
{
- // On a mode switch, it's possible the exposure profile could change,
- // so we run through the dividing up of exposure/gain again and
- // write the results into the metadata we've been given.
- if (status_.total_exposure_value) {
- housekeepConfig();
- divvyupExposure();
- writeAndFinish(metadata, false);
+ housekeepConfig();
+
+ Duration fixed_shutter = clipShutter(fixed_shutter_);
+ if (fixed_shutter && fixed_analogue_gain_) {
+ // We're going to reset the algorithm here with these fixed values.
+
+ fetchAwbStatus(metadata);
+ double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
+ ASSERT(min_colour_gain != 0.0);
+
+ // This is the equivalent of computeTargetExposure and applyDigitalGain.
+ target_.total_exposure_no_dg = fixed_shutter * fixed_analogue_gain_;
+ target_.total_exposure = target_.total_exposure_no_dg / min_colour_gain;
+
+ // Equivalent of filterExposure. This resets any "history".
+ filtered_ = target_;
+
+ // Equivalent of divideUpExposure.
+ filtered_.shutter = fixed_shutter;
+ filtered_.analogue_gain = fixed_analogue_gain_;
+ } else if (status_.total_exposure_value) {
+ // On a mode switch, it's possible the exposure profile could change,
+ // or a fixed exposure/gain might be set so we divide up the exposure/
+ // gain again, but we don't change any target values.
+ divideUpExposure();
+ } else {
+ // We come through here on startup, when at least one of the shutter
+ // or gain has not been fixed. We must still write those values out so
+ // that they will be applied immediately. We supply some arbitrary defaults
+ // for any that weren't set.
+
+ // Equivalent of divideUpExposure.
+ filtered_.shutter = fixed_shutter ? fixed_shutter : config_.default_exposure_time;
+ filtered_.analogue_gain = fixed_analogue_gain_ ? fixed_analogue_gain_ : config_.default_analogue_gain;
}
+
+ writeAndFinish(metadata, false);
}
void Agc::Prepare(Metadata *image_metadata)
{
- AgcStatus status;
- {
- std::unique_lock<std::mutex> lock(output_mutex_);
- status = output_status_;
- }
- int lock_count = lock_count_;
- lock_count_ = 0;
- status.digital_gain = 1.0;
+ status_.digital_gain = 1.0;
+ fetchAwbStatus(image_metadata); // always fetch it so that Process knows it's been done
+
if (status_.total_exposure_value) {
// Process has run, so we have meaningful values.
DeviceStatus device_status;
if (image_metadata->Get("device.status", device_status) == 0) {
- double actual_exposure = device_status.shutter_speed *
- device_status.analogue_gain;
+ Duration actual_exposure = device_status.shutter_speed *
+ device_status.analogue_gain;
if (actual_exposure) {
- status.digital_gain =
+ status_.digital_gain =
status_.total_exposure_value /
actual_exposure;
- RPI_LOG("Want total exposure " << status_.total_exposure_value);
+ LOG(RPiAgc, Debug) << "Want total exposure " << status_.total_exposure_value;
// Never ask for a gain < 1.0, and also impose
// some upper limit. Make it customisable?
- status.digital_gain = std::max(
+ status_.digital_gain = std::max(
1.0,
- std::min(status.digital_gain, 4.0));
- RPI_LOG("Actual exposure " << actual_exposure);
- RPI_LOG("Use digital_gain " << status.digital_gain);
- RPI_LOG("Effective exposure " << actual_exposure * status.digital_gain);
+ std::min(status_.digital_gain, 4.0));
+ LOG(RPiAgc, Debug) << "Actual exposure " << actual_exposure;
+ LOG(RPiAgc, Debug) << "Use digital_gain " << status_.digital_gain;
+ LOG(RPiAgc, Debug) << "Effective exposure "
+ << actual_exposure * status_.digital_gain;
// Decide whether AEC/AGC has converged.
- // Insist AGC is steady for MAX_LOCK_COUNT
- // frames before we say we are "locked".
- // (The hard-coded constants may need to
- // become customisable.)
- if (status.target_exposure_value) {
-#define MAX_LOCK_COUNT 3
- double err = 0.10 * status.target_exposure_value + 200;
- if (actual_exposure <
- status.target_exposure_value + err
- && actual_exposure >
- status.target_exposure_value - err)
- lock_count_ =
- std::min(lock_count + 1,
- MAX_LOCK_COUNT);
- else if (actual_exposure <
- status.target_exposure_value
- + 1.5 * err &&
- actual_exposure >
- status.target_exposure_value
- - 1.5 * err)
- lock_count_ = lock_count;
- RPI_LOG("Lock count: " << lock_count_);
- }
+ updateLockStatus(device_status);
}
} else
- RPI_LOG(Name() << ": no device metadata");
- status.locked = lock_count_ >= MAX_LOCK_COUNT;
- //printf("%s\n", status.locked ? "+++++++++" : "-");
- image_metadata->Set("agc.status", status);
+ LOG(RPiAgc, Warning) << Name() << ": no device metadata";
+ image_metadata->Set("agc.status", status_);
}
}
@@ -312,16 +360,53 @@ void Agc::Process(StatisticsPtr &stats, Metadata *image_metadata)
// Some of the exposure has to be applied as digital gain, so work out
// what that is. This function also tells us whether it's decided to
// "desaturate" the image more quickly.
- bool desaturate = applyDigitalGain(image_metadata, gain, target_Y);
+ bool desaturate = applyDigitalGain(gain, target_Y);
// The results have to be filtered so as not to change too rapidly.
filterExposure(desaturate);
- // The last thing is to divvy up the exposure value into a shutter time
+ // The last thing is to divide up the exposure value into a shutter time
// and analogue_gain, according to the current exposure mode.
- divvyupExposure();
+ divideUpExposure();
// Finally advertise what we've done.
writeAndFinish(image_metadata, desaturate);
}
+void Agc::updateLockStatus(DeviceStatus const &device_status)
+{
+ const double ERROR_FACTOR = 0.10; // make these customisable?
+ const int MAX_LOCK_COUNT = 5;
+ // Reset "lock count" when we exceed this multiple of ERROR_FACTOR
+ const double RESET_MARGIN = 1.5;
+
+ // Add 200us to the exposure time error to allow for line quantisation.
+ Duration exposure_error = last_device_status_.shutter_speed * ERROR_FACTOR + 200us;
+ double gain_error = last_device_status_.analogue_gain * ERROR_FACTOR;
+ Duration target_error = last_target_exposure_ * ERROR_FACTOR;
+
+ // Note that we don't know the exposure/gain limits of the sensor, so
+ // the values we keep requesting may be unachievable. For this reason
+ // we only insist that we're close to values in the past few frames.
+ if (device_status.shutter_speed > last_device_status_.shutter_speed - exposure_error &&
+ device_status.shutter_speed < last_device_status_.shutter_speed + exposure_error &&
+ device_status.analogue_gain > last_device_status_.analogue_gain - gain_error &&
+ device_status.analogue_gain < last_device_status_.analogue_gain + gain_error &&
+ status_.target_exposure_value > last_target_exposure_ - target_error &&
+ status_.target_exposure_value < last_target_exposure_ + target_error)
+ lock_count_ = std::min(lock_count_ + 1, MAX_LOCK_COUNT);
+ else if (device_status.shutter_speed < last_device_status_.shutter_speed - RESET_MARGIN * exposure_error ||
+ device_status.shutter_speed > last_device_status_.shutter_speed + RESET_MARGIN * exposure_error ||
+ device_status.analogue_gain < last_device_status_.analogue_gain - RESET_MARGIN * gain_error ||
+ device_status.analogue_gain > last_device_status_.analogue_gain + RESET_MARGIN * gain_error ||
+ status_.target_exposure_value < last_target_exposure_ - RESET_MARGIN * target_error ||
+ status_.target_exposure_value > last_target_exposure_ + RESET_MARGIN * target_error)
+ lock_count_ = 0;
+
+ last_device_status_ = device_status;
+ last_target_exposure_ = status_.target_exposure_value;
+
+ LOG(RPiAgc, Debug) << "Lock count updated to " << lock_count_;
+ status_.locked = lock_count_ == MAX_LOCK_COUNT;
+}
+
static void copy_string(std::string const &s, char *d, size_t size)
{
size_t length = s.copy(d, size - 1);
@@ -331,55 +416,47 @@ static void copy_string(std::string const &s, char *d, size_t size)
void Agc::housekeepConfig()
{
// First fetch all the up-to-date settings, so no one else has to do it.
- std::string new_exposure_mode_name, new_constraint_mode_name,
- new_metering_mode_name;
- {
- std::unique_lock<std::mutex> lock(settings_mutex_);
- new_metering_mode_name = metering_mode_name_;
- new_exposure_mode_name = exposure_mode_name_;
- new_constraint_mode_name = constraint_mode_name_;
- status_.ev = ev_;
- status_.fixed_shutter = fixed_shutter_;
- status_.fixed_analogue_gain = fixed_analogue_gain_;
- status_.flicker_period = flicker_period_;
- }
- RPI_LOG("ev " << status_.ev << " fixed_shutter "
- << status_.fixed_shutter << " fixed_analogue_gain "
- << status_.fixed_analogue_gain);
+ status_.ev = ev_;
+ status_.fixed_shutter = clipShutter(fixed_shutter_);
+ status_.fixed_analogue_gain = fixed_analogue_gain_;
+ status_.flicker_period = flicker_period_;
+ LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixed_shutter "
+ << status_.fixed_shutter << " fixed_analogue_gain "
+ << status_.fixed_analogue_gain;
// Make sure the "mode" pointers point to the up-to-date things, if
// they've changed.
- if (strcmp(new_metering_mode_name.c_str(), status_.metering_mode)) {
- auto it = config_.metering_modes.find(new_metering_mode_name);
+ if (strcmp(metering_mode_name_.c_str(), status_.metering_mode)) {
+ auto it = config_.metering_modes.find(metering_mode_name_);
if (it == config_.metering_modes.end())
throw std::runtime_error("Agc: no metering mode " +
- new_metering_mode_name);
+ metering_mode_name_);
metering_mode_ = &it->second;
- copy_string(new_metering_mode_name, status_.metering_mode,
+ copy_string(metering_mode_name_, status_.metering_mode,
sizeof(status_.metering_mode));
}
- if (strcmp(new_exposure_mode_name.c_str(), status_.exposure_mode)) {
- auto it = config_.exposure_modes.find(new_exposure_mode_name);
+ if (strcmp(exposure_mode_name_.c_str(), status_.exposure_mode)) {
+ auto it = config_.exposure_modes.find(exposure_mode_name_);
if (it == config_.exposure_modes.end())
throw std::runtime_error("Agc: no exposure profile " +
- new_exposure_mode_name);
+ exposure_mode_name_);
exposure_mode_ = &it->second;
- copy_string(new_exposure_mode_name, status_.exposure_mode,
+ copy_string(exposure_mode_name_, status_.exposure_mode,
sizeof(status_.exposure_mode));
}
- if (strcmp(new_constraint_mode_name.c_str(), status_.constraint_mode)) {
+ if (strcmp(constraint_mode_name_.c_str(), status_.constraint_mode)) {
auto it =
- config_.constraint_modes.find(new_constraint_mode_name);
+ config_.constraint_modes.find(constraint_mode_name_);
if (it == config_.constraint_modes.end())
throw std::runtime_error("Agc: no constraint list " +
- new_constraint_mode_name);
+ constraint_mode_name_);
constraint_mode_ = &it->second;
- copy_string(new_constraint_mode_name, status_.constraint_mode,
+ copy_string(constraint_mode_name_, status_.constraint_mode,
sizeof(status_.constraint_mode));
}
- RPI_LOG("exposure_mode "
- << new_exposure_mode_name << " constraint_mode "
- << new_constraint_mode_name << " metering_mode "
- << new_metering_mode_name);
+ LOG(RPiAgc, Debug) << "exposure_mode "
+ << exposure_mode_name_ << " constraint_mode "
+ << constraint_mode_name_ << " metering_mode "
+ << metering_mode_name_;
}
void Agc::fetchCurrentExposure(Metadata *image_metadata)
@@ -393,30 +470,44 @@ void Agc::fetchCurrentExposure(Metadata *image_metadata)
current_.analogue_gain = device_status->analogue_gain;
AgcStatus *agc_status =
image_metadata->GetLocked<AgcStatus>("agc.status");
- current_.total_exposure = agc_status ? agc_status->total_exposure_value : 0;
+ current_.total_exposure = agc_status ? agc_status->total_exposure_value : 0s;
current_.total_exposure_no_dg = current_.shutter * current_.analogue_gain;
}
-static double compute_initial_Y(bcm2835_isp_stats *stats, Metadata *image_metadata,
- double weights[])
+void Agc::fetchAwbStatus(Metadata *image_metadata)
+{
+ awb_.gain_r = 1.0; // in case not found in metadata
+ awb_.gain_g = 1.0;
+ awb_.gain_b = 1.0;
+ if (image_metadata->Get("awb.status", awb_) != 0)
+ LOG(RPiAgc, Debug) << "Agc: no AWB status found";
+}
+
+static double compute_initial_Y(bcm2835_isp_stats *stats, AwbStatus const &awb,
+ double weights[], double gain)
{
bcm2835_isp_stats_region *regions = stats->agc_stats;
- struct AwbStatus awb;
- awb.gain_r = awb.gain_g = awb.gain_b = 1.0; // in case no metadata
- if (image_metadata->Get("awb.status", awb) != 0)
- RPI_WARN("Agc: no AWB status found");
- double Y_sum = 0, weight_sum = 0;
+ // Note how the calculation below means that equal weights give you
+ // "average" metering (i.e. all pixels equally important).
+ double R_sum = 0, G_sum = 0, B_sum = 0, pixel_sum = 0;
for (int i = 0; i < AGC_STATS_SIZE; i++) {
- if (regions[i].counted == 0)
- continue;
- weight_sum += weights[i];
- double Y = regions[i].r_sum * awb.gain_r * .299 +
- regions[i].g_sum * awb.gain_g * .587 +
- regions[i].b_sum * awb.gain_b * .114;
- Y /= regions[i].counted;
- Y_sum += Y * weights[i];
+ double counted = regions[i].counted;
+ double r_sum = std::min(regions[i].r_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ double g_sum = std::min(regions[i].g_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ double b_sum = std::min(regions[i].b_sum * gain, ((1 << PIPELINE_BITS) - 1) * counted);
+ R_sum += r_sum * weights[i];
+ G_sum += g_sum * weights[i];
+ B_sum += b_sum * weights[i];
+ pixel_sum += counted * weights[i];
}
- return Y_sum / weight_sum / (1 << PIPELINE_BITS);
+ if (pixel_sum == 0.0) {
+ LOG(RPiAgc, Warning) << "compute_initial_Y: pixel_sum is zero";
+ return 0;
+ }
+ double Y_sum = R_sum * awb.gain_r * .299 +
+ G_sum * awb.gain_g * .587 +
+ B_sum * awb.gain_b * .114;
+ return Y_sum / pixel_sum / (1 << PIPELINE_BITS);
}
// We handle extra gain through EV by adjusting our Y targets. However, you
@@ -443,7 +534,7 @@ void Agc::computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
struct LuxStatus lux = {};
lux.lux = 400; // default lux level to 400 in case no metadata found
if (image_metadata->Get("lux.status", lux) != 0)
- RPI_WARN("Agc: no lux level found");
+ LOG(RPiAgc, Warning) << "Agc: no lux level found";
Histogram h(statistics->hist[0].g_hist, NUM_HISTOGRAM_BINS);
double ev_gain = status_.ev * config_.base_ev;
// The initial gain and target_Y come from some of the regions. After
@@ -451,67 +542,84 @@ void Agc::computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
target_Y =
config_.Y_target.Eval(config_.Y_target.Domain().Clip(lux.lux));
target_Y = std::min(EV_GAIN_Y_TARGET_LIMIT, target_Y * ev_gain);
- double initial_Y = compute_initial_Y(statistics, image_metadata,
- metering_mode_->weights);
- gain = std::min(10.0, target_Y / (initial_Y + .001));
- RPI_LOG("Initially Y " << initial_Y << " target " << target_Y
- << " gives gain " << gain);
+
+ // Do this calculation a few times as the brightness increase can be
+ // non-linear when there are saturated regions.
+ gain = 1.0;
+ for (int i = 0; i < 8; i++) {
+ double initial_Y = compute_initial_Y(statistics, awb_,
+ metering_mode_->weights, gain);
+ double extra_gain = std::min(10.0, target_Y / (initial_Y + .001));
+ gain *= extra_gain;
+ LOG(RPiAgc, Debug) << "Initial Y " << initial_Y << " target " << target_Y
+ << " gives gain " << gain;
+ if (extra_gain < 1.01) // close enough
+ break;
+ }
+
for (auto &c : *constraint_mode_) {
double new_target_Y;
double new_gain =
constraint_compute_gain(c, h, lux.lux, ev_gain,
new_target_Y);
- RPI_LOG("Constraint has target_Y "
- << new_target_Y << " giving gain " << new_gain);
+ LOG(RPiAgc, Debug) << "Constraint has target_Y "
+ << new_target_Y << " giving gain " << new_gain;
if (c.bound == AgcConstraint::Bound::LOWER &&
new_gain > gain) {
- RPI_LOG("Lower bound constraint adopted");
+ LOG(RPiAgc, Debug) << "Lower bound constraint adopted";
gain = new_gain, target_Y = new_target_Y;
} else if (c.bound == AgcConstraint::Bound::UPPER &&
new_gain < gain) {
- RPI_LOG("Upper bound constraint adopted");
+ LOG(RPiAgc, Debug) << "Upper bound constraint adopted";
gain = new_gain, target_Y = new_target_Y;
}
}
- RPI_LOG("Final gain " << gain << " (target_Y " << target_Y << " ev "
- << status_.ev << " base_ev " << config_.base_ev
- << ")");
+ LOG(RPiAgc, Debug) << "Final gain " << gain << " (target_Y " << target_Y << " ev "
+ << status_.ev << " base_ev " << config_.base_ev
+ << ")";
}
void Agc::computeTargetExposure(double gain)
{
- // The statistics reflect the image without digital gain, so the final
- // total exposure we're aiming for is:
- target_.total_exposure = current_.total_exposure_no_dg * gain;
- // The final target exposure is also limited to what the exposure
- // mode allows.
- double max_total_exposure =
- (status_.fixed_shutter != 0.0
- ? status_.fixed_shutter
- : exposure_mode_->shutter.back()) *
- (status_.fixed_analogue_gain != 0.0
- ? status_.fixed_analogue_gain
- : exposure_mode_->gain.back());
- target_.total_exposure = std::min(target_.total_exposure,
- max_total_exposure);
- RPI_LOG("Target total_exposure " << target_.total_exposure);
-}
-
-bool Agc::applyDigitalGain(Metadata *image_metadata, double gain,
- double target_Y)
-{
- double dg = 1.0;
+ if (status_.fixed_shutter && status_.fixed_analogue_gain) {
+ // When ag and shutter are both fixed, we need to drive the
+ // total exposure so that we end up with a digital gain of at least
+ // 1/min_colour_gain. Otherwise we'd desaturate channels causing
+ // white to go cyan or magenta.
+ double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
+ ASSERT(min_colour_gain != 0.0);
+ target_.total_exposure =
+ status_.fixed_shutter * status_.fixed_analogue_gain / min_colour_gain;
+ } else {
+ // The statistics reflect the image without digital gain, so the final
+ // total exposure we're aiming for is:
+ target_.total_exposure = current_.total_exposure_no_dg * gain;
+ // The final target exposure is also limited to what the exposure
+ // mode allows.
+ Duration max_shutter = status_.fixed_shutter
+ ? status_.fixed_shutter
+ : exposure_mode_->shutter.back();
+ max_shutter = clipShutter(max_shutter);
+ Duration max_total_exposure =
+ max_shutter *
+ (status_.fixed_analogue_gain != 0.0
+ ? status_.fixed_analogue_gain
+ : exposure_mode_->gain.back());
+ target_.total_exposure = std::min(target_.total_exposure,
+ max_total_exposure);
+ }
+ LOG(RPiAgc, Debug) << "Target total_exposure " << target_.total_exposure;
+}
+
+bool Agc::applyDigitalGain(double gain, double target_Y)
+{
+ double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
+ ASSERT(min_colour_gain != 0.0);
+ double dg = 1.0 / min_colour_gain;
// I think this pipeline subtracts black level and rescales before we
// get the stats, so no need to worry about it.
- struct AwbStatus awb;
- if (image_metadata->Get("awb.status", awb) == 0) {
- double min_gain = std::min(awb.gain_r,
- std::min(awb.gain_g, awb.gain_b));
- dg *= std::max(1.0, 1.0 / min_gain);
- } else
- RPI_WARN("Agc: no AWB status found");
- RPI_LOG("after AWB, target dg " << dg << " gain " << gain
- << " target_Y " << target_Y);
+ LOG(RPiAgc, Debug) << "after AWB, target dg " << dg << " gain " << gain
+ << " target_Y " << target_Y;
// Finally, if we're trying to reduce exposure but the target_Y is
// "close" to 1.0, then the gain computed for that constraint will be
// only slightly less than one, because the measured Y can never be
@@ -523,16 +631,21 @@ bool Agc::applyDigitalGain(Metadata *image_metadata, double gain,
gain < sqrt(target_Y);
if (desaturate)
dg /= config_.fast_reduce_threshold;
- RPI_LOG("Digital gain " << dg << " desaturate? " << desaturate);
+ LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? " << desaturate;
target_.total_exposure_no_dg = target_.total_exposure / dg;
- RPI_LOG("Target total_exposure_no_dg " << target_.total_exposure_no_dg);
+ LOG(RPiAgc, Debug) << "Target total_exposure_no_dg " << target_.total_exposure_no_dg;
return desaturate;
}
void Agc::filterExposure(bool desaturate)
{
- double speed = frame_count_ <= config_.startup_frames ? 1.0 : config_.speed;
- if (filtered_.total_exposure == 0.0) {
+ double speed = config_.speed;
+ // AGC adapts instantly if both shutter and gain are directly specified
+ // or we're in the startup phase.
+ if ((status_.fixed_shutter && status_.fixed_analogue_gain) ||
+ frame_count_ <= config_.startup_frames)
+ speed = 1.0;
+ if (!filtered_.total_exposure) {
filtered_.total_exposure = target_.total_exposure;
filtered_.total_exposure_no_dg = target_.total_exposure_no_dg;
} else {
@@ -560,35 +673,38 @@ void Agc::filterExposure(bool desaturate)
filtered_.total_exposure * config_.fast_reduce_threshold)
filtered_.total_exposure_no_dg = filtered_.total_exposure *
config_.fast_reduce_threshold;
- RPI_LOG("After filtering, total_exposure " << filtered_.total_exposure <<
- " no dg " << filtered_.total_exposure_no_dg);
+ LOG(RPiAgc, Debug) << "After filtering, total_exposure " << filtered_.total_exposure
+ << " no dg " << filtered_.total_exposure_no_dg;
}
-void Agc::divvyupExposure()
+void Agc::divideUpExposure()
{
// Sending the fixed shutter/gain cases through the same code may seem
// unnecessary, but it will make more sense when we extend this to cover
// variable aperture.
- double exposure_value = filtered_.total_exposure_no_dg;
- double shutter_time, analogue_gain;
- shutter_time = status_.fixed_shutter != 0.0
+ Duration exposure_value = filtered_.total_exposure_no_dg;
+ Duration shutter_time;
+ double analogue_gain;
+ shutter_time = status_.fixed_shutter
? status_.fixed_shutter
: exposure_mode_->shutter[0];
+ shutter_time = clipShutter(shutter_time);
analogue_gain = status_.fixed_analogue_gain != 0.0
? status_.fixed_analogue_gain
: exposure_mode_->gain[0];
if (shutter_time * analogue_gain < exposure_value) {
for (unsigned int stage = 1;
stage < exposure_mode_->gain.size(); stage++) {
- if (status_.fixed_shutter == 0.0) {
- if (exposure_mode_->shutter[stage] *
- analogue_gain >=
+ if (!status_.fixed_shutter) {
+ Duration stage_shutter =
+ clipShutter(exposure_mode_->shutter[stage]);
+ if (stage_shutter * analogue_gain >=
exposure_value) {
shutter_time =
exposure_value / analogue_gain;
break;
}
- shutter_time = exposure_mode_->shutter[stage];
+ shutter_time = stage_shutter;
}
if (status_.fixed_analogue_gain == 0.0) {
if (exposure_mode_->gain[stage] *
@@ -602,16 +718,15 @@ void Agc::divvyupExposure()
}
}
}
- RPI_LOG("Divided up shutter and gain are " << shutter_time << " and "
- << analogue_gain);
+ LOG(RPiAgc, Debug) << "Divided up shutter and gain are " << shutter_time << " and "
+ << analogue_gain;
// Finally adjust shutter time for flicker avoidance (require both
// shutter and gain not to be fixed).
- if (status_.fixed_shutter == 0.0 &&
- status_.fixed_analogue_gain == 0.0 &&
- status_.flicker_period != 0.0) {
+ if (!status_.fixed_shutter && !status_.fixed_analogue_gain &&
+ status_.flicker_period) {
int flicker_periods = shutter_time / status_.flicker_period;
- if (flicker_periods > 0) {
- double new_shutter_time = flicker_periods * status_.flicker_period;
+ if (flicker_periods) {
+ Duration new_shutter_time = flicker_periods * status_.flicker_period;
analogue_gain *= shutter_time / new_shutter_time;
// We should still not allow the ag to go over the
// largest value in the exposure mode. Note that this
@@ -621,8 +736,8 @@ void Agc::divvyupExposure()
exposure_mode_->gain.back());
shutter_time = new_shutter_time;
}
- RPI_LOG("After flicker avoidance, shutter "
- << shutter_time << " gain " << analogue_gain);
+ LOG(RPiAgc, Debug) << "After flicker avoidance, shutter "
+ << shutter_time << " gain " << analogue_gain;
}
filtered_.shutter = shutter_time;
filtered_.analogue_gain = analogue_gain;
@@ -631,20 +746,23 @@ void Agc::divvyupExposure()
void Agc::writeAndFinish(Metadata *image_metadata, bool desaturate)
{
status_.total_exposure_value = filtered_.total_exposure;
- status_.target_exposure_value = desaturate ? 0 : target_.total_exposure_no_dg;
+ status_.target_exposure_value = desaturate ? 0s : target_.total_exposure_no_dg;
status_.shutter_time = filtered_.shutter;
status_.analogue_gain = filtered_.analogue_gain;
- {
- std::unique_lock<std::mutex> lock(output_mutex_);
- output_status_ = status_;
- }
// Write to metadata as well, in case anyone wants to update the camera
// immediately.
image_metadata->Set("agc.status", status_);
- RPI_LOG("Output written, total exposure requested is "
- << filtered_.total_exposure);
- RPI_LOG("Camera exposure update: shutter time " << filtered_.shutter <<
- " analogue gain " << filtered_.analogue_gain);
+ LOG(RPiAgc, Debug) << "Output written, total exposure requested is "
+ << filtered_.total_exposure;
+ LOG(RPiAgc, Debug) << "Camera exposure update: shutter time " << filtered_.shutter
+ << " analogue gain " << filtered_.analogue_gain;
+}
+
+Duration Agc::clipShutter(Duration shutter)
+{
+ if (max_shutter_)
+ shutter = std::min(shutter, max_shutter_);
+ return shutter;
}
// Register algorithm with the system.
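The flicker-avoidance step in divideUpExposure() rounds the shutter down to a whole number of flicker periods and compensates with analogue gain so the total exposure is preserved. The arithmetic in isolation, with made-up numbers (the real code additionally clips the gain to the exposure mode's maximum):

#include <chrono>
#include <iostream>

#include <libcamera/base/utils.h>

using namespace std::chrono_literals;
using namespace libcamera; /* for the Duration operator<< overload */
using libcamera::utils::Duration;

int main()
{
	Duration shutter = 13.3ms;      // shutter proposed by the exposure profile
	Duration flicker_period = 10ms; // 100Hz mains
	double analogue_gain = 2.0;

	int flicker_periods = shutter / flicker_period; // truncates, here to 1
	if (flicker_periods) {
		Duration new_shutter = flicker_periods * flicker_period;
		analogue_gain *= shutter / new_shutter; // keep total exposure constant
		shutter = new_shutter;
	}
	std::cout << "shutter " << shutter << " gain " << analogue_gain << std::endl;
	return 0;
}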
diff --git a/src/ipa/raspberrypi/controller/rpi/agc.hpp b/src/ipa/raspberrypi/controller/rpi/agc.hpp
index ba7ae092..85067dc6 100644
--- a/src/ipa/raspberrypi/controller/rpi/agc.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/agc.hpp
@@ -9,6 +9,8 @@
#include <vector>
#include <mutex>
+#include <libcamera/base/utils.h>
+
#include "../agc_algorithm.hpp"
#include "../agc_status.h"
#include "../pwl.hpp"
@@ -22,13 +24,15 @@
namespace RPiController {
+using namespace std::literals::chrono_literals;
+
struct AgcMeteringMode {
double weights[AGC_STATS_SIZE];
void Read(boost::property_tree::ptree const &params);
};
struct AgcExposureMode {
- std::vector<double> shutter;
+ std::vector<libcamera::utils::Duration> shutter;
std::vector<double> gain;
void Read(boost::property_tree::ptree const &params);
};
@@ -52,6 +56,7 @@ struct AgcConfig {
Pwl Y_target;
double speed;
uint16_t startup_frames;
+ unsigned int convergence_frames;
double max_change;
double min_change;
double fast_reduce_threshold;
@@ -60,6 +65,8 @@ struct AgcConfig {
std::string default_exposure_mode;
std::string default_constraint_mode;
double base_ev;
+ libcamera::utils::Duration default_exposure_time;
+ double default_analogue_gain;
};
class Agc : public AgcAlgorithm
@@ -68,9 +75,15 @@ public:
Agc(Controller *controller);
char const *Name() const override;
void Read(boost::property_tree::ptree const &params) override;
+ // AGC handles "pausing" for itself.
+ bool IsPaused() const override;
+ void Pause() override;
+ void Resume() override;
+ unsigned int GetConvergenceFrames() const override;
void SetEv(double ev) override;
- void SetFlickerPeriod(double flicker_period) override;
- void SetFixedShutter(double fixed_shutter) override; // microseconds
+ void SetFlickerPeriod(libcamera::utils::Duration flicker_period) override;
+ void SetMaxShutter(libcamera::utils::Duration max_shutter) override;
+ void SetFixedShutter(libcamera::utils::Duration fixed_shutter) override;
void SetFixedAnalogueGain(double fixed_analogue_gain) override;
void SetMeteringMode(std::string const &metering_mode_name) override;
void SetExposureMode(std::string const &exposure_mode_name) override;
@@ -80,44 +93,47 @@ public:
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
private:
+ void updateLockStatus(DeviceStatus const &device_status);
AgcConfig config_;
void housekeepConfig();
void fetchCurrentExposure(Metadata *image_metadata);
+ void fetchAwbStatus(Metadata *image_metadata);
void computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
double &gain, double &target_Y);
void computeTargetExposure(double gain);
- bool applyDigitalGain(Metadata *image_metadata, double gain,
- double target_Y);
+ bool applyDigitalGain(double gain, double target_Y);
void filterExposure(bool desaturate);
- void divvyupExposure();
+ void divideUpExposure();
void writeAndFinish(Metadata *image_metadata, bool desaturate);
+ libcamera::utils::Duration clipShutter(libcamera::utils::Duration shutter);
AgcMeteringMode *metering_mode_;
AgcExposureMode *exposure_mode_;
AgcConstraintMode *constraint_mode_;
uint64_t frame_count_;
+ AwbStatus awb_;
struct ExposureValues {
- ExposureValues() : shutter(0), analogue_gain(0),
- total_exposure(0), total_exposure_no_dg(0) {}
- double shutter;
+ ExposureValues() : shutter(0s), analogue_gain(0),
+ total_exposure(0s), total_exposure_no_dg(0s) {}
+ libcamera::utils::Duration shutter;
double analogue_gain;
- double total_exposure;
- double total_exposure_no_dg; // without digital gain
+ libcamera::utils::Duration total_exposure;
+ libcamera::utils::Duration total_exposure_no_dg; // without digital gain
};
ExposureValues current_; // values for the current frame
ExposureValues target_; // calculate the values we want here
ExposureValues filtered_; // these values are filtered towards target
- AgcStatus status_; // to "latch" settings so they can't change
- AgcStatus output_status_; // the status we will write out
- std::mutex output_mutex_;
+ AgcStatus status_;
int lock_count_;
+ DeviceStatus last_device_status_;
+ libcamera::utils::Duration last_target_exposure_;
// Below here the "settings" that applications can change.
- std::mutex settings_mutex_;
std::string metering_mode_name_;
std::string exposure_mode_name_;
std::string constraint_mode_name_;
double ev_;
- double flicker_period_;
- double fixed_shutter_;
+ libcamera::utils::Duration flicker_period_;
+ libcamera::utils::Duration max_shutter_;
+ libcamera::utils::Duration fixed_shutter_;
double fixed_analogue_gain_;
};
diff --git a/src/ipa/raspberrypi/controller/rpi/alsc.cpp b/src/ipa/raspberrypi/controller/rpi/alsc.cpp
index 42fbc8a4..be3d1ae4 100644
--- a/src/ipa/raspberrypi/controller/rpi/alsc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/alsc.cpp
@@ -6,12 +6,17 @@
*/
#include <math.h>
+#include <libcamera/base/log.h>
+
#include "../awb_status.h"
#include "alsc.hpp"
// Raspberry Pi ALSC (Auto Lens Shading Correction) algorithm.
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiAlsc)
#define NAME "rpi.alsc"
@@ -110,15 +115,14 @@ static void read_calibrations(std::vector<AlscCalibration> &calibrations,
"Alsc: too few values for ct " +
std::to_string(ct) + " in " + name);
calibrations.push_back(calibration);
- RPI_LOG("Read " << name << " calibration for ct "
- << ct);
+ LOG(RPiAlsc, Debug)
+ << "Read " << name << " calibration for ct " << ct;
}
}
}
void Alsc::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("Alsc");
config_.frame_period = params.get<uint16_t>("frame_period", 12);
config_.startup_frames = params.get<uint16_t>("startup_frames", 10);
config_.speed = params.get<double>("speed", 0.05);
@@ -139,13 +143,15 @@ void Alsc::Read(boost::property_tree::ptree const &params)
read_lut(config_.luminance_lut,
params.get_child("luminance_lut"));
else
- RPI_WARN("Alsc: no luminance table - assume unity everywhere");
+ LOG(RPiAlsc, Warning)
+ << "no luminance table - assume unity everywhere";
read_calibrations(config_.calibrations_Cr, params, "calibrations_Cr");
read_calibrations(config_.calibrations_Cb, params, "calibrations_Cb");
config_.default_ct = params.get<double>("default_ct", 4500.0);
config_.threshold = params.get<double>("threshold", 1e-3);
}
+static double get_ct(Metadata *metadata, double default_ct);
static void get_cal_table(double ct,
std::vector<AlscCalibration> const &calibrations,
double cal_table[XY]);
@@ -163,7 +169,6 @@ static void add_luminance_to_tables(double results[3][Y][X],
void Alsc::Initialise()
{
- RPI_LOG("Alsc");
frame_count2_ = frame_count_ = frame_phase_ = 0;
first_time_ = true;
ct_ = config_.default_ct;
@@ -210,6 +215,9 @@ void Alsc::SwitchMode(CameraMode const &camera_mode,
// change.
bool reset_tables = first_time_ || compare_modes(camera_mode_, camera_mode);
+ // Believe the colour temperature from the AWB, if there is one.
+ ct_ = get_ct(metadata, ct_);
+
// Ensure the other thread isn't running while we do this.
waitForAysncThread();
@@ -248,22 +256,22 @@ void Alsc::SwitchMode(CameraMode const &camera_mode,
void Alsc::fetchAsyncResults()
{
- RPI_LOG("Fetch ALSC results");
+ LOG(RPiAlsc, Debug) << "Fetch ALSC results";
async_finished_ = false;
async_started_ = false;
memcpy(sync_results_, async_results_, sizeof(sync_results_));
}
-static double get_ct(Metadata *metadata, double default_ct)
+double get_ct(Metadata *metadata, double default_ct)
{
AwbStatus awb_status;
awb_status.temperature_K = default_ct; // in case nothing found
if (metadata->Get("awb.status", awb_status) != 0)
- RPI_WARN("Alsc: no AWB results found, using "
- << awb_status.temperature_K);
+ LOG(RPiAlsc, Debug) << "no AWB results found, using "
+ << awb_status.temperature_K;
else
- RPI_LOG("Alsc: AWB results found, using "
- << awb_status.temperature_K);
+ LOG(RPiAlsc, Debug) << "AWB results found, using "
+ << awb_status.temperature_K;
return awb_status.temperature_K;
}
@@ -285,7 +293,7 @@ static void copy_stats(bcm2835_isp_stats_region regions[XY], StatisticsPtr &stat
void Alsc::restartAsync(StatisticsPtr &stats, Metadata *image_metadata)
{
- RPI_LOG("Starting ALSC thread");
+ LOG(RPiAlsc, Debug) << "Starting ALSC calculation";
// Get the current colour temperature. It's all we need from the
// metadata. Default to the last CT value (which could be the default).
ct_ = get_ct(image_metadata, ct_);
@@ -293,7 +301,8 @@ void Alsc::restartAsync(StatisticsPtr &stats, Metadata *image_metadata)
// the LSC table that the pipeline applied to them.
AlscStatus alsc_status;
if (image_metadata->Get("alsc.status", alsc_status) != 0) {
- RPI_WARN("No ALSC status found for applied gains!");
+ LOG(RPiAlsc, Warning)
+ << "No ALSC status found for applied gains!";
for (int y = 0; y < Y; y++)
for (int x = 0; x < X; x++) {
alsc_status.r[y][x] = 1.0;
@@ -320,13 +329,12 @@ void Alsc::Prepare(Metadata *image_metadata)
double speed = frame_count_ < (int)config_.startup_frames
? 1.0
: config_.speed;
- RPI_LOG("Alsc: frame_count " << frame_count_ << " speed " << speed);
+ LOG(RPiAlsc, Debug)
+ << "frame_count " << frame_count_ << " speed " << speed;
{
std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ && async_finished_) {
- RPI_LOG("ALSC thread finished");
+ if (async_started_ && async_finished_)
fetchAsyncResults();
- }
}
// Apply IIR filter to results and program into the pipeline.
double *ptr = (double *)sync_results_,
@@ -350,13 +358,11 @@ void Alsc::Process(StatisticsPtr &stats, Metadata *image_metadata)
frame_phase_++;
if (frame_count2_ < (int)config_.startup_frames)
frame_count2_++;
- RPI_LOG("Alsc: frame_phase " << frame_phase_);
+ LOG(RPiAlsc, Debug) << "frame_phase " << frame_phase_;
if (frame_phase_ >= (int)config_.frame_period ||
frame_count2_ < (int)config_.startup_frames) {
- if (async_started_ == false) {
- RPI_LOG("ALSC thread starting");
+ if (async_started_ == false)
restartAsync(stats, image_metadata);
- }
}
}
@@ -387,25 +393,26 @@ void get_cal_table(double ct, std::vector<AlscCalibration> const &calibrations,
if (calibrations.empty()) {
for (int i = 0; i < XY; i++)
cal_table[i] = 1.0;
- RPI_LOG("Alsc: no calibrations found");
+ LOG(RPiAlsc, Debug) << "no calibrations found";
} else if (ct <= calibrations.front().ct) {
memcpy(cal_table, calibrations.front().table,
XY * sizeof(double));
- RPI_LOG("Alsc: using calibration for "
- << calibrations.front().ct);
+ LOG(RPiAlsc, Debug) << "using calibration for "
+ << calibrations.front().ct;
} else if (ct >= calibrations.back().ct) {
memcpy(cal_table, calibrations.back().table,
XY * sizeof(double));
- RPI_LOG("Alsc: using calibration for "
- << calibrations.front().ct);
+ LOG(RPiAlsc, Debug) << "using calibration for "
+ << calibrations.back().ct;
} else {
int idx = 0;
while (ct > calibrations[idx + 1].ct)
idx++;
double ct0 = calibrations[idx].ct,
ct1 = calibrations[idx + 1].ct;
- RPI_LOG("Alsc: ct is " << ct << ", interpolating between "
- << ct0 << " and " << ct1);
+ LOG(RPiAlsc, Debug)
+ << "ct is " << ct << ", interpolating between "
+ << ct0 << " and " << ct1;
for (int i = 0; i < XY; i++)
cal_table[i] =
(calibrations[idx].table[i] * (ct1 - ct) +
@@ -606,9 +613,9 @@ static double gauss_seidel2_SOR(double const M[XY][4], double omega,
double lambda[XY])
{
double old_lambda[XY];
- for (int i = 0; i < XY; i++)
- old_lambda[i] = lambda[i];
int i;
+ for (i = 0; i < XY; i++)
+ old_lambda[i] = lambda[i];
lambda[0] = compute_lambda_bottom_start(0, M, lambda);
for (i = 1; i < X; i++)
lambda[i] = compute_lambda_bottom(i, M, lambda);
@@ -628,7 +635,7 @@ static double gauss_seidel2_SOR(double const M[XY][4], double omega,
lambda[i] = compute_lambda_bottom(i, M, lambda);
lambda[0] = compute_lambda_bottom_start(0, M, lambda);
double max_diff = 0;
- for (int i = 0; i < XY; i++) {
+ for (i = 0; i < XY; i++) {
lambda[i] = old_lambda[i] + (lambda[i] - old_lambda[i]) * omega;
if (fabs(lambda[i] - old_lambda[i]) > fabs(max_diff))
max_diff = lambda[i] - old_lambda[i];
@@ -656,15 +663,16 @@ static void run_matrix_iterations(double const C[XY], double lambda[XY],
for (int i = 0; i < n_iter; i++) {
double max_diff = fabs(gauss_seidel2_SOR(M, omega, lambda));
if (max_diff < threshold) {
- RPI_LOG("Stop after " << i + 1 << " iterations");
+ LOG(RPiAlsc, Debug)
+ << "Stop after " << i + 1 << " iterations";
break;
}
// this happens very occasionally (so make a note), though it
// doesn't seem to matter
if (max_diff > last_max_diff)
- RPI_LOG("Iteration " << i << ": max_diff gone up "
- << last_max_diff << " to "
- << max_diff);
+ LOG(RPiAlsc, Debug)
+ << "Iteration " << i << ": max_diff gone up "
+ << last_max_diff << " to " << max_diff;
last_max_diff = max_diff;
}
// We're going to normalise the lambdas so the smallest is 1. Not sure
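
The get_cal_table() hunk above blends the two calibrations that bracket the current colour temperature. A minimal sketch of that interpolation step; the AlscCalibration stand-in and the XY value here are assumptions mirroring definitions in alsc.cpp that are not shown in this diff:

constexpr int XY = 16 * 12; // stand-in for the real grid size in alsc.cpp

struct AlscCalibration {
	double ct;
	double table[XY];
};

// Blend two bracketing calibration tables for colour temperature "ct"
// (cal0.ct <= ct <= cal1.ct), mirroring the interior case of get_cal_table().
static void blendCalTables(double ct, AlscCalibration const &cal0,
			   AlscCalibration const &cal1, double calTable[XY])
{
	double ct0 = cal0.ct, ct1 = cal1.ct;
	for (int i = 0; i < XY; i++)
		calTable[i] = (cal0.table[i] * (ct1 - ct) +
			       cal1.table[i] * (ct - ct0)) /
			      (ct1 - ct0);
}
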
diff --git a/src/ipa/raspberrypi/controller/rpi/awb.cpp b/src/ipa/raspberrypi/controller/rpi/awb.cpp
index a5536e47..5cfd33a3 100644
--- a/src/ipa/raspberrypi/controller/rpi/awb.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/awb.cpp
@@ -5,19 +5,24 @@
* awb.cpp - AWB control algorithm
*/
-#include "../logging.hpp"
+#include <libcamera/base/log.h>
+
#include "../lux_status.h"
#include "awb.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiAwb)
#define NAME "rpi.awb"
#define AWB_STATS_SIZE_X DEFAULT_AWB_REGIONS_X
#define AWB_STATS_SIZE_Y DEFAULT_AWB_REGIONS_Y
-const double Awb::RGB::INVALID = -1.0;
+// todo - the locking in this algorithm needs some tidying up as has been done
+// elsewhere (ALSC and AGC).
void AwbMode::Read(boost::property_tree::ptree const &params)
{
@@ -55,10 +60,10 @@ static void read_ct_curve(Pwl &ct_r, Pwl &ct_b,
void AwbConfig::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG("AwbConfig");
bayes = params.get<int>("bayes", 1);
frame_period = params.get<uint16_t>("frame_period", 10);
startup_frames = params.get<uint16_t>("startup_frames", 10);
+ convergence_frames = params.get<unsigned int>("convergence_frames", 3);
speed = params.get<double>("speed", 0.05);
if (params.get_child_optional("ct_curve"))
read_ct_curve(ct_r, ct_b, params.get_child("ct_curve"));
@@ -100,8 +105,8 @@ void AwbConfig::Read(boost::property_tree::ptree const &params)
if (bayes) {
if (ct_r.Empty() || ct_b.Empty() || priors.empty() ||
default_mode == nullptr) {
- RPI_WARN(
- "Bayesian AWB mis-configured - switch to Grey method");
+ LOG(RPiAwb, Warning)
+ << "Bayesian AWB mis-configured - switch to Grey method";
bayes = false;
}
}
@@ -120,6 +125,7 @@ Awb::Awb(Controller *controller)
async_abort_ = async_start_ = async_started_ = async_finished_ = false;
mode_ = nullptr;
manual_r_ = manual_b_ = 0.0;
+ first_switch_mode_ = true;
async_thread_ = std::thread(std::bind(&Awb::asyncFunc, this));
}
@@ -128,8 +134,8 @@ Awb::~Awb()
{
std::lock_guard<std::mutex> lock(mutex_);
async_abort_ = true;
- async_signal_.notify_one();
}
+ async_signal_.notify_one();
async_thread_.join();
}
@@ -145,7 +151,7 @@ void Awb::Read(boost::property_tree::ptree const &params)
void Awb::Initialise()
{
- frame_count2_ = frame_count_ = frame_phase_ = 0;
+ frame_count_ = frame_phase_ = 0;
// Put something sane into the status that we are filtering towards,
// just in case the first few frames don't have anything meaningful in
// them.
@@ -163,48 +169,92 @@ void Awb::Initialise()
sync_results_.gain_b = 1.0;
}
prev_sync_results_ = sync_results_;
+ async_results_ = sync_results_;
+}
+
+unsigned int Awb::GetConvergenceFrames() const
+{
+ // If not in auto mode, there is no convergence to wait for,
+ // so no need to drop any frames - return zero.
+ if (!isAutoEnabled())
+ return 0;
+ else
+ return config_.convergence_frames;
}
void Awb::SetMode(std::string const &mode_name)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
mode_name_ = mode_name;
}
void Awb::SetManualGains(double manual_r, double manual_b)
{
- std::unique_lock<std::mutex> lock(settings_mutex_);
// If any of these are 0.0, we switch back to auto.
manual_r_ = manual_r;
manual_b_ = manual_b;
+ // If not in auto mode, set these values into the sync_results which
+ // means that Prepare() will adopt them immediately.
+ if (!isAutoEnabled()) {
+ sync_results_.gain_r = prev_sync_results_.gain_r = manual_r_;
+ sync_results_.gain_g = prev_sync_results_.gain_g = 1.0;
+ sync_results_.gain_b = prev_sync_results_.gain_b = manual_b_;
+ }
+}
+
+void Awb::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
+ Metadata *metadata)
+{
+ // On the first mode switch we'll have no meaningful colour
+ // temperature, so try to dead reckon one if in manual mode.
+ if (!isAutoEnabled() && first_switch_mode_ && config_.bayes) {
+ Pwl ct_r_inverse = config_.ct_r.Inverse();
+ Pwl ct_b_inverse = config_.ct_b.Inverse();
+ double ct_r = ct_r_inverse.Eval(ct_r_inverse.Domain().Clip(1 / manual_r_));
+ double ct_b = ct_b_inverse.Eval(ct_b_inverse.Domain().Clip(1 / manual_b_));
+ prev_sync_results_.temperature_K = (ct_r + ct_b) / 2;
+ sync_results_.temperature_K = prev_sync_results_.temperature_K;
+ }
+ // Let other algorithms know the current white balance values.
+ metadata->Set("awb.status", prev_sync_results_);
+ first_switch_mode_ = false;
+}
+
+bool Awb::isAutoEnabled() const
+{
+ return manual_r_ == 0.0 || manual_b_ == 0.0;
}
void Awb::fetchAsyncResults()
{
- RPI_LOG("Fetch AWB results");
+ LOG(RPiAwb, Debug) << "Fetch AWB results";
async_finished_ = false;
async_started_ = false;
- sync_results_ = async_results_;
+ // It's possible manual gains could be set even while the async
+ // thread was running, so only copy the results if still in auto mode.
+ if (isAutoEnabled())
+ sync_results_ = async_results_;
}
-void Awb::restartAsync(StatisticsPtr &stats, std::string const &mode_name,
- double lux)
+void Awb::restartAsync(StatisticsPtr &stats, double lux)
{
- RPI_LOG("Starting AWB thread");
+ LOG(RPiAwb, Debug) << "Starting AWB calculation";
// this makes a new reference which belongs to the asynchronous thread
statistics_ = stats;
// store the mode as it could technically change
- auto m = config_.modes.find(mode_name);
+ auto m = config_.modes.find(mode_name_);
mode_ = m != config_.modes.end()
? &m->second
: (mode_ == nullptr ? config_.default_mode : mode_);
lux_ = lux;
frame_phase_ = 0;
- async_start_ = true;
async_started_ = true;
- size_t len = mode_name.copy(async_results_.mode,
- sizeof(async_results_.mode) - 1);
+ size_t len = mode_name_.copy(async_results_.mode,
+ sizeof(async_results_.mode) - 1);
async_results_.mode[len] = '\0';
+ {
+ std::lock_guard<std::mutex> lock(mutex_);
+ async_start_ = true;
+ }
async_signal_.notify_one();
}
@@ -215,13 +265,12 @@ void Awb::Prepare(Metadata *image_metadata)
double speed = frame_count_ < (int)config_.startup_frames
? 1.0
: config_.speed;
- RPI_LOG("Awb: frame_count " << frame_count_ << " speed " << speed);
+ LOG(RPiAwb, Debug)
+ << "frame_count " << frame_count_ << " speed " << speed;
{
std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ && async_finished_) {
- RPI_LOG("AWB thread finished");
+ if (async_started_ && async_finished_)
fetchAsyncResults();
- }
}
// Finally apply IIR filter to results and put into metadata.
memcpy(prev_sync_results_.mode, sync_results_.mode,
@@ -236,9 +285,10 @@ void Awb::Prepare(Metadata *image_metadata)
prev_sync_results_.gain_b = speed * sync_results_.gain_b +
(1.0 - speed) * prev_sync_results_.gain_b;
image_metadata->Set("awb.status", prev_sync_results_);
- RPI_LOG("Using AWB gains r " << prev_sync_results_.gain_r << " g "
- << prev_sync_results_.gain_g << " b "
- << prev_sync_results_.gain_b);
+ LOG(RPiAwb, Debug)
+ << "Using AWB gains r " << prev_sync_results_.gain_r << " g "
+ << prev_sync_results_.gain_g << " b "
+ << prev_sync_results_.gain_b;
}
void Awb::Process(StatisticsPtr &stats, Metadata *image_metadata)
@@ -246,28 +296,20 @@ void Awb::Process(StatisticsPtr &stats, Metadata *image_metadata)
// Count frames since we last poked the async thread.
if (frame_phase_ < (int)config_.frame_period)
frame_phase_++;
- if (frame_count2_ < (int)config_.startup_frames)
- frame_count2_++;
- RPI_LOG("Awb: frame_phase " << frame_phase_);
- if (frame_phase_ >= (int)config_.frame_period ||
- frame_count2_ < (int)config_.startup_frames) {
+ LOG(RPiAwb, Debug) << "frame_phase " << frame_phase_;
+ // We do not restart the async thread if we're not in auto mode.
+ if (isAutoEnabled() &&
+ (frame_phase_ >= (int)config_.frame_period ||
+ frame_count_ < (int)config_.startup_frames)) {
// Update any settings and any image metadata that we need.
- std::string mode_name;
- {
- std::unique_lock<std::mutex> lock(settings_mutex_);
- mode_name = mode_name_;
- }
struct LuxStatus lux_status = {};
lux_status.lux = 400; // in case no metadata
if (image_metadata->Get("lux.status", lux_status) != 0)
- RPI_LOG("No lux metadata found");
- RPI_LOG("Awb lux value is " << lux_status.lux);
+ LOG(RPiAwb, Debug) << "No lux metadata found";
+ LOG(RPiAwb, Debug) << "Awb lux value is " << lux_status.lux;
- std::unique_lock<std::mutex> lock(mutex_);
- if (async_started_ == false) {
- RPI_LOG("AWB thread starting");
- restartAsync(stats, mode_name, lux_status.lux);
- }
+ if (async_started_ == false)
+ restartAsync(stats, lux_status.lux);
}
}
@@ -287,8 +329,8 @@ void Awb::asyncFunc()
{
std::lock_guard<std::mutex> lock(mutex_);
async_finished_ = true;
- sync_signal_.notify_one();
}
+ sync_signal_.notify_one();
}
}
@@ -297,16 +339,16 @@ static void generate_stats(std::vector<Awb::RGB> &zones,
double min_G)
{
for (int i = 0; i < AWB_STATS_SIZE_X * AWB_STATS_SIZE_Y; i++) {
- Awb::RGB zone; // this is "invalid", unless R gets overwritten later
+ Awb::RGB zone;
double counted = stats[i].counted;
if (counted >= min_pixels) {
zone.G = stats[i].g_sum / counted;
if (zone.G >= min_G) {
zone.R = stats[i].r_sum / counted;
zone.B = stats[i].b_sum / counted;
+ zones.push_back(zone);
}
}
- zones.push_back(zone);
}
}
@@ -336,7 +378,7 @@ double Awb::computeDelta2Sum(double gain_r, double gain_b)
double delta_r = gain_r * z.R - 1 - config_.whitepoint_r;
double delta_b = gain_b * z.B - 1 - config_.whitepoint_b;
double delta2 = delta_r * delta_r + delta_b * delta_b;
- //RPI_LOG("delta_r " << delta_r << " delta_b " << delta_b << " delta2 " << delta2);
+ //LOG(RPiAwb, Debug) << "delta_r " << delta_r << " delta_b " << delta_b << " delta2 " << delta2;
delta2 = std::min(delta2, config_.delta_limit);
delta2_sum += delta2;
}
@@ -399,10 +441,11 @@ double Awb::coarseSearch(Pwl const &prior)
double prior_log_likelihood =
prior.Eval(prior.Domain().Clip(t));
double final_log_likelihood = delta2_sum - prior_log_likelihood;
- RPI_LOG("t: " << t << " gain_r " << gain_r << " gain_b "
- << gain_b << " delta2_sum " << delta2_sum
- << " prior " << prior_log_likelihood << " final "
- << final_log_likelihood);
+ LOG(RPiAwb, Debug)
+ << "t: " << t << " gain_r " << gain_r << " gain_b "
+ << gain_b << " delta2_sum " << delta2_sum
+ << " prior " << prior_log_likelihood << " final "
+ << final_log_likelihood;
points_.push_back(Pwl::Point(t, final_log_likelihood));
if (points_.back().y < points_[best_point].y)
best_point = points_.size() - 1;
@@ -413,7 +456,7 @@ double Awb::coarseSearch(Pwl const &prior)
mode_->ct_hi);
}
t = points_[best_point].x;
- RPI_LOG("Coarse search found CT " << t);
+ LOG(RPiAwb, Debug) << "Coarse search found CT " << t;
// We have the best point of the search, but refine it with a quadratic
// interpolation around its neighbours.
if (points_.size() > 2) {
@@ -422,15 +465,16 @@ double Awb::coarseSearch(Pwl const &prior)
t = interpolate_quadatric(points_[best_point - 1],
points_[best_point],
points_[best_point + 1]);
- RPI_LOG("After quadratic refinement, coarse search has CT "
- << t);
+ LOG(RPiAwb, Debug)
+ << "After quadratic refinement, coarse search has CT "
+ << t;
}
return t;
}
void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
{
- int span_r, span_b;
+ int span_r = -1, span_b = -1;
config_.ct_r.Eval(t, &span_r);
config_.ct_b.Eval(t, &span_b);
double step = t / 10 * config_.coarse_step * 0.1;
@@ -475,8 +519,9 @@ void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
double gain_r = 1 / r_test, gain_b = 1 / b_test;
double delta2_sum = computeDelta2Sum(gain_r, gain_b);
points[j].y = delta2_sum - prior_log_likelihood;
- RPI_LOG("At t " << t_test << " r " << r_test << " b "
- << b_test << ": " << points[j].y);
+ LOG(RPiAwb, Debug)
+ << "At t " << t_test << " r " << r_test << " b "
+ << b_test << ": " << points[j].y;
if (points[j].y < points[best_point].y)
best_point = j;
}
@@ -493,17 +538,18 @@ void Awb::fineSearch(double &t, double &r, double &b, Pwl const &prior)
double gain_r = 1 / r_test, gain_b = 1 / b_test;
double delta2_sum = computeDelta2Sum(gain_r, gain_b);
double final_log_likelihood = delta2_sum - prior_log_likelihood;
- RPI_LOG("Finally "
+ LOG(RPiAwb, Debug)
+ << "Finally "
<< t_test << " r " << r_test << " b " << b_test << ": "
<< final_log_likelihood
- << (final_log_likelihood < best_log_likelihood ? " BEST"
- : ""));
+ << (final_log_likelihood < best_log_likelihood ? " BEST" : "");
if (best_t == 0 || final_log_likelihood < best_log_likelihood)
best_log_likelihood = final_log_likelihood,
best_t = t_test, best_r = r_test, best_b = b_test;
}
t = best_t, r = best_r, b = best_b;
- RPI_LOG("Fine search found t " << t << " r " << r << " b " << b);
+ LOG(RPiAwb, Debug)
+ << "Fine search found t " << t << " r " << r << " b " << b;
}
void Awb::awbBayes()
@@ -517,13 +563,14 @@ void Awb::awbBayes()
Pwl prior = interpolatePrior();
prior *= zones_.size() / (double)(AWB_STATS_SIZE_X * AWB_STATS_SIZE_Y);
prior.Map([](double x, double y) {
- RPI_LOG("(" << x << "," << y << ")");
+ LOG(RPiAwb, Debug) << "(" << x << "," << y << ")";
});
double t = coarseSearch(prior);
double r = config_.ct_r.Eval(t);
double b = config_.ct_b.Eval(t);
- RPI_LOG("After coarse search: r " << r << " b " << b << " (gains r "
- << 1 / r << " b " << 1 / b << ")");
+ LOG(RPiAwb, Debug)
+ << "After coarse search: r " << r << " b " << b << " (gains r "
+ << 1 / r << " b " << 1 / b << ")";
// Not entirely sure how to handle the fine search yet. Mostly the
// estimated CT is already good enough, but the fine search allows us to
// wander transversely off the CT curve. Under some illuminants, where
@@ -531,8 +578,9 @@ void Awb::awbBayes()
// though I probably need more real datasets before deciding exactly how
// this should be controlled and tuned.
fineSearch(t, r, b, prior);
- RPI_LOG("After fine search: r " << r << " b " << b << " (gains r "
- << 1 / r << " b " << 1 / b << ")");
+ LOG(RPiAwb, Debug)
+ << "After fine search: r " << r << " b " << b << " (gains r "
+ << 1 / r << " b " << 1 / b << ")";
// Write results out for the main thread to pick up. Remember to adjust
// the gains from the ones that the "canonical sensor" would require to
// the ones needed by *this* sensor.
@@ -544,7 +592,7 @@ void Awb::awbBayes()
void Awb::awbGrey()
{
- RPI_LOG("Grey world AWB");
+ LOG(RPiAwb, Debug) << "Grey world AWB";
// Make a separate list of the derivatives for each of red and blue, so
// that we can sort them to exclude the extreme gains. We could
// consider some variations, such as normalising all the zones first, or
@@ -576,27 +624,18 @@ void Awb::awbGrey()
void Awb::doAwb()
{
- if (manual_r_ != 0.0 && manual_b_ != 0.0) {
- async_results_.temperature_K = 4500; // don't know what it is
- async_results_.gain_r = manual_r_;
- async_results_.gain_g = 1.0;
- async_results_.gain_b = manual_b_;
- RPI_LOG("Using manual white balance: gain_r "
- << async_results_.gain_r << " gain_b "
- << async_results_.gain_b);
- } else {
- prepareStats();
- RPI_LOG("Valid zones: " << zones_.size());
- if (zones_.size() > config_.min_regions) {
- if (config_.bayes)
- awbBayes();
- else
- awbGrey();
- RPI_LOG("CT found is "
- << async_results_.temperature_K
- << " with gains r " << async_results_.gain_r
- << " and b " << async_results_.gain_b);
- }
+ prepareStats();
+ LOG(RPiAwb, Debug) << "Valid zones: " << zones_.size();
+ if (zones_.size() > config_.min_regions) {
+ if (config_.bayes)
+ awbBayes();
+ else
+ awbGrey();
+ LOG(RPiAwb, Debug)
+ << "CT found is "
+ << async_results_.temperature_K
+ << " with gains r " << async_results_.gain_r
+ << " and b " << async_results_.gain_b;
}
}
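
Awb::Prepare() above smooths the asynchronous results into the output with a one-pole IIR step, moving a fraction speed of the way towards the latest result each frame. A small self-contained sketch of that step; the Gains helper struct is hypothetical:

struct Gains {
	double r, g, b;
};

// One IIR step: blend the latest async result into the smoothed output,
// as Awb::Prepare() does for gain_r/gain_g/gain_b.
static Gains iirStep(Gains prev, Gains const &latest, double speed)
{
	prev.r = speed * latest.r + (1.0 - speed) * prev.r;
	prev.g = speed * latest.g + (1.0 - speed) * prev.g;
	prev.b = speed * latest.b + (1.0 - speed) * prev.b;
	return prev;
}

With speed below 1.0 the output converges geometrically towards the async result; the startup_frames logic above simply forces speed to 1.0 so the very first results are adopted immediately.
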
diff --git a/src/ipa/raspberrypi/controller/rpi/awb.hpp b/src/ipa/raspberrypi/controller/rpi/awb.hpp
index 9124d042..8af1f27c 100644
--- a/src/ipa/raspberrypi/controller/rpi/awb.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/awb.hpp
@@ -37,6 +37,7 @@ struct AwbConfig {
uint16_t frame_period;
// number of initial frames for which speed is taken as 1.0 (maximum)
uint16_t startup_frames;
+ unsigned int convergence_frames; // approx number of frames to converge
double speed; // IIR filter speed applied to algorithm results
bool fast; // "fast" mode uses a 16x16 rather than 32x32 grid
Pwl ct_r; // function maps CT to r (= R/G)
@@ -82,29 +83,27 @@ public:
char const *Name() const override;
void Initialise() override;
void Read(boost::property_tree::ptree const &params) override;
+ unsigned int GetConvergenceFrames() const override;
void SetMode(std::string const &name) override;
void SetManualGains(double manual_r, double manual_b) override;
+ void SwitchMode(CameraMode const &camera_mode, Metadata *metadata) override;
void Prepare(Metadata *image_metadata) override;
void Process(StatisticsPtr &stats, Metadata *image_metadata) override;
struct RGB {
- RGB(double _R = INVALID, double _G = INVALID,
- double _B = INVALID)
+ RGB(double _R = 0, double _G = 0, double _B = 0)
: R(_R), G(_G), B(_B)
{
}
double R, G, B;
- static const double INVALID;
- bool Valid() const { return G != INVALID; }
- bool Invalid() const { return G == INVALID; }
RGB &operator+=(RGB const &other)
{
R += other.R, G += other.G, B += other.B;
return *this;
}
- RGB Square() const { return RGB(R * R, G * G, B * B); }
};
private:
+ bool isAutoEnabled() const;
// configuration is read-only, and available to both threads
AwbConfig config_;
std::thread async_thread_;
@@ -127,15 +126,12 @@ private:
// counts up to frame_period before restarting the async thread
int frame_phase_;
int frame_count_; // counts up to startup_frames
- int frame_count2_; // counts up to startup_frames for Process method
AwbStatus sync_results_;
AwbStatus prev_sync_results_;
std::string mode_name_;
- std::mutex settings_mutex_;
// The following are for the asynchronous thread to use, though the main
// thread can set/reset them if the async thread is known to be idle:
- void restartAsync(StatisticsPtr &stats, std::string const &mode_name,
- double lux);
+ void restartAsync(StatisticsPtr &stats, double lux);
// copy out the results from the async thread so that it can be restarted
void fetchAsyncResults();
StatisticsPtr statistics_;
@@ -156,6 +152,7 @@ private:
double manual_r_;
// manual b setting
double manual_b_;
+ bool first_switch_mode_; // is this the first call to SwitchMode?
};
static inline Awb::RGB operator+(Awb::RGB const &a, Awb::RGB const &b)
diff --git a/src/ipa/raspberrypi/controller/rpi/black_level.cpp b/src/ipa/raspberrypi/controller/rpi/black_level.cpp
index 0629b77c..6b3497f1 100644
--- a/src/ipa/raspberrypi/controller/rpi/black_level.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/black_level.cpp
@@ -8,12 +8,16 @@
#include <math.h>
#include <stdint.h>
+#include <libcamera/base/log.h>
+
#include "../black_level_status.h"
-#include "../logging.hpp"
#include "black_level.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiBlackLevel)
#define NAME "rpi.black_level"
@@ -29,12 +33,15 @@ char const *BlackLevel::Name() const
void BlackLevel::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
uint16_t black_level = params.get<uint16_t>(
"black_level", 4096); // 64 in 10 bits scaled to 16 bits
black_level_r_ = params.get<uint16_t>("black_level_r", black_level);
black_level_g_ = params.get<uint16_t>("black_level_g", black_level);
black_level_b_ = params.get<uint16_t>("black_level_b", black_level);
+ LOG(RPiBlackLevel, Debug)
+ << " Read black levels red " << black_level_r_
+ << " green " << black_level_g_
+ << " blue " << black_level_b_;
}
void BlackLevel::Prepare(Metadata *image_metadata)
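
The default black level above is given on a 16-bit scale: a 10-bit sensor value of 64 shifted up by the remaining 6 bits. A one-line compile-time check of that arithmetic:

// 64 (10-bit) << 6 == 64 * 2^(16 - 10) == 4096 (16-bit scale)
static_assert((64 << (16 - 10)) == 4096, "black level scaling");
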
diff --git a/src/ipa/raspberrypi/controller/rpi/ccm.cpp b/src/ipa/raspberrypi/controller/rpi/ccm.cpp
index a8a2caff..821a4c7c 100644
--- a/src/ipa/raspberrypi/controller/rpi/ccm.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/ccm.cpp
@@ -5,15 +5,19 @@
* ccm.cpp - CCM (colour correction matrix) control algorithm
*/
+#include <libcamera/base/log.h>
+
#include "../awb_status.h"
#include "../ccm_status.h"
-#include "../logging.hpp"
#include "../lux_status.h"
#include "../metadata.hpp"
#include "ccm.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiCcm)
// This algorithm selects a CCM (Colour Correction Matrix) according to the
// colour temperature estimated by AWB (interpolating between known matrices as
@@ -129,9 +133,9 @@ void Ccm::Prepare(Metadata *image_metadata)
lux_ok = get_locked(image_metadata, "lux.status", lux);
}
if (!awb_ok)
- RPI_WARN("Ccm: no colour temperature found");
+ LOG(RPiCcm, Warning) << "no colour temperature found";
if (!lux_ok)
- RPI_WARN("Ccm: no lux value found");
+ LOG(RPiCcm, Warning) << "no lux value found";
Matrix ccm = calculate_ccm(config_.ccms, awb.temperature_K);
double saturation = saturation_;
struct CcmStatus ccm_status;
@@ -144,13 +148,15 @@ void Ccm::Prepare(Metadata *image_metadata)
for (int i = 0; i < 3; i++)
ccm_status.matrix[j * 3 + i] =
std::max(-8.0, std::min(7.9999, ccm.m[j][i]));
- RPI_LOG("CCM: colour temperature " << awb.temperature_K << "K");
- RPI_LOG("CCM: " << ccm_status.matrix[0] << " " << ccm_status.matrix[1]
- << " " << ccm_status.matrix[2] << " "
- << ccm_status.matrix[3] << " " << ccm_status.matrix[4]
- << " " << ccm_status.matrix[5] << " "
- << ccm_status.matrix[6] << " " << ccm_status.matrix[7]
- << " " << ccm_status.matrix[8]);
+ LOG(RPiCcm, Debug)
+ << "colour temperature " << awb.temperature_K << "K";
+ LOG(RPiCcm, Debug)
+ << "CCM: " << ccm_status.matrix[0] << " " << ccm_status.matrix[1]
+ << " " << ccm_status.matrix[2] << " "
+ << ccm_status.matrix[3] << " " << ccm_status.matrix[4]
+ << " " << ccm_status.matrix[5] << " "
+ << ccm_status.matrix[6] << " " << ccm_status.matrix[7]
+ << " " << ccm_status.matrix[8];
image_metadata->Set("ccm.status", ccm_status);
}
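
Ccm::Prepare() above limits every matrix coefficient to the range the pipeline can represent before writing the CcmStatus. The clamp on its own, as a one-line sketch:

#include <algorithm>

// Clamp a colour-matrix coefficient to the representable range used above.
static double clampCcmCoefficient(double c)
{
	return std::max(-8.0, std::min(7.9999, c));
}
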
diff --git a/src/ipa/raspberrypi/controller/rpi/ccm.hpp b/src/ipa/raspberrypi/controller/rpi/ccm.hpp
index fcf077e7..330ed51f 100644
--- a/src/ipa/raspberrypi/controller/rpi/ccm.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/ccm.hpp
@@ -7,7 +7,6 @@
#pragma once
#include <vector>
-#include <atomic>
#include "../ccm_algorithm.hpp"
#include "../pwl.hpp"
@@ -70,7 +69,7 @@ public:
private:
CcmConfig config_;
- std::atomic<double> saturation_;
+ double saturation_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/contrast.cpp b/src/ipa/raspberrypi/controller/rpi/contrast.cpp
index 103153db..ae55aad5 100644
--- a/src/ipa/raspberrypi/controller/rpi/contrast.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/contrast.cpp
@@ -6,12 +6,17 @@
*/
#include <stdint.h>
+#include <libcamera/base/log.h>
+
#include "../contrast_status.h"
#include "../histogram.hpp"
#include "contrast.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiContrast)
// This is a very simple control algorithm which retrieves the results of
// AGC and AWB via their "status" metadata, and applies digital gain to the
@@ -97,11 +102,13 @@ Pwl compute_stretch_curve(Histogram const &histogram,
double hist_lo = histogram.Quantile(config.lo_histogram) *
(65536 / NUM_HISTOGRAM_BINS);
double level_lo = config.lo_level * 65536;
- RPI_LOG("Move histogram point " << hist_lo << " to " << level_lo);
+ LOG(RPiContrast, Debug)
+ << "Move histogram point " << hist_lo << " to " << level_lo;
hist_lo = std::max(
level_lo,
std::min(65535.0, std::min(hist_lo, level_lo + config.lo_max)));
- RPI_LOG("Final values " << hist_lo << " -> " << level_lo);
+ LOG(RPiContrast, Debug)
+ << "Final values " << hist_lo << " -> " << level_lo;
enhance.Append(hist_lo, level_lo);
// Keep the mid-point (median) in the same place, though, to limit the
// apparent amount of global brightness shift.
@@ -113,11 +120,13 @@ Pwl compute_stretch_curve(Histogram const &histogram,
double hist_hi = histogram.Quantile(config.hi_histogram) *
(65536 / NUM_HISTOGRAM_BINS);
double level_hi = config.hi_level * 65536;
- RPI_LOG("Move histogram point " << hist_hi << " to " << level_hi);
+ LOG(RPiContrast, Debug)
+ << "Move histogram point " << hist_hi << " to " << level_hi;
hist_hi = std::min(
level_hi,
std::max(0.0, std::max(hist_hi, level_hi - config.hi_max)));
- RPI_LOG("Final values " << hist_hi << " -> " << level_hi);
+ LOG(RPiContrast, Debug)
+ << "Final values " << hist_hi << " -> " << level_hi;
enhance.Append(hist_hi, level_hi);
enhance.Append(65535, 65535);
return enhance;
@@ -127,7 +136,8 @@ Pwl apply_manual_contrast(Pwl const &gamma_curve, double brightness,
double contrast)
{
Pwl new_gamma_curve;
- RPI_LOG("Manual brightness " << brightness << " contrast " << contrast);
+ LOG(RPiContrast, Debug)
+ << "Manual brightness " << brightness << " contrast " << contrast;
gamma_curve.Map([&](double x, double y) {
new_gamma_curve.Append(
x, std::max(0.0, std::min(65535.0,
@@ -140,7 +150,6 @@ Pwl apply_manual_contrast(Pwl const &gamma_curve, double brightness,
void Contrast::Process(StatisticsPtr &stats,
[[maybe_unused]] Metadata *image_metadata)
{
- double brightness = brightness_, contrast = contrast_;
Histogram histogram(stats->hist[0].g_hist, NUM_HISTOGRAM_BINS);
// We look at the histogram and adjust the gamma curve in the following
// ways: 1. Adjust the gamma curve so as to pull the start of the
@@ -155,13 +164,13 @@ void Contrast::Process(StatisticsPtr &stats,
}
// 2. Finally apply any manually selected brightness/contrast
// adjustment.
- if (brightness != 0 || contrast != 1.0)
- gamma_curve = apply_manual_contrast(gamma_curve, brightness,
- contrast);
+ if (brightness_ != 0 || contrast_ != 1.0)
+ gamma_curve = apply_manual_contrast(gamma_curve, brightness_,
+ contrast_);
// And fill in the status for output. Use more points towards the bottom
// of the curve.
ContrastStatus status;
- fill_in_status(status, brightness, contrast, gamma_curve);
+ fill_in_status(status, brightness_, contrast_, gamma_curve);
{
std::unique_lock<std::mutex> lock(mutex_);
status_ = status;
diff --git a/src/ipa/raspberrypi/controller/rpi/contrast.hpp b/src/ipa/raspberrypi/controller/rpi/contrast.hpp
index 6836f181..85624539 100644
--- a/src/ipa/raspberrypi/controller/rpi/contrast.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/contrast.hpp
@@ -6,7 +6,6 @@
*/
#pragma once
-#include <atomic>
#include <mutex>
#include "../contrast_algorithm.hpp"
@@ -42,8 +41,8 @@ public:
private:
ContrastConfig config_;
- std::atomic<double> brightness_;
- std::atomic<double> contrast_;
+ double brightness_;
+ double contrast_;
ContrastStatus status_;
std::mutex mutex_;
};
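
apply_manual_contrast() in the contrast.cpp hunk remaps each point of the gamma curve and clamps it to the 16-bit range; the exact expression is cut off by the hunk context above. A hypothetical per-point mapping, assuming the adjustment scales about the 16-bit mid-point and then adds the brightness offset:

#include <algorithm>

// Hypothetical per-point brightness/contrast adjustment: scale about
// mid-grey, add brightness, clamp to the 16-bit curve range.
static double adjustPoint(double y, double brightness, double contrast)
{
	double adjusted = (y - 32768.0) * contrast + 32768.0 + brightness;
	return std::max(0.0, std::min(65535.0, adjusted));
}
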
diff --git a/src/ipa/raspberrypi/controller/rpi/dpc.cpp b/src/ipa/raspberrypi/controller/rpi/dpc.cpp
index 348e1609..110f5056 100644
--- a/src/ipa/raspberrypi/controller/rpi/dpc.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/dpc.cpp
@@ -5,10 +5,14 @@
* dpc.cpp - DPC (defective pixel correction) control algorithm
*/
-#include "../logging.hpp"
+#include <libcamera/base/log.h>
+
#include "dpc.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiDpc)
// We use the lux status so that we can apply stronger settings in darkness (if
// necessary).
@@ -37,7 +41,7 @@ void Dpc::Prepare(Metadata *image_metadata)
DpcStatus dpc_status = {};
// Should we vary this with lux level or analogue gain? TBD.
dpc_status.strength = config_.strength;
- RPI_LOG("Dpc: strength " << dpc_status.strength);
+ LOG(RPiDpc, Debug) << "strength " << dpc_status.strength;
image_metadata->Set("dpc.status", dpc_status);
}
diff --git a/src/ipa/raspberrypi/controller/rpi/focus.cpp b/src/ipa/raspberrypi/controller/rpi/focus.cpp
index bab4406f..a87ec802 100644
--- a/src/ipa/raspberrypi/controller/rpi/focus.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/focus.cpp
@@ -6,7 +6,7 @@
*/
#include <stdint.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
#include "../focus_status.h"
#include "focus.hpp"
diff --git a/src/ipa/raspberrypi/controller/rpi/geq.cpp b/src/ipa/raspberrypi/controller/rpi/geq.cpp
index b6c98414..4530cb75 100644
--- a/src/ipa/raspberrypi/controller/rpi/geq.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/geq.cpp
@@ -5,14 +5,18 @@
* geq.cpp - GEQ (green equalisation) control algorithm
*/
+#include <libcamera/base/log.h>
+
#include "../device_status.h"
-#include "../logging.hpp"
#include "../lux_status.h"
#include "../pwl.hpp"
#include "geq.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiGeq)
// We use the lux status so that we can apply stronger settings in darkness (if
// necessary).
@@ -44,11 +48,12 @@ void Geq::Prepare(Metadata *image_metadata)
LuxStatus lux_status = {};
lux_status.lux = 400;
if (image_metadata->Get("lux.status", lux_status))
- RPI_WARN("Geq: no lux data found");
- DeviceStatus device_status = {};
+ LOG(RPiGeq, Warning) << "no lux data found";
+ DeviceStatus device_status;
device_status.analogue_gain = 1.0; // in case not found
if (image_metadata->Get("device.status", device_status))
- RPI_WARN("Geq: no device metadata - use analogue gain of 1x");
+ LOG(RPiGeq, Warning)
+ << "no device metadata - use analogue gain of 1x";
GeqStatus geq_status = {};
double strength =
config_.strength.Empty()
@@ -60,10 +65,11 @@ void Geq::Prepare(Metadata *image_metadata)
double slope = config_.slope * strength;
geq_status.offset = std::min(65535.0, std::max(0.0, offset));
geq_status.slope = std::min(.99999, std::max(0.0, slope));
- RPI_LOG("Geq: offset " << geq_status.offset << " slope "
- << geq_status.slope << " (analogue gain "
- << device_status.analogue_gain << " lux "
- << lux_status.lux << ")");
+ LOG(RPiGeq, Debug)
+ << "offset " << geq_status.offset << " slope "
+ << geq_status.slope << " (analogue gain "
+ << device_status.analogue_gain << " lux "
+ << lux_status.lux << ")";
image_metadata->Set("geq.status", geq_status);
}
diff --git a/src/ipa/raspberrypi/controller/rpi/lux.cpp b/src/ipa/raspberrypi/controller/rpi/lux.cpp
index 5acd49a0..6367b17d 100644
--- a/src/ipa/raspberrypi/controller/rpi/lux.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/lux.cpp
@@ -8,12 +8,17 @@
#include "linux/bcm2835-isp.h"
+#include <libcamera/base/log.h>
+
#include "../device_status.h"
-#include "../logging.hpp"
#include "lux.hpp"
using namespace RPiController;
+using namespace libcamera;
+using namespace std::literals::chrono_literals;
+
+LOG_DEFINE_CATEGORY(RPiLux)
#define NAME "rpi.lux"
@@ -33,9 +38,8 @@ char const *Lux::Name() const
void Lux::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
reference_shutter_speed_ =
- params.get<double>("reference_shutter_speed");
+ params.get<double>("reference_shutter_speed") * 1.0us;
reference_gain_ = params.get<double>("reference_gain");
reference_aperture_ = params.get<double>("reference_aperture", 1.0);
reference_Y_ = params.get<double>("reference_Y");
@@ -43,6 +47,11 @@ void Lux::Read(boost::property_tree::ptree const &params)
current_aperture_ = reference_aperture_;
}
+void Lux::SetCurrentAperture(double aperture)
+{
+ current_aperture_ = aperture;
+}
+
void Lux::Prepare(Metadata *image_metadata)
{
std::unique_lock<std::mutex> lock(mutex_);
@@ -51,16 +60,9 @@ void Lux::Prepare(Metadata *image_metadata)
void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
{
- // set some initial values to shut the compiler up
- DeviceStatus device_status =
- { .shutter_speed = 1.0,
- .analogue_gain = 1.0,
- .lens_position = 0.0,
- .aperture = 0.0,
- .flash_intensity = 0.0 };
+ DeviceStatus device_status;
if (image_metadata->Get("device.status", device_status) == 0) {
double current_gain = device_status.analogue_gain;
- double current_shutter_speed = device_status.shutter_speed;
double current_aperture = device_status.aperture;
if (current_aperture == 0)
current_aperture = current_aperture_;
@@ -75,7 +77,7 @@ void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
double current_Y = sum / (double)num + .5;
double gain_ratio = reference_gain_ / current_gain;
double shutter_speed_ratio =
- reference_shutter_speed_ / current_shutter_speed;
+ reference_shutter_speed_ / device_status.shutter_speed;
double aperture_ratio = reference_aperture_ / current_aperture;
double Y_ratio = current_Y * (65536 / num_bins) / reference_Y_;
double estimated_lux = shutter_speed_ratio * gain_ratio *
@@ -84,7 +86,7 @@ void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
LuxStatus status;
status.lux = estimated_lux;
status.aperture = current_aperture;
- RPI_LOG(Name() << ": estimated lux " << estimated_lux);
+ LOG(RPiLux, Debug) << ": estimated lux " << estimated_lux;
{
std::unique_lock<std::mutex> lock(mutex_);
status_ = status;
@@ -93,7 +95,7 @@ void Lux::Process(StatisticsPtr &stats, Metadata *image_metadata)
// algorithms get the latest value.
image_metadata->Set("lux.status", status);
} else
- RPI_WARN(Name() << ": no device metadata");
+ LOG(RPiLux, Warning) << ": no device metadata";
}
// Register algorithm with the system.
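
Lux::Process() above estimates lux from ratios between the current frame's exposure parameters and average Y and those of a calibrated reference image. A sketch of that ratio-based estimate; the final expression is truncated in the hunk, so how the aperture ratio enters (squared here) is an assumption:

// Ratio-based lux estimate relative to a reference image. The squared
// aperture term is an assumption, not taken from the hunk above.
static double estimateLux(double refShutterUs, double refGain, double refAperture,
			  double refY, double refLux,
			  double shutterUs, double gain, double aperture, double y)
{
	double shutterRatio = refShutterUs / shutterUs;
	double gainRatio = refGain / gain;
	double apertureRatio = refAperture / aperture;
	double yRatio = y / refY;
	return shutterRatio * gainRatio * apertureRatio * apertureRatio *
	       yRatio * refLux;
}
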
diff --git a/src/ipa/raspberrypi/controller/rpi/lux.hpp b/src/ipa/raspberrypi/controller/rpi/lux.hpp
index 7b6c7258..3ebd35d1 100644
--- a/src/ipa/raspberrypi/controller/rpi/lux.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/lux.hpp
@@ -6,9 +6,10 @@
*/
#pragma once
-#include <atomic>
#include <mutex>
+#include <libcamera/base/utils.h>
+
#include "../lux_status.h"
#include "../algorithm.hpp"
@@ -29,12 +30,12 @@ public:
private:
// These values define the conditions of the reference image, against
// which we compare the new image.
- double reference_shutter_speed_; // in micro-seconds
+ libcamera::utils::Duration reference_shutter_speed_;
double reference_gain_;
double reference_aperture_; // units of 1/f
double reference_Y_; // out of 65536
double reference_lux_;
- std::atomic<double> current_aperture_;
+ double current_aperture_;
LuxStatus status_;
std::mutex mutex_;
};
diff --git a/src/ipa/raspberrypi/controller/rpi/noise.cpp b/src/ipa/raspberrypi/controller/rpi/noise.cpp
index 9e9eaf1b..63cad639 100644
--- a/src/ipa/raspberrypi/controller/rpi/noise.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/noise.cpp
@@ -7,13 +7,17 @@
#include <math.h>
+#include <libcamera/base/log.h>
+
#include "../device_status.h"
-#include "../logging.hpp"
#include "../noise_status.h"
#include "noise.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiNoise)
#define NAME "rpi.noise"
@@ -37,7 +41,6 @@ void Noise::SwitchMode(CameraMode const &camera_mode,
void Noise::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
reference_constant_ = params.get<double>("reference_constant");
reference_slope_ = params.get<double>("reference_slope");
}
@@ -58,10 +61,11 @@ void Noise::Prepare(Metadata *image_metadata)
status.noise_constant = reference_constant_ * factor;
status.noise_slope = reference_slope_ * factor;
image_metadata->Set("noise.status", status);
- RPI_LOG(Name() << ": constant " << status.noise_constant
- << " slope " << status.noise_slope);
+ LOG(RPiNoise, Debug)
+ << "constant " << status.noise_constant
+ << " slope " << status.noise_slope;
} else
- RPI_WARN(Name() << " no metadata");
+ LOG(RPiNoise, Warning) << " no metadata";
}
// Register algorithm with the system.
diff --git a/src/ipa/raspberrypi/controller/rpi/noise.hpp b/src/ipa/raspberrypi/controller/rpi/noise.hpp
index 6f6e0be9..1c9de5c8 100644
--- a/src/ipa/raspberrypi/controller/rpi/noise.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/noise.hpp
@@ -26,7 +26,7 @@ private:
// the noise profile for analogue gain of 1.0
double reference_constant_;
double reference_slope_;
- std::atomic<double> mode_factor_;
+ double mode_factor_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/sdn.cpp b/src/ipa/raspberrypi/controller/rpi/sdn.cpp
index aa82830b..93845509 100644
--- a/src/ipa/raspberrypi/controller/rpi/sdn.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/sdn.cpp
@@ -1,16 +1,21 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Limited
*
* sdn.cpp - SDN (spatial denoise) control algorithm
*/
+#include <libcamera/base/log.h>
+
+#include "../denoise_status.h"
#include "../noise_status.h"
-#include "../sdn_status.h"
#include "sdn.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiSdn)
// Calculate settings for the spatial denoise block using the noise profile in
// the image metadata.
@@ -18,7 +23,7 @@ using namespace RPiController;
#define NAME "rpi.sdn"
Sdn::Sdn(Controller *controller)
- : Algorithm(controller)
+ : DenoiseAlgorithm(controller), mode_(DenoiseMode::ColourOff)
{
}
@@ -40,19 +45,26 @@ void Sdn::Prepare(Metadata *image_metadata)
struct NoiseStatus noise_status = {};
noise_status.noise_slope = 3.0; // in case no metadata
if (image_metadata->Get("noise.status", noise_status) != 0)
- RPI_WARN("Sdn: no noise profile found");
- RPI_LOG("Noise profile: constant " << noise_status.noise_constant
- << " slope "
- << noise_status.noise_slope);
- struct SdnStatus status;
+ LOG(RPiSdn, Warning) << "no noise profile found";
+ LOG(RPiSdn, Debug)
+ << "Noise profile: constant " << noise_status.noise_constant
+ << " slope " << noise_status.noise_slope;
+ struct DenoiseStatus status;
status.noise_constant = noise_status.noise_constant * deviation_;
status.noise_slope = noise_status.noise_slope * deviation_;
status.strength = strength_;
- image_metadata->Set("sdn.status", status);
- RPI_LOG("Sdn: programmed constant " << status.noise_constant
- << " slope " << status.noise_slope
- << " strength "
- << status.strength);
+ status.mode = static_cast<std::underlying_type_t<DenoiseMode>>(mode_);
+ image_metadata->Set("denoise.status", status);
+ LOG(RPiSdn, Debug)
+ << "programmed constant " << status.noise_constant
+ << " slope " << status.noise_slope
+ << " strength " << status.strength;
+}
+
+void Sdn::SetMode(DenoiseMode mode)
+{
+ // We only distinguish between off and all other modes.
+ mode_ = mode;
}
// Register algorithm with the system.
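
Sdn::Prepare() above now publishes a DenoiseStatus whose mode field stores the DenoiseMode enum as its underlying integer. A small sketch of that encoding; the stand-in enumerators are assumptions, since denoise_algorithm.hpp is not shown in this section:

#include <type_traits>

// Hypothetical stand-in for the DenoiseMode scoped enum.
enum class DenoiseMode { Off, ColourOff, ColourFast, ColourHighQuality };

// Store the mode in a plain integer field, as Sdn::Prepare() does when
// filling DenoiseStatus::mode.
static auto encodeMode(DenoiseMode mode)
{
	return static_cast<std::underlying_type_t<DenoiseMode>>(mode);
}
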
diff --git a/src/ipa/raspberrypi/controller/rpi/sdn.hpp b/src/ipa/raspberrypi/controller/rpi/sdn.hpp
index 486c000d..2371ce04 100644
--- a/src/ipa/raspberrypi/controller/rpi/sdn.hpp
+++ b/src/ipa/raspberrypi/controller/rpi/sdn.hpp
@@ -7,12 +7,13 @@
#pragma once
#include "../algorithm.hpp"
+#include "../denoise_algorithm.hpp"
namespace RPiController {
// Algorithm to calculate correct spatial denoise (SDN) settings.
-class Sdn : public Algorithm
+class Sdn : public DenoiseAlgorithm
{
public:
Sdn(Controller *controller = NULL);
@@ -20,10 +21,12 @@ public:
void Read(boost::property_tree::ptree const &params) override;
void Initialise() override;
void Prepare(Metadata *image_metadata) override;
+ void SetMode(DenoiseMode mode) override;
private:
double deviation_;
double strength_;
+ DenoiseMode mode_;
};
} // namespace RPiController
diff --git a/src/ipa/raspberrypi/controller/rpi/sharpen.cpp b/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
index c953a7d9..b0c2e00a 100644
--- a/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
+++ b/src/ipa/raspberrypi/controller/rpi/sharpen.cpp
@@ -7,12 +7,16 @@
#include <math.h>
-#include "../logging.hpp"
+#include <libcamera/base/log.h>
+
#include "../sharpen_status.h"
#include "sharpen.hpp"
using namespace RPiController;
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(RPiSharpen)
#define NAME "rpi.sharpen"
@@ -35,10 +39,13 @@ void Sharpen::SwitchMode(CameraMode const &camera_mode,
void Sharpen::Read(boost::property_tree::ptree const &params)
{
- RPI_LOG(Name());
threshold_ = params.get<double>("threshold", 1.0);
strength_ = params.get<double>("strength", 1.0);
limit_ = params.get<double>("limit", 1.0);
+ LOG(RPiSharpen, Debug)
+ << "Read threshold " << threshold_
+ << " strength " << strength_
+ << " limit " << limit_;
}
void Sharpen::SetStrength(double strength)
diff --git a/src/ipa/raspberrypi/controller/sdn_status.h b/src/ipa/raspberrypi/controller/sdn_status.h
deleted file mode 100644
index 871e0b62..00000000
--- a/src/ipa/raspberrypi/controller/sdn_status.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/* SPDX-License-Identifier: BSD-2-Clause */
-/*
- * Copyright (C) 2019, Raspberry Pi (Trading) Limited
- *
- * sdn_status.h - SDN (spatial denoise) control algorithm status
- */
-#pragma once
-
-// This stores the parameters required for Spatial Denoise (SDN).
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-struct SdnStatus {
- double noise_constant;
- double noise_slope;
- double strength;
-};
-
-#ifdef __cplusplus
-}
-#endif