Diffstat (limited to 'src/ipa/rkisp1/algorithms/agc.cpp')
-rw-r--r--	src/ipa/rkisp1/algorithms/agc.cpp	278
1 file changed, 74 insertions, 204 deletions
diff --git a/src/ipa/rkisp1/algorithms/agc.cpp b/src/ipa/rkisp1/algorithms/agc.cpp
index 47a6f7b2..50e0690f 100644
--- a/src/ipa/rkisp1/algorithms/agc.cpp
+++ b/src/ipa/rkisp1/algorithms/agc.cpp
@@ -2,7 +2,7 @@
/*
* Copyright (C) 2021-2022, Ideas On Board
*
- * agc.cpp - AGC/AEC mean-based control algorithm
+ * AGC/AEC mean-based control algorithm
*/
#include "agc.h"
@@ -36,32 +36,32 @@ namespace ipa::rkisp1::algorithms {
LOG_DEFINE_CATEGORY(RkISP1Agc)
-/* Minimum limit for analogue gain value */
-static constexpr double kMinAnalogueGain = 1.0;
-
-/* \todo Honour the FrameDurationLimits control instead of hardcoding a limit */
-static constexpr utils::Duration kMaxShutterSpeed = 60ms;
-
-/* Number of frames to wait before calculating stats on minimum exposure */
-static constexpr uint32_t kNumStartupFrames = 10;
-
-/* Target value to reach for the top 2% of the histogram */
-static constexpr double kEvGainTarget = 0.5;
+Agc::Agc()
+{
+ supportsRaw_ = true;
+}
-/*
- * Relative luminance target.
+/**
+ * \brief Initialise the AGC algorithm from tuning files
+ * \param[in] context The shared IPA context
+ * \param[in] tuningData The YamlObject containing Agc tuning data
*
- * It's a number that's chosen so that, when the camera points at a grey
- * target, the resulting image brightness is considered right.
+ * This function calls the base class' tuningData parsers to discover which
+ * control values are supported.
*
- * \todo Why is the value different between IPU3 and RkISP1 ?
+ * \return 0 on success or errors from the base class
*/
-static constexpr double kRelativeLuminanceTarget = 0.4;
-
-Agc::Agc()
- : frameCount_(0), filteredExposure_(0s)
+int Agc::init(IPAContext &context, const YamlObject &tuningData)
{
- supportsRaw_ = true;
+ int ret;
+
+ ret = parseTuningData(tuningData);
+ if (ret)
+ return ret;
+
+ context.ctrlMap.merge(controls());
+
+ return 0;
}
/**
@@ -81,6 +81,9 @@ int Agc::configure(IPAContext &context, const IPACameraSensorInfo &configInfo)
context.activeState.agc.manual.exposure = context.activeState.agc.automatic.exposure;
context.activeState.agc.autoEnabled = !context.configuration.raw;
+ context.activeState.agc.constraintMode = constraintModes().begin()->first;
+ context.activeState.agc.exposureMode = exposureModeHelpers().begin()->first;
+
/*
* Define the measurement window for AGC as a centered rectangle
* covering 3/4 of the image width and height.
@@ -90,11 +93,14 @@ int Agc::configure(IPAContext &context, const IPACameraSensorInfo &configInfo)
context.configuration.agc.measureWindow.h_size = 3 * configInfo.outputSize.width / 4;
context.configuration.agc.measureWindow.v_size = 3 * configInfo.outputSize.height / 4;
- /*
- * \todo Use the upcoming per-frame context API that will provide a
- * frame index
- */
- frameCount_ = 0;
+ /* \todo Run this again when FrameDurationLimits is passed in */
+ setLimits(context.configuration.sensor.minShutterSpeed,
+ context.configuration.sensor.maxShutterSpeed,
+ context.configuration.sensor.minAnalogueGain,
+ context.configuration.sensor.maxAnalogueGain);
+
+ resetFrameCount();
+
return 0;
}
@@ -188,127 +194,24 @@ void Agc::prepare(IPAContext &context, const uint32_t frame,
params->module_en_update |= RKISP1_CIF_ISP_MODULE_HST;
}
-/**
- * \brief Apply a filter on the exposure value to limit the speed of changes
- * \param[in] exposureValue The target exposure from the AGC algorithm
- *
- * The speed of the filter is adaptive, and will produce the target quicker
- * during startup, or when the target exposure is within 20% of the most recent
- * filter output.
- *
- * \return The filtered exposure
- */
-utils::Duration Agc::filterExposure(utils::Duration exposureValue)
-{
- double speed = 0.2;
-
- /* Adapt instantly if we are in startup phase. */
- if (frameCount_ < kNumStartupFrames)
- speed = 1.0;
-
- /*
- * If we are close to the desired result, go faster to avoid making
- * multiple micro-adjustments.
- * \todo Make this customisable?
- */
- if (filteredExposure_ < 1.2 * exposureValue &&
- filteredExposure_ > 0.8 * exposureValue)
- speed = sqrt(speed);
-
- filteredExposure_ = speed * exposureValue +
- filteredExposure_ * (1.0 - speed);
-
- LOG(RkISP1Agc, Debug) << "After filtering, exposure " << filteredExposure_;
-
- return filteredExposure_;
-}
-
-/**
- * \brief Estimate the new exposure and gain values
- * \param[inout] context The shared IPA Context
- * \param[in] frameContext The FrameContext for this frame
- * \param[in] yGain The gain calculated on the current brightness level
- * \param[in] iqMeanGain The gain calculated based on the relative luminance target
- */
-void Agc::computeExposure(IPAContext &context, IPAFrameContext &frameContext,
- double yGain, double iqMeanGain)
+void Agc::fillMetadata(IPAContext &context, IPAFrameContext &frameContext,
+ ControlList &metadata)
{
- IPASessionConfiguration &configuration = context.configuration;
- IPAActiveState &activeState = context.activeState;
-
- /* Get the effective exposure and gain applied on the sensor. */
- uint32_t exposure = frameContext.sensor.exposure;
- double analogueGain = frameContext.sensor.gain;
-
- /* Use the highest of the two gain estimates. */
- double evGain = std::max(yGain, iqMeanGain);
-
- utils::Duration minShutterSpeed = configuration.sensor.minShutterSpeed;
- utils::Duration maxShutterSpeed = std::min(configuration.sensor.maxShutterSpeed,
- kMaxShutterSpeed);
-
- double minAnalogueGain = std::max(configuration.sensor.minAnalogueGain,
- kMinAnalogueGain);
- double maxAnalogueGain = configuration.sensor.maxAnalogueGain;
-
- /* Consider within 1% of the target as correctly exposed. */
- if (utils::abs_diff(evGain, 1.0) < 0.01)
- return;
-
- /* extracted from Rpi::Agc::computeTargetExposure. */
-
- /* Calculate the shutter time in seconds. */
- utils::Duration currentShutter = exposure * configuration.sensor.lineDuration;
-
- /*
- * Update the exposure value for the next computation using the values
- * of exposure and gain really used by the sensor.
- */
- utils::Duration effectiveExposureValue = currentShutter * analogueGain;
-
- LOG(RkISP1Agc, Debug) << "Actual total exposure " << currentShutter * analogueGain
- << " Shutter speed " << currentShutter
- << " Gain " << analogueGain
- << " Needed ev gain " << evGain;
-
- /*
- * Calculate the current exposure value for the scene as the latest
- * exposure value applied multiplied by the new estimated gain.
- */
- utils::Duration exposureValue = effectiveExposureValue * evGain;
-
- /* Clamp the exposure value to the min and max authorized. */
- utils::Duration maxTotalExposure = maxShutterSpeed * maxAnalogueGain;
- exposureValue = std::min(exposureValue, maxTotalExposure);
- LOG(RkISP1Agc, Debug) << "Target total exposure " << exposureValue
- << ", maximum is " << maxTotalExposure;
-
- /*
- * Divide the exposure value as new exposure and gain values.
- * \todo estimate if we need to desaturate
- */
- exposureValue = filterExposure(exposureValue);
-
- /*
- * Push the shutter time up to the maximum first, and only then
- * increase the gain.
- */
- utils::Duration shutterTime = std::clamp<utils::Duration>(exposureValue / minAnalogueGain,
- minShutterSpeed, maxShutterSpeed);
- double stepGain = std::clamp(exposureValue / shutterTime,
- minAnalogueGain, maxAnalogueGain);
- LOG(RkISP1Agc, Debug) << "Divided up shutter and gain are "
- << shutterTime << " and "
- << stepGain;
+ utils::Duration exposureTime = context.configuration.sensor.lineDuration
+ * frameContext.sensor.exposure;
+ metadata.set(controls::AnalogueGain, frameContext.sensor.gain);
+ metadata.set(controls::ExposureTime, exposureTime.get<std::micro>());
- /* Update the estimated exposure and gain. */
- activeState.agc.automatic.exposure = shutterTime / configuration.sensor.lineDuration;
- activeState.agc.automatic.gain = stepGain;
+ /* \todo Use VBlank value calculated from each frame exposure. */
+ uint32_t vTotal = context.configuration.sensor.size.height
+ + context.configuration.sensor.defVBlank;
+ utils::Duration frameDuration = context.configuration.sensor.lineDuration
+ * vTotal;
+ metadata.set(controls::FrameDuration, frameDuration.get<std::micro>());
}
/**
* \brief Estimate the relative luminance of the frame with a given gain
- * \param[in] expMeans The mean luminance values, from the RkISP1 statistics
* \param[in] gain The gain to apply to the frame
*
* This function estimates the average relative luminance of the frame that
@@ -322,8 +225,6 @@ void Agc::computeExposure(IPAContext &context, IPAFrameContext &frameContext,
* YUV doesn't take into account the fact that the R, G and B components
* contribute differently to the relative luminance.
*
- * \todo Have a dedicated YUV algorithm ?
- *
* The values are normalized to the [0.0, 1.0] range, where 1.0 corresponds to a
* theoretical perfect reflector of 100% reference white.
*
@@ -332,45 +233,17 @@ void Agc::computeExposure(IPAContext &context, IPAFrameContext &frameContext,
*
* \return The relative luminance
*/
-double Agc::estimateLuminance(Span<const uint8_t> expMeans, double gain)
+double Agc::estimateLuminance(double gain) const
{
double ySum = 0.0;
/* Sum the averages, saturated to 255. */
- for (uint8_t expMean : expMeans)
+ for (uint8_t expMean : expMeans_)
ySum += std::min(expMean * gain, 255.0);
/* \todo Weight with the AWB gains */
- return ySum / expMeans.size() / 255;
-}
-
-/**
- * \brief Estimate the mean value of the top 2% of the histogram
- * \param[in] hist The histogram statistics computed by the RkISP1
- * \return The mean value of the top 2% of the histogram
- */
-double Agc::measureBrightness(Span<const uint32_t> hist) const
-{
- Histogram histogram{ hist };
- /* Estimate the quantile mean of the top 2% of the histogram. */
- return histogram.interQuantileMean(0.98, 1.0);
-}
-
-void Agc::fillMetadata(IPAContext &context, IPAFrameContext &frameContext,
- ControlList &metadata)
-{
- utils::Duration exposureTime = context.configuration.sensor.lineDuration
- * frameContext.sensor.exposure;
- metadata.set(controls::AnalogueGain, frameContext.sensor.gain);
- metadata.set(controls::ExposureTime, exposureTime.get<std::micro>());
-
- /* \todo Use VBlank value calculated from each frame exposure. */
- uint32_t vTotal = context.configuration.sensor.size.height
- + context.configuration.sensor.defVBlank;
- utils::Duration frameDuration = context.configuration.sensor.lineDuration
- * vTotal;
- metadata.set(controls::FrameDuration, frameDuration.get<std::micro>());
+ return ySum / expMeans_.size() / 255;
}
/**
@@ -404,41 +277,38 @@ void Agc::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
const rkisp1_cif_isp_stat *params = &stats->params;
ASSERT(stats->meas_type & RKISP1_CIF_ISP_STAT_AUTOEXP);
- Span<const uint8_t> ae{ params->ae.exp_mean, context.hw->numAeCells };
- Span<const uint32_t> hist{
- params->hist.hist_bins,
- context.hw->numHistogramBins
- };
-
- double iqMean = measureBrightness(hist);
- double iqMeanGain = kEvGainTarget * hist.size() / iqMean;
+ /* The lower 4 bits are fractional and meant to be discarded. */
+ Histogram hist({ params->hist.hist_bins, context.hw->numHistogramBins },
+ [](uint32_t x) { return x >> 4; });
+ expMeans_ = { params->ae.exp_mean, context.hw->numAeCells };
/*
- * Estimate the gain needed to achieve a relative luminance target. To
- * account for non-linearity caused by saturation, the value needs to be
- * estimated in an iterative process, as multiplying by a gain will not
- * increase the relative luminance by the same factor if some image
- * regions are saturated.
+ * The Agc algorithm needs to know the effective exposure value that was
+ * applied to the sensor when the statistics were collected.
*/
- double yGain = 1.0;
- double yTarget = kRelativeLuminanceTarget;
-
- for (unsigned int i = 0; i < 8; i++) {
- double yValue = estimateLuminance(ae, yGain);
- double extra_gain = std::min(10.0, yTarget / (yValue + .001));
-
- yGain *= extra_gain;
- LOG(RkISP1Agc, Debug) << "Y value: " << yValue
- << ", Y target: " << yTarget
- << ", gives gain " << yGain;
- if (extra_gain < 1.01)
- break;
- }
+ utils::Duration exposureTime = context.configuration.sensor.lineDuration
+ * frameContext.sensor.exposure;
+ double analogueGain = frameContext.sensor.gain;
+ utils::Duration effectiveExposureValue = exposureTime * analogueGain;
+
+ utils::Duration shutterTime;
+ double aGain, dGain;
+ std::tie(shutterTime, aGain, dGain) =
+ calculateNewEv(context.activeState.agc.constraintMode,
+ context.activeState.agc.exposureMode,
+ hist, effectiveExposureValue);
- computeExposure(context, frameContext, yGain, iqMeanGain);
- frameCount_++;
+ LOG(RkISP1Agc, Debug)
+ << "Divided up shutter, analogue gain and digital gain are "
+ << shutterTime << ", " << aGain << " and " << dGain;
+
+ IPAActiveState &activeState = context.activeState;
+ /* Update the estimated exposure and gain. */
+ activeState.agc.automatic.exposure = shutterTime / context.configuration.sensor.lineDuration;
+ activeState.agc.automatic.gain = aGain;
fillMetadata(context, frameContext, metadata);
+ expMeans_ = {};
}
REGISTER_IPA_ALGORITHM(Agc, "Agc")