Diffstat (limited to 'src/ipa/ipu3')
-rw-r--r--  src/ipa/ipu3/algorithms/af.cpp          |  3
-rw-r--r--  src/ipa/ipu3/algorithms/agc.cpp         | 44
-rw-r--r--  src/ipa/ipu3/algorithms/agc.h           |  4
-rw-r--r--  src/ipa/ipu3/algorithms/awb.cpp         | 69
-rw-r--r--  src/ipa/ipu3/algorithms/awb.h           | 19
-rw-r--r--  src/ipa/ipu3/algorithms/blc.cpp         |  6
-rw-r--r--  src/ipa/ipu3/ipa_context.cpp            | 12
-rw-r--r--  src/ipa/ipu3/ipa_context.h              |  9
-rw-r--r--  src/ipa/ipu3/ipu3-ipa-design-guide.rst  | 14
-rw-r--r--  src/ipa/ipu3/ipu3.cpp                   | 56
10 files changed, 99 insertions(+), 137 deletions(-)
diff --git a/src/ipa/ipu3/algorithms/af.cpp b/src/ipa/ipu3/algorithms/af.cpp
index 29eb7355..cf68fb59 100644
--- a/src/ipa/ipu3/algorithms/af.cpp
+++ b/src/ipa/ipu3/algorithms/af.cpp
@@ -11,7 +11,6 @@
#include <chrono>
#include <cmath>
#include <fcntl.h>
-#include <numeric>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <sys/types.h>
@@ -23,8 +22,6 @@
#include <libcamera/ipa/core_ipa_interface.h>
-#include "libipa/histogram.h"
-
/**
* \file af.h
*/
diff --git a/src/ipa/ipu3/algorithms/agc.cpp b/src/ipa/ipu3/algorithms/agc.cpp
index 0e0114f6..39d0aebb 100644
--- a/src/ipa/ipu3/algorithms/agc.cpp
+++ b/src/ipa/ipu3/algorithms/agc.cpp
@@ -9,14 +9,15 @@
#include <algorithm>
#include <chrono>
-#include <cmath>
#include <libcamera/base/log.h>
#include <libcamera/base/utils.h>
#include <libcamera/control_ids.h>
+
#include <libcamera/ipa/core_ipa_interface.h>
+#include "libipa/colours.h"
#include "libipa/histogram.h"
/**
@@ -33,7 +34,7 @@ namespace ipa::ipu3::algorithms {
* \class Agc
* \brief A mean-based auto-exposure algorithm
*
- * This algorithm calculates a shutter time and an analogue gain so that the
+ * This algorithm calculates an exposure time and an analogue gain so that the
* average value of the green channel of the brightest 2% of pixels approaches
* 0.5. The AWB gains are not used here, and all cells in the grid have the same
* weight, like an average-metering case. In this metering mode, the camera uses
@@ -51,13 +52,13 @@ LOG_DEFINE_CATEGORY(IPU3Agc)
static constexpr double kMinAnalogueGain = 1.0;
/* \todo Honour the FrameDurationLimits control instead of hardcoding a limit */
-static constexpr utils::Duration kMaxShutterSpeed = 60ms;
+static constexpr utils::Duration kMaxExposureTime = 60ms;
/* Histogram constants */
static constexpr uint32_t knumHistogramBins = 256;
Agc::Agc()
- : minShutterSpeed_(0s), maxShutterSpeed_(0s)
+ : minExposureTime_(0s), maxExposureTime_(0s)
{
}
@@ -100,9 +101,9 @@ int Agc::configure(IPAContext &context,
stride_ = configuration.grid.stride;
bdsGrid_ = configuration.grid.bdsGrid;
- minShutterSpeed_ = configuration.agc.minShutterSpeed;
- maxShutterSpeed_ = std::min(configuration.agc.maxShutterSpeed,
- kMaxShutterSpeed);
+ minExposureTime_ = configuration.agc.minExposureTime;
+ maxExposureTime_ = std::min(configuration.agc.maxExposureTime,
+ kMaxExposureTime);
minAnalogueGain_ = std::max(configuration.agc.minAnalogueGain, kMinAnalogueGain);
maxAnalogueGain_ = configuration.agc.maxAnalogueGain;
@@ -115,7 +116,7 @@ int Agc::configure(IPAContext &context,
context.activeState.agc.exposureMode = exposureModeHelpers().begin()->first;
/* \todo Run this again when FrameDurationLimits is passed in */
- setLimits(minShutterSpeed_, maxShutterSpeed_, minAnalogueGain_,
+ setLimits(minExposureTime_, maxExposureTime_, minAnalogueGain_,
maxAnalogueGain_);
resetFrameCount();
@@ -177,18 +178,16 @@ Histogram Agc::parseStatistics(const ipu3_uapi_stats_3a *stats,
*/
double Agc::estimateLuminance(double gain) const
{
- double redSum = 0, greenSum = 0, blueSum = 0;
+ RGB<double> sum{ 0.0 };
for (unsigned int i = 0; i < rgbTriples_.size(); i++) {
- redSum += std::min(std::get<0>(rgbTriples_[i]) * gain, 255.0);
- greenSum += std::min(std::get<1>(rgbTriples_[i]) * gain, 255.0);
- blueSum += std::min(std::get<2>(rgbTriples_[i]) * gain, 255.0);
+ sum.r() += std::min(std::get<0>(rgbTriples_[i]) * gain, 255.0);
+ sum.g() += std::min(std::get<1>(rgbTriples_[i]) * gain, 255.0);
+ sum.b() += std::min(std::get<2>(rgbTriples_[i]) * gain, 255.0);
}
- double ySum = redSum * rGain_ * 0.299
- + greenSum * gGain_ * 0.587
- + blueSum * bGain_ * 0.114;
-
+ RGB<double> gains{{ rGain_, gGain_, bGain_ }};
+ double ySum = rec601LuminanceFromRGB(sum * gains);
return ySum / (bdsGrid_.height * bdsGrid_.width) / 255;
}
@@ -222,20 +221,20 @@ void Agc::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
double analogueGain = frameContext.sensor.gain;
utils::Duration effectiveExposureValue = exposureTime * analogueGain;
- utils::Duration shutterTime;
+ utils::Duration newExposureTime;
double aGain, dGain;
- std::tie(shutterTime, aGain, dGain) =
+ std::tie(newExposureTime, aGain, dGain) =
calculateNewEv(context.activeState.agc.constraintMode,
context.activeState.agc.exposureMode, hist,
effectiveExposureValue);
LOG(IPU3Agc, Debug)
- << "Divided up shutter, analogue gain and digital gain are "
- << shutterTime << ", " << aGain << " and " << dGain;
+ << "Divided up exposure time, analogue gain and digital gain are "
+ << newExposureTime << ", " << aGain << " and " << dGain;
IPAActiveState &activeState = context.activeState;
- /* Update the estimated exposure and gain. */
- activeState.agc.exposure = shutterTime / context.configuration.sensor.lineDuration;
+ /* Update the estimated exposure time and gain. */
+ activeState.agc.exposure = newExposureTime / context.configuration.sensor.lineDuration;
activeState.agc.gain = aGain;
metadata.set(controls::AnalogueGain, frameContext.sensor.gain);
@@ -247,7 +246,6 @@ void Agc::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
utils::Duration frameDuration = context.configuration.sensor.lineDuration
* vTotal;
metadata.set(controls::FrameDuration, frameDuration.get<std::micro>());
-
}
REGISTER_IPA_ALGORITHM(Agc, "Agc")
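
The estimateLuminance() rewrite above keeps the same arithmetic but expresses it through libipa's RGB<double> vector and rec601LuminanceFromRGB(). A minimal sketch of the computation using plain doubles, assuming the libipa helper applies the same Rec. 601 weights the removed inline sum used:

```cpp
#include <algorithm>
#include <vector>

/* Per-cell channel sums, standing in for the rgbTriples_ entries. */
struct CellSums {
	double r = 0.0, g = 0.0, b = 0.0;
};

double estimateLuminanceSketch(const std::vector<CellSums> &cells,
			       double rGain, double gGain, double bGain,
			       double gain, unsigned int gridCells)
{
	CellSums sum;

	/* Saturate each channel at 255 before accumulating, as in the patch. */
	for (const CellSums &cell : cells) {
		sum.r += std::min(cell.r * gain, 255.0);
		sum.g += std::min(cell.g * gain, 255.0);
		sum.b += std::min(cell.b * gain, 255.0);
	}

	/* Rec. 601 luma of the AWB-gained sums, normalised to [0, 1]. */
	double ySum = sum.r * rGain * 0.299
		    + sum.g * gGain * 0.587
		    + sum.b * bGain * 0.114;

	return ySum / gridCells / 255.0;
}
```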
diff --git a/src/ipa/ipu3/algorithms/agc.h b/src/ipa/ipu3/algorithms/agc.h
index 411f4da0..890c271b 100644
--- a/src/ipa/ipu3/algorithms/agc.h
+++ b/src/ipa/ipu3/algorithms/agc.h
@@ -42,8 +42,8 @@ private:
Histogram parseStatistics(const ipu3_uapi_stats_3a *stats,
const ipu3_uapi_grid_config &grid);
- utils::Duration minShutterSpeed_;
- utils::Duration maxShutterSpeed_;
+ utils::Duration minExposureTime_;
+ utils::Duration maxExposureTime_;
double minAnalogueGain_;
double maxAnalogueGain_;
diff --git a/src/ipa/ipu3/algorithms/awb.cpp b/src/ipa/ipu3/algorithms/awb.cpp
index 4d6e3994..55de05d9 100644
--- a/src/ipa/ipu3/algorithms/awb.cpp
+++ b/src/ipa/ipu3/algorithms/awb.cpp
@@ -13,6 +13,8 @@
#include <libcamera/control_ids.h>
+#include "libipa/colours.h"
+
/**
* \file awb.h
*/
@@ -301,51 +303,24 @@ void Awb::prepare(IPAContext &context,
params->use.acc_ccm = 1;
}
-/**
- * The function estimates the correlated color temperature using
- * from RGB color space input.
- * In physics and color science, the Planckian locus or black body locus is
- * the path or locus that the color of an incandescent black body would take
- * in a particular chromaticity space as the blackbody temperature changes.
- *
- * If a narrow range of color temperatures is considered (those encapsulating
- * daylight being the most practical case) one can approximate the Planckian
- * locus in order to calculate the CCT in terms of chromaticity coordinates.
- *
- * More detailed information can be found in:
- * https://en.wikipedia.org/wiki/Color_temperature#Approximation
- */
-uint32_t Awb::estimateCCT(double red, double green, double blue)
-{
- /* Convert the RGB values to CIE tristimulus values (XYZ) */
- double X = (-0.14282) * (red) + (1.54924) * (green) + (-0.95641) * (blue);
- double Y = (-0.32466) * (red) + (1.57837) * (green) + (-0.73191) * (blue);
- double Z = (-0.68202) * (red) + (0.77073) * (green) + (0.56332) * (blue);
-
- /* Calculate the normalized chromaticity values */
- double x = X / (X + Y + Z);
- double y = Y / (X + Y + Z);
-
- /* Calculate CCT */
- double n = (x - 0.3320) / (0.1858 - y);
- return 449 * n * n * n + 3525 * n * n + 6823.3 * n + 5520.33;
-}
-
/* Generate an RGB vector with the average values for each zone */
void Awb::generateZones()
{
zones_.clear();
for (unsigned int i = 0; i < kAwbStatsSizeX * kAwbStatsSizeY; i++) {
- RGB zone;
double counted = awbStats_[i].counted;
if (counted >= cellsPerZoneThreshold_) {
- zone.G = awbStats_[i].sum.green / counted;
- if (zone.G >= kMinGreenLevelInZone) {
- zone.R = awbStats_[i].sum.red / counted;
- zone.B = awbStats_[i].sum.blue / counted;
+ RGB<double> zone{{
+ static_cast<double>(awbStats_[i].sum.red),
+ static_cast<double>(awbStats_[i].sum.green),
+ static_cast<double>(awbStats_[i].sum.blue)
+ }};
+
+ zone /= counted;
+
+ if (zone.g() >= kMinGreenLevelInZone)
zones_.push_back(zone);
- }
}
}
}
@@ -412,32 +387,32 @@ void Awb::awbGreyWorld()
* consider some variations, such as normalising all the zones first, or
* doing an L2 average etc.
*/
- std::vector<RGB> &redDerivative(zones_);
- std::vector<RGB> blueDerivative(redDerivative);
+ std::vector<RGB<double>> &redDerivative(zones_);
+ std::vector<RGB<double>> blueDerivative(redDerivative);
std::sort(redDerivative.begin(), redDerivative.end(),
- [](RGB const &a, RGB const &b) {
- return a.G * b.R < b.G * a.R;
+ [](RGB<double> const &a, RGB<double> const &b) {
+ return a.g() * b.r() < b.g() * a.r();
});
std::sort(blueDerivative.begin(), blueDerivative.end(),
- [](RGB const &a, RGB const &b) {
- return a.G * b.B < b.G * a.B;
+ [](RGB<double> const &a, RGB<double> const &b) {
+ return a.g() * b.b() < b.g() * a.b();
});
/* Average the middle half of the values. */
int discard = redDerivative.size() / 4;
- RGB sumRed(0, 0, 0);
- RGB sumBlue(0, 0, 0);
+ RGB<double> sumRed{ 0.0 };
+ RGB<double> sumBlue{ 0.0 };
for (auto ri = redDerivative.begin() + discard,
bi = blueDerivative.begin() + discard;
ri != redDerivative.end() - discard; ri++, bi++)
sumRed += *ri, sumBlue += *bi;
- double redGain = sumRed.G / (sumRed.R + 1),
- blueGain = sumBlue.G / (sumBlue.B + 1);
+ double redGain = sumRed.g() / (sumRed.r() + 1),
+ blueGain = sumBlue.g() / (sumBlue.b() + 1);
/* Color temperature is not relevant in Grey world but still useful to estimate it :-) */
- asyncResults_.temperatureK = estimateCCT(sumRed.R, sumRed.G, sumBlue.B);
+ asyncResults_.temperatureK = estimateCCT({{ sumRed.r(), sumRed.g(), sumBlue.b() }});
/*
* Gain values are unsigned integer value ranging [0, 8) with 13 bit
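
The CCT estimate now comes from libipa/colours instead of the local Awb::estimateCCT(). For reference, a self-contained sketch of the approximation the removed function implemented; the coefficients are copied from the removed lines, and whether the libipa helper matches them exactly is an assumption here:

```cpp
#include <cstdint>

uint32_t estimateCCTSketch(double red, double green, double blue)
{
	/* Convert the RGB values to CIE tristimulus values (XYZ). */
	double X = -0.14282 * red + 1.54924 * green - 0.95641 * blue;
	double Y = -0.32466 * red + 1.57837 * green - 0.73191 * blue;
	double Z = -0.68202 * red + 0.77073 * green + 0.56332 * blue;

	/* Normalised chromaticity coordinates. */
	double x = X / (X + Y + Z);
	double y = Y / (X + Y + Z);

	/* Cubic fit of CCT against n (Planckian locus approximation). */
	double n = (x - 0.3320) / (0.1858 - y);
	return 449 * n * n * n + 3525 * n * n + 6823.3 * n + 5520.33;
}
```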
diff --git a/src/ipa/ipu3/algorithms/awb.h b/src/ipa/ipu3/algorithms/awb.h
index c0202823..dbf69c90 100644
--- a/src/ipa/ipu3/algorithms/awb.h
+++ b/src/ipa/ipu3/algorithms/awb.h
@@ -13,6 +13,8 @@
#include <libcamera/geometry.h>
+#include "libcamera/internal/vector.h"
+
#include "algorithm.h"
namespace libcamera {
@@ -48,20 +50,6 @@ public:
ControlList &metadata) override;
private:
- /* \todo Make these structs available to all the ISPs ? */
- struct RGB {
- RGB(double _R = 0, double _G = 0, double _B = 0)
- : R(_R), G(_G), B(_B)
- {
- }
- double R, G, B;
- RGB &operator+=(RGB const &other)
- {
- R += other.R, G += other.G, B += other.B;
- return *this;
- }
- };
-
struct AwbStatus {
double temperatureK;
double redGain;
@@ -75,11 +63,10 @@ private:
void generateAwbStats(const ipu3_uapi_stats_3a *stats);
void clearAwbStats();
void awbGreyWorld();
- uint32_t estimateCCT(double red, double green, double blue);
static constexpr uint16_t threshold(float value);
static constexpr uint16_t gainValue(double gain);
- std::vector<RGB> zones_;
+ std::vector<RGB<double>> zones_;
Accumulator awbStats_[kAwbStatsSizeX * kAwbStatsSizeY];
AwbStatus asyncResults_;
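
With the local RGB struct gone, the AWB code relies on the RGB<double> alias from libcamera/internal/vector.h. The stand-in below, named RGBSketch to make clear it is not the real class, only illustrates the operations the patch uses (component accessors plus element-wise arithmetic):

```cpp
#include <array>

class RGBSketch
{
public:
	RGBSketch() = default;
	RGBSketch(double scalar) { data_.fill(scalar); }
	RGBSketch(const std::array<double, 3> &data) : data_(data) {}

	double &r() { return data_[0]; }
	double &g() { return data_[1]; }
	double &b() { return data_[2]; }
	double r() const { return data_[0]; }
	double g() const { return data_[1]; }
	double b() const { return data_[2]; }

	RGBSketch &operator+=(const RGBSketch &other)
	{
		for (unsigned int i = 0; i < 3; i++)
			data_[i] += other.data_[i];
		return *this;
	}

	RGBSketch &operator/=(double d)
	{
		for (unsigned int i = 0; i < 3; i++)
			data_[i] /= d;
		return *this;
	}

	/* Element-wise product, as used for sum * gains in the AGC code. */
	RGBSketch operator*(const RGBSketch &other) const
	{
		RGBSketch ret;
		for (unsigned int i = 0; i < 3; i++)
			ret.data_[i] = data_[i] * other.data_[i];
		return ret;
	}

private:
	std::array<double, 3> data_{};
};
```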
diff --git a/src/ipa/ipu3/algorithms/blc.cpp b/src/ipa/ipu3/algorithms/blc.cpp
index 257f40e2..35748fb2 100644
--- a/src/ipa/ipu3/algorithms/blc.cpp
+++ b/src/ipa/ipu3/algorithms/blc.cpp
@@ -7,8 +7,6 @@
#include "blc.h"
-#include <string.h>
-
/**
* \file blc.h
* \brief IPU3 Black Level Correction control
@@ -57,8 +55,8 @@ void BlackLevelCorrection::prepare([[maybe_unused]] IPAContext &context,
* tuning processes. This is a first rough approximation.
*/
params->obgrid_param.gr = 64;
- params->obgrid_param.r  = 64;
- params->obgrid_param.b  = 64;
+ params->obgrid_param.r = 64;
+ params->obgrid_param.b = 64;
params->obgrid_param.gb = 64;
/* Enable the custom black level correction processing */
diff --git a/src/ipa/ipu3/ipa_context.cpp b/src/ipa/ipu3/ipa_context.cpp
index 917d0654..3b22f791 100644
--- a/src/ipa/ipu3/ipa_context.cpp
+++ b/src/ipa/ipu3/ipa_context.cpp
@@ -39,6 +39,10 @@ namespace libcamera::ipa::ipu3 {
* \struct IPAContext
* \brief Global IPA context data shared between all algorithms
*
+ * \fn IPAContext::IPAContext
+ * \brief Initialize the instance with the given number of frame contexts
+ * \param[in] frameContextSize Size of the frame context ring buffer
+ *
* \var IPAContext::configuration
* \brief The IPA session configuration, immutable during the session
*
@@ -92,11 +96,11 @@ namespace libcamera::ipa::ipu3 {
* \var IPASessionConfiguration::agc
* \brief AGC parameters configuration of the IPA
*
- * \var IPASessionConfiguration::agc.minShutterSpeed
- * \brief Minimum shutter speed supported with the configured sensor
+ * \var IPASessionConfiguration::agc.minExposureTime
+ * \brief Minimum exposure time supported with the configured sensor
*
- * \var IPASessionConfiguration::agc.maxShutterSpeed
- * \brief Maximum shutter speed supported with the configured sensor
+ * \var IPASessionConfiguration::agc.maxExposureTime
+ * \brief Maximum exposure time supported with the configured sensor
*
* \var IPASessionConfiguration::agc.minAnalogueGain
* \brief Minimum analogue gain supported with the configured sensor
diff --git a/src/ipa/ipu3/ipa_context.h b/src/ipa/ipu3/ipa_context.h
index c85d1e34..97fcf06c 100644
--- a/src/ipa/ipu3/ipa_context.h
+++ b/src/ipa/ipu3/ipa_context.h
@@ -33,8 +33,8 @@ struct IPASessionConfiguration {
} af;
struct {
- utils::Duration minShutterSpeed;
- utils::Duration maxShutterSpeed;
+ utils::Duration minExposureTime;
+ utils::Duration maxExposureTime;
double minAnalogueGain;
double maxAnalogueGain;
} agc;
@@ -84,6 +84,11 @@ struct IPAFrameContext : public FrameContext {
};
struct IPAContext {
+ IPAContext(unsigned int frameContextSize)
+ : frameContexts(frameContextSize)
+ {
+ }
+
IPASessionConfiguration configuration;
IPAActiveState activeState;
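
IPAContext now takes the frame context ring buffer size as a constructor argument instead of being aggregate-initialised by IPAIPU3. A small sketch of the shape of that change, with hypothetical stand-in types and an illustrative buffer size:

```cpp
#include <vector>

/* FCQueueSketch models the FCQueue<IPAFrameContext> behind
 * IPAContext::frameContexts, sized at construction time. */
struct FrameContextSketch {
};

struct FCQueueSketch {
	explicit FCQueueSketch(unsigned int size) : contexts(size) {}
	std::vector<FrameContextSketch> contexts;
};

struct ContextSketch {
	ContextSketch(unsigned int frameContextSize)
		: frameContexts(frameContextSize)
	{
	}

	FCQueueSketch frameContexts;
};

/* Illustrative size only; the real constant is defined in ipu3.cpp. */
static constexpr unsigned int kMaxFrameContexts = 16;

/* Was context_({ {}, {}, { kMaxFrameContexts }, {} }); now simply: */
ContextSketch context_(kMaxFrameContexts);
```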
diff --git a/src/ipa/ipu3/ipu3-ipa-design-guide.rst b/src/ipa/ipu3/ipu3-ipa-design-guide.rst
index 72506397..85d735c6 100644
--- a/src/ipa/ipu3/ipu3-ipa-design-guide.rst
+++ b/src/ipa/ipu3/ipu3-ipa-design-guide.rst
@@ -27,8 +27,8 @@ from applications, and managing events from the pipeline handler.
└─┬───┬───┬──────┬────┬────┬────┬─┴────▼─┬──┘ 1: init()
│ │ │ │ ▲ │ ▲ │ ▲ │ ▲ │ 2: configure()
│1 │2 │3 │4│ │4│ │4│ │4│ │5 3: mapBuffers(), start()
- │ │ │ │ │ │ │ │ │ │ │ │ 4: (▼) queueRequest(), fillParamsBuffer(), processStatsBuffer()
- ▼ ▼ ▼ ▼ │ ▼ │ ▼ │ ▼ │ ▼ (▲) setSensorControls, paramsBufferReady, metadataReady Signals
+ │ │ │ │ │ │ │ │ │ │ │ │ 4: (▼) queueRequest(), computeParams(), processStats()
+ ▼ ▼ ▼ ▼ │ ▼ │ ▼ │ ▼ │ ▼ (▲) setSensorControls, paramsComputed, metadataReady Signals
┌──────────────────┴────┴────┴────┴─────────┐ 5: stop(), unmapBuffers()
│ IPU3 IPA │
│ ┌───────────────────────┐ │
@@ -104,8 +104,8 @@ to operate when running:
- configure()
- queueRequest()
-- fillParamsBuffer()
-- processStatsBuffer()
+- computeParams()
+- processStats()
The configuration phase allows the pipeline-handler to inform the IPA of
the current stream configurations, which is then passed into each
@@ -119,7 +119,7 @@ When configured, the IPA is notified by the pipeline handler of the
Camera ``start()`` event, after which incoming requests will be queued
for processing, requiring a parameter buffer (``ipu3_uapi_params``) to
be populated for the ImgU. This is given to the IPA through
-``fillParamsBuffer()``, and then passed directly to each algorithm
+``computeParams()``, and then passed directly to each algorithm
through the ``prepare()`` call allowing the ISP configuration to be
updated for the needs of each component that the algorithm is
responsible for.
@@ -129,7 +129,7 @@ structure that it modifies, and it should take care to ensure that any
structure set by a use flag is fully initialised to suitable values.
The parameter buffer is returned to the pipeline handler through the
-``paramsBufferReady`` signal, and from there queued to the ImgU along
+``paramsComputed`` signal, and from there queued to the ImgU along
with a raw frame captured with the CIO2.
Post-frame completion
@@ -138,7 +138,7 @@ Post-frame completion
When the capture of an image is completed, and successfully processed
through the ImgU, the generated statistics buffer
(``ipu3_uapi_stats_3a``) is given to the IPA through
-``processStatsBuffer()``. This provides the IPA with an opportunity to
+``processStats()``. This provides the IPA with an opportunity to
examine the results of the ISP and run the calculations required by each
algorithm on the new data. The algorithms may require context from the
operations of other algorithms, for example, the AWB might choose to use
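
Putting the renamed operations together, the per-frame flow from the pipeline handler's point of view looks roughly as follows. Only the method and signal names are taken from this patch; the surrounding wiring is hypothetical:

```cpp
#include <cstdint>

#include <libcamera/controls.h>

/* "Ipa" is a placeholder for the IPU3 IPA proxy used by the pipeline handler. */
template<typename Ipa>
void runFrame(Ipa &ipa, uint32_t frame, uint32_t paramsBufferId,
	      uint32_t statsBufferId, const libcamera::ControlList &controls,
	      const libcamera::ControlList &sensorControls, int64_t timestamp)
{
	/* Step 4 (▼): queue the request, then ask the IPA to fill the params. */
	ipa.queueRequest(frame, controls);
	ipa.computeParams(frame, paramsBufferId);

	/* The IPA emits paramsComputed; the buffer is queued to the ImgU. */

	/* Once the ImgU statistics are ready, hand them back to the IPA. */
	ipa.processStats(frame, timestamp, statsBufferId, sensorControls);

	/* The IPA emits metadataReady (and setSensorControls when needed). */
}
```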
diff --git a/src/ipa/ipu3/ipu3.cpp b/src/ipa/ipu3/ipu3.cpp
index cdcdf1fb..1cae08bf 100644
--- a/src/ipa/ipu3/ipu3.cpp
+++ b/src/ipa/ipu3/ipu3.cpp
@@ -23,24 +23,22 @@
#include <libcamera/base/utils.h>
#include <libcamera/control_ids.h>
+#include <libcamera/controls.h>
#include <libcamera/framebuffer.h>
+#include <libcamera/geometry.h>
+#include <libcamera/request.h>
+
#include <libcamera/ipa/ipa_interface.h>
#include <libcamera/ipa/ipa_module_info.h>
#include <libcamera/ipa/ipu3_ipa_interface.h>
-#include <libcamera/request.h>
#include "libcamera/internal/mapped_framebuffer.h"
#include "libcamera/internal/yaml_parser.h"
-#include "algorithms/af.h"
-#include "algorithms/agc.h"
-#include "algorithms/algorithm.h"
-#include "algorithms/awb.h"
-#include "algorithms/blc.h"
-#include "algorithms/tone_mapping.h"
#include "libipa/camera_sensor_helper.h"
#include "ipa_context.h"
+#include "module.h"
/* Minimum grid width, expressed as a number of cells */
static constexpr uint32_t kMinGridWidth = 16;
@@ -89,14 +87,14 @@ namespace ipa::ipu3 {
* parameter buffer, and adapting the settings of the sensor attached to the
* IPU3 CIO2 through sensor-specific V4L2 controls.
*
- * In fillParamsBuffer(), we populate the ImgU parameter buffer with
+ * In computeParams(), we populate the ImgU parameter buffer with
* settings to configure the device in preparation for handling the frame
* queued in the Request.
*
* When the frame has completed processing, the ImgU will generate a statistics
- * buffer which is given to the IPA with processStatsBuffer(). In this we run the
+ * buffer which is given to the IPA with processStats(). In this we run the
* algorithms to parse the statistics and cache any results for the next
- * fillParamsBuffer() call.
+ * computeParams() call.
*
* The individual algorithms are split into modular components that are called
* iteratively to allow them to process statistics from the ImgU in the order
@@ -114,7 +112,7 @@ namespace ipa::ipu3 {
* blue gains to apply to generate a neutral grey frame overall.
*
* AGC is handled by calculating a histogram of the green channel to estimate an
- * analogue gain and shutter time which will provide a well exposed frame. A
+ * analogue gain and exposure time which will provide a well exposed frame. A
* low-pass IIR filter is used to smooth the changes to the sensor to reduce
* perceivable steps.
*
@@ -157,10 +155,10 @@ public:
void unmapBuffers(const std::vector<unsigned int> &ids) override;
void queueRequest(const uint32_t frame, const ControlList &controls) override;
- void fillParamsBuffer(const uint32_t frame, const uint32_t bufferId) override;
- void processStatsBuffer(const uint32_t frame, const int64_t frameTimestamp,
- const uint32_t bufferId,
- const ControlList &sensorControls) override;
+ void computeParams(const uint32_t frame, const uint32_t bufferId) override;
+ void processStats(const uint32_t frame, const int64_t frameTimestamp,
+ const uint32_t bufferId,
+ const ControlList &sensorControls) override;
protected:
std::string logPrefix() const override;
@@ -189,7 +187,7 @@ private:
};
IPAIPU3::IPAIPU3()
- : context_({ {}, {}, { kMaxFrameContexts }, {} })
+ : context_(kMaxFrameContexts)
{
}
@@ -217,13 +215,13 @@ void IPAIPU3::updateSessionConfiguration(const ControlInfoMap &sensorControls)
/*
* When the AGC computes the new exposure values for a frame, it needs
- * to know the limits for shutter speed and analogue gain.
+ * to know the limits for exposure time and analogue gain.
* As it depends on the sensor, update it with the controls.
*
- * \todo take VBLANK into account for maximum shutter speed
+ * \todo take VBLANK into account for maximum exposure time
*/
- context_.configuration.agc.minShutterSpeed = minExposure * context_.configuration.sensor.lineDuration;
- context_.configuration.agc.maxShutterSpeed = maxExposure * context_.configuration.sensor.lineDuration;
+ context_.configuration.agc.minExposureTime = minExposure * context_.configuration.sensor.lineDuration;
+ context_.configuration.agc.maxExposureTime = maxExposure * context_.configuration.sensor.lineDuration;
context_.configuration.agc.minAnalogueGain = camHelper_->gain(minGain);
context_.configuration.agc.maxAnalogueGain = camHelper_->gain(maxGain);
}
@@ -313,8 +311,8 @@ int IPAIPU3::init(const IPASettings &settings,
/* Clean context */
context_.configuration = {};
- context_.configuration.sensor.lineDuration = sensorInfo.minLineLength
- * 1.0s / sensorInfo.pixelRate;
+ context_.configuration.sensor.lineDuration =
+ sensorInfo.minLineLength * 1.0s / sensorInfo.pixelRate;
/* Load the tuning data file. */
File file(settings.configurationFile);
@@ -477,8 +475,8 @@ int IPAIPU3::configure(const IPAConfigInfo &configInfo,
context_.frameContexts.clear();
/* Initialise the sensor configuration. */
- context_.configuration.sensor.lineDuration = sensorInfo_.minLineLength
- * 1.0s / sensorInfo_.pixelRate;
+ context_.configuration.sensor.lineDuration =
+ sensorInfo_.minLineLength * 1.0s / sensorInfo_.pixelRate;
context_.configuration.sensor.size = sensorInfo_.outputSize;
/*
@@ -540,7 +538,7 @@ void IPAIPU3::unmapBuffers(const std::vector<unsigned int> &ids)
* Algorithms are expected to fill the IPU3 parameter buffer for the next
* frame given their most recent processing of the ImgU statistics.
*/
-void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
+void IPAIPU3::computeParams(const uint32_t frame, const uint32_t bufferId)
{
auto it = buffers_.find(bufferId);
if (it == buffers_.end()) {
@@ -568,7 +566,7 @@ void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
for (auto const &algo : algorithms())
algo->prepare(context_, frame, frameContext, params);
- paramsBufferReady.emit(frame);
+ paramsComputed.emit(frame);
}
/**
@@ -582,9 +580,9 @@ void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
* statistics are passed to each algorithm module to run their calculations and
* update their state accordingly.
*/
-void IPAIPU3::processStatsBuffer(const uint32_t frame,
- [[maybe_unused]] const int64_t frameTimestamp,
- const uint32_t bufferId, const ControlList &sensorControls)
+void IPAIPU3::processStats(const uint32_t frame,
+ [[maybe_unused]] const int64_t frameTimestamp,
+ const uint32_t bufferId, const ControlList &sensorControls)
{
auto it = buffers_.find(bufferId);
if (it == buffers_.end()) {