Diffstat (limited to 'src/ipa/ipu3')
-rw-r--r--  src/ipa/ipu3/algorithms/af.cpp            458
-rw-r--r--  src/ipa/ipu3/algorithms/af.h               73
-rw-r--r--  src/ipa/ipu3/algorithms/agc.cpp           255
-rw-r--r--  src/ipa/ipu3/algorithms/agc.h              61
-rw-r--r--  src/ipa/ipu3/algorithms/algorithm.h        22
-rw-r--r--  src/ipa/ipu3/algorithms/awb.cpp           480
-rw-r--r--  src/ipa/ipu3/algorithms/awb.h              81
-rw-r--r--  src/ipa/ipu3/algorithms/blc.cpp            71
-rw-r--r--  src/ipa/ipu3/algorithms/blc.h              28
-rw-r--r--  src/ipa/ipu3/algorithms/meson.build         9
-rw-r--r--  src/ipa/ipu3/algorithms/tone_mapping.cpp  120
-rw-r--r--  src/ipa/ipu3/algorithms/tone_mapping.h     35
-rw-r--r--  src/ipa/ipu3/data/meson.build               9
-rw-r--r--  src/ipa/ipu3/data/uncalibrated.yaml        11
-rw-r--r--  src/ipa/ipu3/ipa_context.cpp              190
-rw-r--r--  src/ipa/ipu3/ipa_context.h                102
-rw-r--r--  src/ipa/ipu3/ipu3-ipa-design-guide.rst    162
-rw-r--r--  src/ipa/ipu3/ipu3.cpp                     692
-rw-r--r--  src/ipa/ipu3/meson.build                   31
-rw-r--r--  src/ipa/ipu3/module.h                      27
20 files changed, 2917 insertions, 0 deletions
diff --git a/src/ipa/ipu3/algorithms/af.cpp b/src/ipa/ipu3/algorithms/af.cpp
new file mode 100644
index 00000000..cf68fb59
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/af.cpp
@@ -0,0 +1,458 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Red Hat
+ *
+ * IPU3 auto focus algorithm
+ */
+
+#include "af.h"
+
+#include <algorithm>
+#include <chrono>
+#include <cmath>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <linux/videodev2.h>
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/ipa/core_ipa_interface.h>
+
+/**
+ * \file af.h
+ */
+
+/*
+ * Static variables from ChromiumOS Intel Camera HAL and ia_imaging library:
+ * - https://chromium.googlesource.com/chromiumos/platform/arc-camera/+/master/hal/intel/psl/ipu3/statsConverter/ipu3-stats.h
+ * - https://chromium.googlesource.com/chromiumos/platform/camera/+/refs/heads/main/hal/intel/ipu3/include/ia_imaging/af_public.h
+ */
+
+/** The minimum horizontal grid dimension. */
+static constexpr uint8_t kAfMinGridWidth = 16;
+/** The minimum vertical grid dimension. */
+static constexpr uint8_t kAfMinGridHeight = 16;
+/** The maximum horizontal grid dimension. */
+static constexpr uint8_t kAfMaxGridWidth = 32;
+/** The maximum vertical grid dimension. */
+static constexpr uint8_t kAfMaxGridHeight = 24;
+/** The minimum value of Log2 of the width of the grid cell. */
+static constexpr uint16_t kAfMinGridBlockWidth = 4;
+/** The minimum value of Log2 of the height of the grid cell. */
+static constexpr uint16_t kAfMinGridBlockHeight = 3;
+/** The maximum value of Log2 of the width of the grid cell. */
+static constexpr uint16_t kAfMaxGridBlockWidth = 6;
+/** The maximum value of Log2 of the height of the grid cell. */
+static constexpr uint16_t kAfMaxGridBlockHeight = 6;
+/** The number of blocks in vertical axis per slice. */
+static constexpr uint16_t kAfDefaultHeightPerSlice = 2;
+
+namespace libcamera {
+
+using namespace std::literals::chrono_literals;
+
+namespace ipa::ipu3::algorithms {
+
+LOG_DEFINE_CATEGORY(IPU3Af)
+
+/**
+ * Maximum focus steps of the VCM control
+ * \todo should be obtained from the VCM driver
+ */
+static constexpr uint32_t kMaxFocusSteps = 1023;
+
+/* Focus step sizes for the coarse and fine stages of the focus search */
+static constexpr uint32_t kCoarseSearchStep = 30;
+static constexpr uint32_t kFineSearchStep = 1;
+
+/* Max ratio of variance change, 0.0 < kMaxChange < 1.0 */
+static constexpr double kMaxChange = 0.5;
+
+/* The number of frames to be ignored before performing the focus scan. */
+static constexpr uint32_t kIgnoreFrame = 10;
+
+/* Fine scan range 0 < kFineRange < 1 */
+static constexpr double kFineRange = 0.05;
+
+/* Settings for IPU3 AF filter */
+static struct ipu3_uapi_af_filter_config afFilterConfigDefault = {
+ .y1_coeff_0 = { 0, 1, 3, 7 },
+ .y1_coeff_1 = { 11, 13, 1, 2 },
+ .y1_coeff_2 = { 8, 19, 34, 242 },
+ .y1_sign_vec = 0x7fdffbfe,
+ .y2_coeff_0 = { 0, 1, 6, 6 },
+ .y2_coeff_1 = { 13, 25, 3, 0 },
+ .y2_coeff_2 = { 25, 3, 177, 254 },
+ .y2_sign_vec = 0x4e53ca72,
+ .y_calc = { 8, 8, 8, 8 },
+ .nf = { 0, 9, 0, 9, 0 },
+};
+
+/**
+ * \class Af
+ * \brief An auto-focus algorithm based on IPU3 statistics
+ *
+ * This algorithm is used to determine the position of the lens that yields a
+ * focused image. The IPU3 AF processing block computes statistics, composed
+ * of two types of filtered values, and stores them in an AF buffer.
+ * Typically, a clear image has a relatively higher contrast than a blurred
+ * one. Therefore, if the image with the highest contrast can be found
+ * through a scan, the corresponding lens position yields the clearest image.
+ */
+Af::Af()
+ : focus_(0), bestFocus_(0), currentVariance_(0.0), previousVariance_(0.0),
+ coarseCompleted_(false), fineCompleted_(false)
+{
+}
+
+/**
+ * \brief Configure the Af given a configInfo
+ * \param[in] context The shared IPA context
+ * \param[in] configInfo The IPA configuration data
+ * \return 0 on success, a negative error code otherwise
+ */
+int Af::configure(IPAContext &context, const IPAConfigInfo &configInfo)
+{
+ struct ipu3_uapi_grid_config &grid = context.configuration.af.afGrid;
+ grid.width = kAfMinGridWidth;
+ grid.height = kAfMinGridHeight;
+ grid.block_width_log2 = kAfMinGridBlockWidth;
+ grid.block_height_log2 = kAfMinGridBlockHeight;
+
+ /*
+ * \todo - while this clamping code is effectively a no-op, it satisfies
+ * the compiler that the constant definitions of the hardware limits
+ * are used, and paves the way to support dynamic grid sizing in the
+ * future. While the block_{width,height}_log2 remain assigned to the
+ * minimum, this code should be optimized out by the compiler.
+ */
+ grid.width = std::clamp(grid.width, kAfMinGridWidth, kAfMaxGridWidth);
+ grid.height = std::clamp(grid.height, kAfMinGridHeight, kAfMaxGridHeight);
+
+ grid.block_width_log2 = std::clamp(grid.block_width_log2,
+ kAfMinGridBlockWidth,
+ kAfMaxGridBlockWidth);
+
+ grid.block_height_log2 = std::clamp(grid.block_height_log2,
+ kAfMinGridBlockHeight,
+ kAfMaxGridBlockHeight);
+
+ grid.height_per_slice = kAfDefaultHeightPerSlice;
+
+ /* Position the AF grid in the center of the BDS output. */
+ Rectangle bds(configInfo.bdsOutputSize);
+ Size gridSize(grid.width << grid.block_width_log2,
+ grid.height << grid.block_height_log2);
+
+ /*
+ * \todo - Support request metadata
+ * - Set the ROI based on any input controls in the request
+ * - Return the AF ROI as metadata in the Request
+ */
+ Rectangle roi = gridSize.centeredTo(bds.center());
+ Point start = roi.topLeft();
+
+ /* x_start and y_start should be even */
+ grid.x_start = utils::alignDown(start.x, 2);
+ grid.y_start = utils::alignDown(start.y, 2);
+ grid.y_start |= IPU3_UAPI_GRID_Y_START_EN;
+
+ /* Initial max focus step */
+ maxStep_ = kMaxFocusSteps;
+
+ /* Initial frame ignore counter */
+ afIgnoreFrameReset();
+
+ /* Initial focus value */
+ context.activeState.af.focus = 0;
+ /* Maximum variance of the AF statistics */
+ context.activeState.af.maxVariance = 0;
+	/* The stable AF flag. If it is true, the AF is in a stable state. */
+ context.activeState.af.stable = false;
+
+ return 0;
+}
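+
+/*
+ * Worked example of the grid geometry configured above (illustrative only;
+ * the values follow from the constants in this file): each cell covers
+ * (1 << block_width_log2) x (1 << block_height_log2) pixels, so the minimum
+ * grid of 16x16 cells with block size log2 values of 4 and 3 covers a
+ * window of (16 << 4) x (16 << 3) = 256x128 pixels, centred in the BDS
+ * output.
+ */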
+
+/**
+ * \copydoc libcamera::ipa::Algorithm::prepare
+ */
+void Af::prepare(IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
+ ipu3_uapi_params *params)
+{
+ const struct ipu3_uapi_grid_config &grid = context.configuration.af.afGrid;
+ params->acc_param.af.grid_cfg = grid;
+ params->acc_param.af.filter_config = afFilterConfigDefault;
+
+ /* Enable AF processing block */
+ params->use.acc_af = 1;
+}
+
+/**
+ * \brief AF coarse scan
+ * \param[in] context The shared IPA context
+ *
+ * Find a near-focused lens position using coarse steps. The step size is
+ * determined by kCoarseSearchStep.
+ */
+void Af::afCoarseScan(IPAContext &context)
+{
+ if (coarseCompleted_)
+ return;
+
+ if (afNeedIgnoreFrame())
+ return;
+
+ if (afScan(context, kCoarseSearchStep)) {
+ coarseCompleted_ = true;
+ context.activeState.af.maxVariance = 0;
+ focus_ = context.activeState.af.focus -
+ (context.activeState.af.focus * kFineRange);
+ context.activeState.af.focus = focus_;
+ previousVariance_ = 0;
+ maxStep_ = std::clamp(focus_ + static_cast<uint32_t>((focus_ * kFineRange)),
+ 0U, kMaxFocusSteps);
+ }
+}
+
+/**
+ * \brief AF fine scan
+ * \param[in] context The shared IPA context
+ *
+ * Find the optimum lens position by moving one step at a time.
+ */
+void Af::afFineScan(IPAContext &context)
+{
+ if (!coarseCompleted_)
+ return;
+
+ if (afNeedIgnoreFrame())
+ return;
+
+ if (afScan(context, kFineSearchStep)) {
+ context.activeState.af.stable = true;
+ fineCompleted_ = true;
+ }
+}
+
+/**
+ * \brief AF reset
+ * \param[in] context The shared IPA context
+ *
+ * Reset all the parameters to start over the AF process.
+ */
+void Af::afReset(IPAContext &context)
+{
+ if (afNeedIgnoreFrame())
+ return;
+
+ context.activeState.af.maxVariance = 0;
+ context.activeState.af.focus = 0;
+ focus_ = 0;
+ context.activeState.af.stable = false;
+ ignoreCounter_ = kIgnoreFrame;
+ previousVariance_ = 0.0;
+ coarseCompleted_ = false;
+ fineCompleted_ = false;
+ maxStep_ = kMaxFocusSteps;
+}
+
+/**
+ * \brief AF variance comparison
+ * \param[in] context The IPA context
+ * \param[in] min_step The VCM movement step
+ *
+ * We always keep the largest variance seen so far, replacing the previous
+ * one, as an image with a larger variance is clearer than the previous one.
+ * As soon as a negative derivative is found, the scan returns immediately.
+ *
+ * \return True if the best focus is found, false otherwise
+ */
+bool Af::afScan(IPAContext &context, int min_step)
+{
+ if (focus_ > maxStep_) {
+		/* If the max step is reached, move the lens to the best position found. */
+ context.activeState.af.focus = bestFocus_;
+ return true;
+ } else {
+ /*
+ * Find the maximum of the variance by estimating its
+ * derivative. If the direction changes, it means we have
+ * passed a maximum one step before.
+ */
+ if ((currentVariance_ - context.activeState.af.maxVariance) >=
+ -(context.activeState.af.maxVariance * 0.1)) {
+ /*
+			 * Positive or zero derivative:
+			 * The variance is still increasing, so advance the
+			 * focus for the next comparison, and update the max
+			 * variance and the best focus position.
+ */
+ bestFocus_ = focus_;
+ focus_ += min_step;
+ context.activeState.af.focus = focus_;
+ context.activeState.af.maxVariance = currentVariance_;
+ } else {
+ /*
+ * Negative derivative:
+			 * The variance has started to decrease, which means
+			 * the maximum variance has been passed. Set the focus
+			 * to the previous best position and return immediately.
+ */
+ context.activeState.af.focus = bestFocus_;
+ return true;
+ }
+ }
+
+ previousVariance_ = currentVariance_;
+ LOG(IPU3Af, Debug) << " Previous step is "
+ << bestFocus_
+ << " Current step is "
+ << focus_;
+ return false;
+}
+
+/**
+ * \brief Determine the frame to be ignored
+ * \return True if the frame should be ignored, false otherwise
+ */
+bool Af::afNeedIgnoreFrame()
+{
+	if (ignoreCounter_ == 0)
+		return false;
+
+	ignoreCounter_--;
+	return true;
+}
+
+/**
+ * \brief Reset frame ignore counter
+ */
+void Af::afIgnoreFrameReset()
+{
+ ignoreCounter_ = kIgnoreFrame;
+}
+
+/**
+ * \brief Estimate variance
+ * \param[in] y_items The AF filter data set from the IPU3 statistics buffer
+ * \param[in] isY1 Selects between filter Y1 or Y2 to calculate the variance
+ *
+ * Calculate the mean of the data set provided by \a y_items, and then
+ * calculate the variance of that data set from the mean.
+ *
+ * The operation can work on one of two sets of values contained within the
+ * \a y_items data set supplied by the IPU3. The two data sets are the results
+ * of both the Y1 and Y2 filters, which are used to support coarse (Y1) and
+ * fine (Y2) calculations of the contrast.
+ *
+ * \return The variance of the values in the data set \a y_items selected by \a isY1
+ */
+double Af::afEstimateVariance(Span<const y_table_item_t> y_items, bool isY1)
+{
+ uint32_t total = 0;
+ double mean;
+ double var_sum = 0;
+
+ for (auto y : y_items)
+ total += isY1 ? y.y1_avg : y.y2_avg;
+
+	mean = static_cast<double>(total) / y_items.size();
+
+ for (auto y : y_items) {
+ double avg = isY1 ? y.y1_avg : y.y2_avg;
+ var_sum += pow(avg - mean, 2);
+ }
+
+ return var_sum / y_items.size();
+}
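+
+/*
+ * For reference, the quantity computed above is the population variance of
+ * the selected filter output over the N grid cells:
+ *
+ *   mean = (1 / N) * sum(y_i)
+ *   var  = (1 / N) * sum((y_i - mean)^2)
+ */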
+
+/**
+ * \brief Determine out-of-focus situation
+ * \param[in] context The IPA context
+ *
+ * The image is considered out of focus when the rate of change between the
+ * maximum (focused) variance and the current variance exceeds a threshold.
+ *
+ * \return True if the variance threshold is crossed indicating lost focus,
+ * false otherwise
+ */
+bool Af::afIsOutOfFocus(IPAContext &context)
+{
+	const double diff_var = std::abs(currentVariance_ -
+ context.activeState.af.maxVariance);
+ const double var_ratio = diff_var / context.activeState.af.maxVariance;
+
+ LOG(IPU3Af, Debug) << "Variance change rate: "
+ << var_ratio
+ << " Current VCM step: "
+ << context.activeState.af.focus;
+
+	return var_ratio > kMaxChange;
+}
+
+/**
+ * \brief Determine the max contrast image and lens position
+ * \param[in] context The IPA context
+ * \param[in] frame The frame context sequence number
+ * \param[in] frameContext The current frame context
+ * \param[in] stats The statistics buffer of IPU3
+ * \param[out] metadata Metadata for the frame, to be filled by the algorithm
+ *
+ * Ideally, a clear image also has a relatively high contrast, so the image
+ * captured at each focus step is evaluated to find the optimal step.
+ *
+ * The Hill Climbing Algorithm[1] is used to find the maximum variance of the
+ * AF statistics, which are the AF output of the IPU3. The focus step is
+ * increased, then the variance of the AF statistics is estimated. When a
+ * negative derivative is found, the peak has just been passed, and the best
+ * focus is inferred to be the previous position.
+ *
+ * [1] Hill Climbing Algorithm, https://en.wikipedia.org/wiki/Hill_climbing
+ */
+void Af::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
+ const ipu3_uapi_stats_3a *stats,
+ [[maybe_unused]] ControlList &metadata)
+{
+ /* Evaluate the AF buffer length */
+ uint32_t afRawBufferLen = context.configuration.af.afGrid.width *
+ context.configuration.af.afGrid.height;
+
+ ASSERT(afRawBufferLen < IPU3_UAPI_AF_Y_TABLE_MAX_SIZE);
+
+ Span<const y_table_item_t> y_items(reinterpret_cast<const y_table_item_t *>(&stats->af_raw_buffer.y_table),
+ afRawBufferLen);
+
+ /*
+ * Calculate the mean and the variance of AF statistics for a given grid.
+ * For coarse: y1 are used.
+ * For fine: y2 results are used.
+ */
+ currentVariance_ = afEstimateVariance(y_items, !coarseCompleted_);
+
+ if (!context.activeState.af.stable) {
+ afCoarseScan(context);
+ afFineScan(context);
+ } else {
+ if (afIsOutOfFocus(context))
+ afReset(context);
+ else
+ afIgnoreFrameReset();
+ }
+}
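+
+/*
+ * Minimal standalone sketch of the hill-climbing search that afScan()
+ * performs incrementally across frames, assuming a unimodal contrast curve.
+ * contrastAt() is a hypothetical stand-in for one capture plus an
+ * afEstimateVariance() round trip; it is not part of this module.
+ *
+ *   uint32_t hillClimb(uint32_t step, uint32_t maxPos)
+ *   {
+ *       uint32_t pos = 0, best = 0;
+ *       double maxVar = 0.0;
+ *
+ *       while (pos <= maxPos) {
+ *           double var = contrastAt(pos);
+ *           if (var - maxVar < -(maxVar * 0.1))
+ *               break;        // negative derivative: peak passed
+ *           best = pos;
+ *           maxVar = var;
+ *           pos += step;
+ *       }
+ *
+ *       return best;
+ *   }
+ */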
+
+REGISTER_IPA_ALGORITHM(Af, "Af")
+
+} /* namespace ipa::ipu3::algorithms */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/algorithms/af.h b/src/ipa/ipu3/algorithms/af.h
new file mode 100644
index 00000000..68126d46
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/af.h
@@ -0,0 +1,73 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Red Hat
+ *
+ * IPU3 Af algorithm
+ */
+
+#pragma once
+
+#include <linux/intel-ipu3.h>
+
+#include <libcamera/base/utils.h>
+
+#include <libcamera/geometry.h>
+
+#include "algorithm.h"
+
+namespace libcamera {
+
+namespace ipa::ipu3::algorithms {
+
+class Af : public Algorithm
+{
+ /* The format of y_table. From ipu3-ipa repo */
+ typedef struct __attribute__((packed)) y_table_item {
+ uint16_t y1_avg;
+ uint16_t y2_avg;
+ } y_table_item_t;
+public:
+ Af();
+ ~Af() = default;
+
+ int configure(IPAContext &context, const IPAConfigInfo &configInfo) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ ipu3_uapi_params *params) override;
+ void process(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ const ipu3_uapi_stats_3a *stats,
+ ControlList &metadata) override;
+
+private:
+ void afCoarseScan(IPAContext &context);
+ void afFineScan(IPAContext &context);
+ bool afScan(IPAContext &context, int min_step);
+ void afReset(IPAContext &context);
+ bool afNeedIgnoreFrame();
+ void afIgnoreFrameReset();
+ double afEstimateVariance(Span<const y_table_item_t> y_items, bool isY1);
+
+ bool afIsOutOfFocus(IPAContext &context);
+
+ /* VCM step configuration. It is the current setting of the VCM step. */
+ uint32_t focus_;
+ /* The best VCM step. It is a local optimum VCM step during scanning. */
+ uint32_t bestFocus_;
+ /* Current AF statistic variance. */
+ double currentVariance_;
+	/* Number of frames to ignore before starting the measurement. */
+	uint32_t ignoreCounter_;
+	/* Used to determine the derivative during the scan. */
+ double previousVariance_;
+ /* The designated maximum range of focus scanning. */
+ uint32_t maxStep_;
+ /* If the coarse scan completes, it is set to true. */
+ bool coarseCompleted_;
+ /* If the fine scan completes, it is set to true. */
+ bool fineCompleted_;
+};
+
+} /* namespace ipa::ipu3::algorithms */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/algorithms/agc.cpp b/src/ipa/ipu3/algorithms/agc.cpp
new file mode 100644
index 00000000..39d0aebb
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/agc.cpp
@@ -0,0 +1,255 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * AGC/AEC mean-based control algorithm
+ */
+
+#include "agc.h"
+
+#include <algorithm>
+#include <chrono>
+
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
+#include <libcamera/control_ids.h>
+
+#include <libcamera/ipa/core_ipa_interface.h>
+
+#include "libipa/colours.h"
+#include "libipa/histogram.h"
+
+/**
+ * \file agc.h
+ */
+
+namespace libcamera {
+
+using namespace std::literals::chrono_literals;
+
+namespace ipa::ipu3::algorithms {
+
+/**
+ * \class Agc
+ * \brief A mean-based auto-exposure algorithm
+ *
+ * This algorithm calculates an exposure time and an analogue gain so that the
+ * average value of the green channel of the brightest 2% of pixels approaches
+ * 0.5. The AWB gains are not used here, and all cells in the grid have the same
+ * weight, like an average-metering case. In this metering mode, the camera uses
+ * light information from the entire scene and creates an average for the final
+ * exposure setting, giving no weighting to any particular portion of the
+ * metered area.
+ *
+ * Reference: Battiato, Messina & Castorina. (2008). Exposure
+ * Correction for Imaging Devices: An Overview. 10.1201/9781420054538.ch12.
+ */
+
+LOG_DEFINE_CATEGORY(IPU3Agc)
+
+/* Minimum limit for analogue gain value */
+static constexpr double kMinAnalogueGain = 1.0;
+
+/* \todo Honour the FrameDurationLimits control instead of hardcoding a limit */
+static constexpr utils::Duration kMaxExposureTime = 60ms;
+
+/* Histogram constants */
+static constexpr uint32_t knumHistogramBins = 256;
+
+Agc::Agc()
+ : minExposureTime_(0s), maxExposureTime_(0s)
+{
+}
+
+/**
+ * \brief Initialise the AGC algorithm from tuning files
+ * \param[in] context The shared IPA context
+ * \param[in] tuningData The YamlObject containing Agc tuning data
+ *
+ * This function calls the base class' tuningData parsers to discover which
+ * control values are supported.
+ *
+ * \return 0 on success or errors from the base class
+ */
+int Agc::init(IPAContext &context, const YamlObject &tuningData)
+{
+ int ret;
+
+ ret = parseTuningData(tuningData);
+ if (ret)
+ return ret;
+
+ context.ctrlMap.merge(controls());
+
+ return 0;
+}
+
+/**
+ * \brief Configure the AGC given a configInfo
+ * \param[in] context The shared IPA context
+ * \param[in] configInfo The IPA configuration data
+ *
+ * \return 0
+ */
+int Agc::configure(IPAContext &context,
+ [[maybe_unused]] const IPAConfigInfo &configInfo)
+{
+ const IPASessionConfiguration &configuration = context.configuration;
+ IPAActiveState &activeState = context.activeState;
+
+ stride_ = configuration.grid.stride;
+ bdsGrid_ = configuration.grid.bdsGrid;
+
+ minExposureTime_ = configuration.agc.minExposureTime;
+ maxExposureTime_ = std::min(configuration.agc.maxExposureTime,
+ kMaxExposureTime);
+
+ minAnalogueGain_ = std::max(configuration.agc.minAnalogueGain, kMinAnalogueGain);
+ maxAnalogueGain_ = configuration.agc.maxAnalogueGain;
+
+ /* Configure the default exposure and gain. */
+ activeState.agc.gain = minAnalogueGain_;
+ activeState.agc.exposure = 10ms / configuration.sensor.lineDuration;
+
+ context.activeState.agc.constraintMode = constraintModes().begin()->first;
+ context.activeState.agc.exposureMode = exposureModeHelpers().begin()->first;
+
+ /* \todo Run this again when FrameDurationLimits is passed in */
+ setLimits(minExposureTime_, maxExposureTime_, minAnalogueGain_,
+ maxAnalogueGain_);
+ resetFrameCount();
+
+ return 0;
+}
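+
+/*
+ * Worked example of the exposure bookkeeping above, with illustrative
+ * numbers only: for a sensor line duration of 30us, the default 10ms
+ * exposure converts to 10000us / 30us = 333 exposure lines, the unit in
+ * which the sensor exposure control is expressed.
+ */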
+
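+/**
+ * \brief Parse the ImgU AWB statistics into a brightness histogram
+ * \param[in] stats The IPU3 statistics buffer
+ * \param[in] grid The grid the statistics were generated with
+ *
+ * Walk the AWB statistics cell grid, caching the per-cell RGB averages for
+ * the later luminance estimation, and accumulating the per-cell green
+ * averages into a 256-bin histogram for the exposure computation.
+ *
+ * \return A histogram of the green channel averages
+ */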
+Histogram Agc::parseStatistics(const ipu3_uapi_stats_3a *stats,
+ const ipu3_uapi_grid_config &grid)
+{
+ uint32_t hist[knumHistogramBins] = { 0 };
+
+ rgbTriples_.clear();
+
+ for (unsigned int cellY = 0; cellY < grid.height; cellY++) {
+ for (unsigned int cellX = 0; cellX < grid.width; cellX++) {
+ uint32_t cellPosition = cellY * stride_ + cellX;
+
+ const ipu3_uapi_awb_set_item *cell =
+ reinterpret_cast<const ipu3_uapi_awb_set_item *>(
+ &stats->awb_raw_buffer.meta_data[cellPosition]);
+
+ rgbTriples_.push_back({
+ cell->R_avg,
+ (cell->Gr_avg + cell->Gb_avg) / 2,
+ cell->B_avg
+ });
+
+ /*
+ * Store the average green value to estimate the
+ * brightness. Even the overexposed pixels are
+ * taken into account.
+ */
+ hist[(cell->Gr_avg + cell->Gb_avg) / 2]++;
+ }
+ }
+
+ return Histogram(Span<uint32_t>(hist));
+}
+
+/**
+ * \brief Estimate the relative luminance of the frame with a given gain
+ * \param[in] gain The gain to apply in estimating luminance
+ *
+ * The estimation is based on the AWB statistics for the current frame. Red,
+ * green and blue averages for all cells are first multiplied by the gain, and
+ * then saturated to approximate the sensor behaviour at high brightness
+ * values. The approximation is quite rough, as it doesn't take into account
+ * non-linearities when approaching saturation.
+ *
+ * The relative luminance (Y) is computed from the linear RGB components using
+ * the Rec. 601 formula. The values are normalized to the [0.0, 1.0] range,
+ * where 1.0 corresponds to a theoretical perfect reflector of 100% reference
+ * white.
+ *
+ * More detailed information can be found in:
+ * https://en.wikipedia.org/wiki/Relative_luminance
+ *
+ * \return The relative luminance of the frame
+ */
+double Agc::estimateLuminance(double gain) const
+{
+ RGB<double> sum{ 0.0 };
+
+ for (unsigned int i = 0; i < rgbTriples_.size(); i++) {
+ sum.r() += std::min(std::get<0>(rgbTriples_[i]) * gain, 255.0);
+ sum.g() += std::min(std::get<1>(rgbTriples_[i]) * gain, 255.0);
+ sum.b() += std::min(std::get<2>(rgbTriples_[i]) * gain, 255.0);
+ }
+
+ RGB<double> gains{{ rGain_, gGain_, bGain_ }};
+ double ySum = rec601LuminanceFromRGB(sum * gains);
+ return ySum / (bdsGrid_.height * bdsGrid_.width) / 255;
+}
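+
+/*
+ * For reference, the Rec. 601 weighting applied by rec601LuminanceFromRGB()
+ * above is:
+ *
+ *   Y = 0.299 * R + 0.587 * G + 0.114 * B
+ */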
+
+/**
+ * \brief Process IPU3 statistics, and run AGC operations
+ * \param[in] context The shared IPA context
+ * \param[in] frame The current frame sequence number
+ * \param[in] frameContext The current frame context
+ * \param[in] stats The IPU3 statistics and ISP results
+ * \param[out] metadata Metadata for the frame, to be filled by the algorithm
+ *
+ * Identify the current image brightness, and use that to estimate the optimal
+ * new exposure and gain for the scene.
+ */
+void Agc::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
+ IPAFrameContext &frameContext,
+ const ipu3_uapi_stats_3a *stats,
+ ControlList &metadata)
+{
+ Histogram hist = parseStatistics(stats, context.configuration.grid.bdsGrid);
+ rGain_ = context.activeState.awb.gains.red;
+	gGain_ = context.activeState.awb.gains.green;
+	bGain_ = context.activeState.awb.gains.blue;
+
+ /*
+ * The Agc algorithm needs to know the effective exposure value that was
+ * applied to the sensor when the statistics were collected.
+ */
+ utils::Duration exposureTime = context.configuration.sensor.lineDuration
+ * frameContext.sensor.exposure;
+ double analogueGain = frameContext.sensor.gain;
+ utils::Duration effectiveExposureValue = exposureTime * analogueGain;
+
+ utils::Duration newExposureTime;
+ double aGain, dGain;
+ std::tie(newExposureTime, aGain, dGain) =
+ calculateNewEv(context.activeState.agc.constraintMode,
+ context.activeState.agc.exposureMode, hist,
+ effectiveExposureValue);
+
+ LOG(IPU3Agc, Debug)
+ << "Divided up exposure time, analogue gain and digital gain are "
+ << newExposureTime << ", " << aGain << " and " << dGain;
+
+ IPAActiveState &activeState = context.activeState;
+ /* Update the estimated exposure time and gain. */
+ activeState.agc.exposure = newExposureTime / context.configuration.sensor.lineDuration;
+ activeState.agc.gain = aGain;
+
+ metadata.set(controls::AnalogueGain, frameContext.sensor.gain);
+ metadata.set(controls::ExposureTime, exposureTime.get<std::micro>());
+
+ /* \todo Use VBlank value calculated from each frame exposure. */
+ uint32_t vTotal = context.configuration.sensor.size.height
+ + context.configuration.sensor.defVBlank;
+ utils::Duration frameDuration = context.configuration.sensor.lineDuration
+ * vTotal;
+ metadata.set(controls::FrameDuration, frameDuration.get<std::micro>());
+}
+
+REGISTER_IPA_ALGORITHM(Agc, "Agc")
+
+} /* namespace ipa::ipu3::algorithms */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/algorithms/agc.h b/src/ipa/ipu3/algorithms/agc.h
new file mode 100644
index 00000000..890c271b
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/agc.h
@@ -0,0 +1,61 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * IPU3 AGC/AEC mean-based control algorithm
+ */
+
+#pragma once
+
+#include <linux/intel-ipu3.h>
+
+#include <libcamera/base/utils.h>
+
+#include <libcamera/geometry.h>
+
+#include "libipa/agc_mean_luminance.h"
+#include "libipa/histogram.h"
+
+#include "algorithm.h"
+
+namespace libcamera {
+
+struct IPACameraSensorInfo;
+
+namespace ipa::ipu3::algorithms {
+
+class Agc : public Algorithm, public AgcMeanLuminance
+{
+public:
+ Agc();
+ ~Agc() = default;
+
+ int init(IPAContext &context, const YamlObject &tuningData) override;
+ int configure(IPAContext &context, const IPAConfigInfo &configInfo) override;
+ void process(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ const ipu3_uapi_stats_3a *stats,
+ ControlList &metadata) override;
+
+private:
+ double estimateLuminance(double gain) const override;
+ Histogram parseStatistics(const ipu3_uapi_stats_3a *stats,
+ const ipu3_uapi_grid_config &grid);
+
+ utils::Duration minExposureTime_;
+ utils::Duration maxExposureTime_;
+
+ double minAnalogueGain_;
+ double maxAnalogueGain_;
+
+ uint32_t stride_;
+ double rGain_;
+ double gGain_;
+ double bGain_;
+ ipu3_uapi_grid_config bdsGrid_;
+ std::vector<std::tuple<uint8_t, uint8_t, uint8_t>> rgbTriples_;
+};
+
+} /* namespace ipa::ipu3::algorithms */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/algorithms/algorithm.h b/src/ipa/ipu3/algorithms/algorithm.h
new file mode 100644
index 00000000..c7801f93
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/algorithm.h
@@ -0,0 +1,22 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * IPU3 control algorithm interface
+ */
+
+#pragma once
+
+#include <libipa/algorithm.h>
+
+#include "module.h"
+
+namespace libcamera {
+
+namespace ipa::ipu3 {
+
+using Algorithm = libcamera::ipa::Algorithm<Module>;
+
+} /* namespace ipa::ipu3 */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/algorithms/awb.cpp b/src/ipa/ipu3/algorithms/awb.cpp
new file mode 100644
index 00000000..55de05d9
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/awb.cpp
@@ -0,0 +1,480 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * AWB control algorithm
+ */
+
+#include "awb.h"
+
+#include <algorithm>
+#include <cmath>
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/control_ids.h>
+
+#include "libipa/colours.h"
+
+/**
+ * \file awb.h
+ */
+
+namespace libcamera {
+
+namespace ipa::ipu3::algorithms {
+
+LOG_DEFINE_CATEGORY(IPU3Awb)
+
+/*
+ * When zones are used for the grey world algorithm, they are only considered if
+ * their average green value is at least 16/255 (after black level subtraction)
+ * to exclude zones that are too dark and don't provide relevant colour
+ * information (on the opposite side of the spectrum, saturated regions are
+ * excluded by the ImgU statistics engine).
+ */
+static constexpr uint32_t kMinGreenLevelInZone = 16;
+
+/*
+ * Minimum proportion of non-saturated cells in a zone for the zone to be used
+ * by the AWB algorithm.
+ */
+static constexpr double kMinCellsPerZoneRatio = 0.8;
+
+/*
+ * Maximum ratio of saturated pixels in a cell for the cell to be considered
+ * non-saturated and counted by the AWB algorithm.
+ */
+static constexpr uint32_t kMaxCellSaturationRatio = 255 * 90 / 100;
+
+/**
+ * \struct Accumulator
+ * \brief RGB statistics for a given zone
+ *
+ * Accumulate red, green and blue values for each non-saturated item over a
+ * zone. Items can for instance be pixels, but also the average of groups of
+ * pixels, depending on who uses the accumulator.
+ * \todo move this description and structure into a common header
+ *
+ * Zones which are saturated beyond the threshold defined in
+ * ipu3_uapi_awb_config_s are not included in the average.
+ *
+ * \var Accumulator::counted
+ * \brief Number of unsaturated cells used to calculate the sums
+ *
+ * \var Accumulator::sum
+ * \brief A structure containing the average red, green and blue sums
+ *
+ * \var Accumulator::sum.red
+ * \brief Sum of the average red values of each unsaturated cell in the zone
+ *
+ * \var Accumulator::sum.green
+ * \brief Sum of the average green values of each unsaturated cell in the zone
+ *
+ * \var Accumulator::sum.blue
+ * \brief Sum of the average blue values of each unsaturated cell in the zone
+ */
+
+/**
+ * \struct Awb::AwbStatus
+ * \brief AWB parameters calculated
+ *
+ * The AwbStatus structure is intended to store the AWB
+ * parameters calculated by the algorithm
+ *
+ * \var AwbStatus::temperatureK
+ * \brief Color temperature calculated
+ *
+ * \var AwbStatus::redGain
+ * \brief Gain calculated for the red channel
+ *
+ * \var AwbStatus::greenGain
+ * \brief Gain calculated for the green channel
+ *
+ * \var AwbStatus::blueGain
+ * \brief Gain calculated for the blue channel
+ */
+
+/* Default settings for Bayer noise reduction replicated from the Kernel */
+static const struct ipu3_uapi_bnr_static_config imguCssBnrDefaults = {
+ .wb_gains = { 16, 16, 16, 16 },
+ .wb_gains_thr = { 255, 255, 255, 255 },
+ .thr_coeffs = { 1700, 0, 31, 31, 0, 16 },
+ .thr_ctrl_shd = { 26, 26, 26, 26 },
+ .opt_center = { -648, 0, -366, 0 },
+ .lut = {
+ { 17, 23, 28, 32, 36, 39, 42, 45,
+ 48, 51, 53, 55, 58, 60, 62, 64,
+ 66, 68, 70, 72, 73, 75, 77, 78,
+ 80, 82, 83, 85, 86, 88, 89, 90 } },
+ .bp_ctrl = { 20, 0, 1, 40, 0, 6, 0, 6, 0 },
+ .dn_detect_ctrl = { 9, 3, 4, 0, 8, 0, 1, 1, 1, 1, 0 },
+ .column_size = 1296,
+ .opt_center_sqr = { 419904, 133956 },
+};
+
+/* Default color correction matrix defined as an identity matrix */
+static const struct ipu3_uapi_ccm_mat_config imguCssCcmDefault = {
+ 8191, 0, 0, 0,
+ 0, 8191, 0, 0,
+ 0, 0, 8191, 0
+};
+
+/**
+ * \class Awb
+ * \brief A Grey world white balance correction algorithm
+ *
+ * The Grey World algorithm assumes that the scene, in average, is neutral grey.
+ * Reference: Lam, Edmund & Fung, George. (2008). Automatic White Balancing in
+ * Digital Photography. 10.1201/9781420054538.ch10.
+ *
+ * The IPU3 generates statistics from the Bayer Down Scaler output into a grid
+ * defined in the ipu3_uapi_awb_config_s structure.
+ *
+ * - Cells are defined in Pixels
+ * - Zones are defined in Cells
+ *
+ *                       80 cells
+ *            /─────── 1280 pixels ───────\
+ *                       16 zones
+ *              16
+ *            ┌────┬────┬────┬─ ─────┬────┐   \
+ *            │Cell│    │    │       │    │   │
+ *         16 │ px │    │    │       │    │   │
+ *            ├────┼────┼────┼─ ─────┼────┤   │
+ *            │    │    │    │       │    │   │ 720 pixels
+ *            ├────┼─ Zone of Cells ─┼────┤   │ 45 cells
+ *            │    │     (5 x 4)     │    │   │ 12 zones
+ *            ├────┼────┼────┼─ ─────┼────┤   │
+ *            │    │    │    │       │    │   │
+ *            └────┴────┴────┴─ ─────┴────┘   /
+ *
+ *
+ * In each cell, the ImgU computes for each colour component the average of all
+ * unsaturated pixels (below a programmable threshold). It also provides the
+ * ratio of saturated pixels in the cell.
+ *
+ * The AWB algorithm operates on a coarser grid, made by grouping cells from the
+ * hardware grid into zones. The number of zones is fixed to \a kAwbStatsSizeX x
+ * \a kAwbStatsSizeY. For example, a frame of 1280x720 is divided into 80x45
+ * cells of [16x16] pixels and 16x12 zones of [5x4] cells each
+ * (\a kAwbStatsSizeX=16 and \a kAwbStatsSizeY=12). If the number of cells isn't
+ * an exact multiple of the number of zones, the right-most and bottom-most
+ * cells are ignored. The grid configuration is computed by
+ * IPAIPU3::calculateBdsGrid().
+ *
+ * Before calculating the gains, the algorithm aggregates the cell averages for
+ * each zone in generateAwbStats(). Cells that have a too high ratio of
+ * saturated pixels are ignored, and only zones that contain enough
+ * non-saturated cells are then used by the algorithm.
+ *
+ * The Grey World algorithm will then estimate the red and blue gains to apply, and
+ * store the results in the metadata. The green gain is always set to 1.
+ */
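+
+/*
+ * Worked example of the cell-to-zone mapping described above (numbers are
+ * illustrative): for a 1280x720 BDS output with 16x16-pixel cells,
+ * grid.width = 80 and grid.height = 45, so each of the 16x12 zones groups
+ * round(80 / 16) x round(45 / 12) = 5 x 4 cells.
+ */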
+
+Awb::Awb()
+ : Algorithm()
+{
+ asyncResults_.blueGain = 1.0;
+ asyncResults_.greenGain = 1.0;
+ asyncResults_.redGain = 1.0;
+ asyncResults_.temperatureK = 4500;
+
+ zones_.reserve(kAwbStatsSizeX * kAwbStatsSizeY);
+}
+
+Awb::~Awb() = default;
+
+/**
+ * \copydoc libcamera::ipa::Algorithm::configure
+ */
+int Awb::configure(IPAContext &context,
+ [[maybe_unused]] const IPAConfigInfo &configInfo)
+{
+ const ipu3_uapi_grid_config &grid = context.configuration.grid.bdsGrid;
+ stride_ = context.configuration.grid.stride;
+
+ cellsPerZoneX_ = std::round(grid.width / static_cast<double>(kAwbStatsSizeX));
+ cellsPerZoneY_ = std::round(grid.height / static_cast<double>(kAwbStatsSizeY));
+
+ /*
+ * Configure the minimum proportion of cells counted within a zone
+ * for it to be relevant for the grey world algorithm.
+ * \todo This proportion could be configured.
+ */
+	cellsPerZoneThreshold_ = cellsPerZoneX_ * cellsPerZoneY_ * kMinCellsPerZoneRatio;
+ LOG(IPU3Awb, Debug) << "Threshold for AWB is set to " << cellsPerZoneThreshold_;
+
+ return 0;
+}
+
+constexpr uint16_t Awb::threshold(float value)
+{
+ /* AWB thresholds are in the range [0, 8191] */
+ return value * 8191;
+}
+
+constexpr uint16_t Awb::gainValue(double gain)
+{
+ /*
+ * The colour gains applied by the BNR for the four channels (Gr, R, B
+ * and Gb) are expressed in the parameters structure as 16-bit integers
+ * that store a fixed-point U3.13 value in the range [0, 8[.
+ *
+ * The real gain value is equal to the gain parameter plus one, i.e.
+ *
+ * Pout = Pin * (1 + gain / 8192)
+ *
+ * where 'Pin' is the input pixel value, 'Pout' the output pixel value,
+ * and 'gain' the gain in the parameters structure as a 16-bit integer.
+ */
+ return std::clamp((gain - 1.0) * 8192, 0.0, 65535.0);
+}
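+
+/*
+ * Worked example of the U3.13 encoding above: a real gain of 2.0 is stored
+ * as (2.0 - 1.0) * 8192 = 8192, and the neutral gain 1.0 maps to 0, matching
+ * Pout = Pin * (1 + gain / 8192).
+ */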
+
+/**
+ * \copydoc libcamera::ipa::Algorithm::prepare
+ */
+void Awb::prepare(IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
+ ipu3_uapi_params *params)
+{
+ /*
+ * Green saturation thresholds are reduced because we are using the
+ * green channel only in the exposure computation.
+ */
+ params->acc_param.awb.config.rgbs_thr_r = threshold(1.0);
+ params->acc_param.awb.config.rgbs_thr_gr = threshold(0.9);
+ params->acc_param.awb.config.rgbs_thr_gb = threshold(0.9);
+ params->acc_param.awb.config.rgbs_thr_b = threshold(1.0);
+
+ /*
+ * Enable saturation inclusion on thr_b for ImgU to update the
+ * ipu3_uapi_awb_set_item->sat_ratio field.
+ */
+ params->acc_param.awb.config.rgbs_thr_b |= IPU3_UAPI_AWB_RGBS_THR_B_INCL_SAT |
+ IPU3_UAPI_AWB_RGBS_THR_B_EN;
+
+ const ipu3_uapi_grid_config &grid = context.configuration.grid.bdsGrid;
+
+ params->acc_param.awb.config.grid = context.configuration.grid.bdsGrid;
+
+ /*
+ * Optical center is column start (respectively row start) of the
+ * cell of interest minus its X center (respectively Y center).
+ *
+ * For the moment use BDS as a first approximation, but it should
+ * be calculated based on Shading (SHD) parameters.
+ */
+ params->acc_param.bnr = imguCssBnrDefaults;
+ Size &bdsOutputSize = context.configuration.grid.bdsOutputSize;
+ params->acc_param.bnr.column_size = bdsOutputSize.width;
+ params->acc_param.bnr.opt_center.x_reset = grid.x_start - (bdsOutputSize.width / 2);
+ params->acc_param.bnr.opt_center.y_reset = grid.y_start - (bdsOutputSize.height / 2);
+ params->acc_param.bnr.opt_center_sqr.x_sqr_reset = params->acc_param.bnr.opt_center.x_reset
+ * params->acc_param.bnr.opt_center.x_reset;
+ params->acc_param.bnr.opt_center_sqr.y_sqr_reset = params->acc_param.bnr.opt_center.y_reset
+ * params->acc_param.bnr.opt_center.y_reset;
+
+ params->acc_param.bnr.wb_gains.gr = gainValue(context.activeState.awb.gains.green);
+ params->acc_param.bnr.wb_gains.r = gainValue(context.activeState.awb.gains.red);
+ params->acc_param.bnr.wb_gains.b = gainValue(context.activeState.awb.gains.blue);
+ params->acc_param.bnr.wb_gains.gb = gainValue(context.activeState.awb.gains.green);
+
+ LOG(IPU3Awb, Debug) << "Color temperature estimated: " << asyncResults_.temperatureK;
+
+	/* The CCM matrix may change once the colour temperature is taken into account */
+ params->acc_param.ccm = imguCssCcmDefault;
+
+ params->use.acc_awb = 1;
+ params->use.acc_bnr = 1;
+ params->use.acc_ccm = 1;
+}
+
+/* Generate an RGB vector with the average values for each zone */
+void Awb::generateZones()
+{
+ zones_.clear();
+
+ for (unsigned int i = 0; i < kAwbStatsSizeX * kAwbStatsSizeY; i++) {
+ double counted = awbStats_[i].counted;
+ if (counted >= cellsPerZoneThreshold_) {
+ RGB<double> zone{{
+ static_cast<double>(awbStats_[i].sum.red),
+ static_cast<double>(awbStats_[i].sum.green),
+ static_cast<double>(awbStats_[i].sum.blue)
+ }};
+
+ zone /= counted;
+
+ if (zone.g() >= kMinGreenLevelInZone)
+ zones_.push_back(zone);
+ }
+ }
+}
+
+/* Translate the IPU3 statistics into the default statistics zone array */
+void Awb::generateAwbStats(const ipu3_uapi_stats_3a *stats)
+{
+ /*
+ * Generate a (kAwbStatsSizeX x kAwbStatsSizeY) array from the IPU3 grid which is
+ * (grid.width x grid.height).
+ */
+ for (unsigned int cellY = 0; cellY < kAwbStatsSizeY * cellsPerZoneY_; cellY++) {
+ for (unsigned int cellX = 0; cellX < kAwbStatsSizeX * cellsPerZoneX_; cellX++) {
+ uint32_t cellPosition = cellY * stride_ + cellX;
+ uint32_t zoneX = cellX / cellsPerZoneX_;
+ uint32_t zoneY = cellY / cellsPerZoneY_;
+
+ uint32_t awbZonePosition = zoneY * kAwbStatsSizeX + zoneX;
+
+ /* Cast the initial IPU3 structure to simplify the reading */
+ const ipu3_uapi_awb_set_item *currentCell =
+ reinterpret_cast<const ipu3_uapi_awb_set_item *>(
+ &stats->awb_raw_buffer.meta_data[cellPosition]
+ );
+
+ /*
+ * Use cells which have less than 90%
+ * saturation as an initial means to include
+ * otherwise bright cells which are not fully
+ * saturated.
+ *
+ * \todo The 90% saturation rate may require
+ * further empirical measurements and
+ * optimisation during camera tuning phases.
+ */
+			if (currentCell->sat_ratio <= kMaxCellSaturationRatio) {
+ /* The cell is not saturated, use the current cell */
+ awbStats_[awbZonePosition].counted++;
+ uint32_t greenValue = currentCell->Gr_avg + currentCell->Gb_avg;
+ awbStats_[awbZonePosition].sum.green += greenValue / 2;
+ awbStats_[awbZonePosition].sum.red += currentCell->R_avg;
+ awbStats_[awbZonePosition].sum.blue += currentCell->B_avg;
+ }
+ }
+ }
+}
+
+void Awb::clearAwbStats()
+{
+ for (unsigned int i = 0; i < kAwbStatsSizeX * kAwbStatsSizeY; i++) {
+ awbStats_[i].sum.blue = 0;
+ awbStats_[i].sum.red = 0;
+ awbStats_[i].sum.green = 0;
+ awbStats_[i].counted = 0;
+ }
+}
+
+void Awb::awbGreyWorld()
+{
+ LOG(IPU3Awb, Debug) << "Grey world AWB";
+ /*
+ * Make a separate list of the derivatives for each of red and blue, so
+ * that we can sort them to exclude the extreme gains. We could
+ * consider some variations, such as normalising all the zones first, or
+ * doing an L2 average etc.
+ */
+ std::vector<RGB<double>> &redDerivative(zones_);
+ std::vector<RGB<double>> blueDerivative(redDerivative);
+ std::sort(redDerivative.begin(), redDerivative.end(),
+ [](RGB<double> const &a, RGB<double> const &b) {
+ return a.g() * b.r() < b.g() * a.r();
+ });
+ std::sort(blueDerivative.begin(), blueDerivative.end(),
+ [](RGB<double> const &a, RGB<double> const &b) {
+ return a.g() * b.b() < b.g() * a.b();
+ });
+
+ /* Average the middle half of the values. */
+ int discard = redDerivative.size() / 4;
+
+ RGB<double> sumRed{ 0.0 };
+ RGB<double> sumBlue{ 0.0 };
+ for (auto ri = redDerivative.begin() + discard,
+ bi = blueDerivative.begin() + discard;
+ ri != redDerivative.end() - discard; ri++, bi++)
+ sumRed += *ri, sumBlue += *bi;
+
+ double redGain = sumRed.g() / (sumRed.r() + 1),
+ blueGain = sumBlue.g() / (sumBlue.b() + 1);
+
+ /* Color temperature is not relevant in Grey world but still useful to estimate it :-) */
+ asyncResults_.temperatureK = estimateCCT({{ sumRed.r(), sumRed.g(), sumBlue.b() }});
+
+ /*
+ * Gain values are unsigned integer value ranging [0, 8) with 13 bit
+ * fractional part.
+ */
+ redGain = std::clamp(redGain, 0.0, 65535.0 / 8192);
+ blueGain = std::clamp(blueGain, 0.0, 65535.0 / 8192);
+
+ asyncResults_.redGain = redGain;
+ /* Hardcode the green gain to 1.0. */
+ asyncResults_.greenGain = 1.0;
+ asyncResults_.blueGain = blueGain;
+}
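+
+/*
+ * In essence, under the grey world assumption the scene averages satisfy
+ * avgR == avgG == avgB, so the corrections computed above reduce to:
+ *
+ *   redGain  = avgG / avgR
+ *   blueGain = avgG / avgB
+ *
+ * evaluated over the middle half of the sorted zones to reject outliers.
+ */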
+
+void Awb::calculateWBGains(const ipu3_uapi_stats_3a *stats)
+{
+ ASSERT(stats->stats_3a_status.awb_en);
+
+ clearAwbStats();
+ generateAwbStats(stats);
+ generateZones();
+
+ LOG(IPU3Awb, Debug) << "Valid zones: " << zones_.size();
+
+ if (zones_.size() > 10) {
+ awbGreyWorld();
+ LOG(IPU3Awb, Debug) << "Gain found for red: " << asyncResults_.redGain
+ << " and for blue: " << asyncResults_.blueGain;
+ }
+}
+
+/**
+ * \copydoc libcamera::ipa::Algorithm::process
+ */
+void Awb::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
+ const ipu3_uapi_stats_3a *stats,
+		  ControlList &metadata)
+{
+ calculateWBGains(stats);
+
+ /*
+ * Gains are only recalculated if enough zones were detected.
+ * The results are cached, so if no results were calculated, we set the
+ * cached values from asyncResults_ here.
+ */
+ context.activeState.awb.gains.blue = asyncResults_.blueGain;
+ context.activeState.awb.gains.green = asyncResults_.greenGain;
+ context.activeState.awb.gains.red = asyncResults_.redGain;
+ context.activeState.awb.temperatureK = asyncResults_.temperatureK;
+
+ metadata.set(controls::AwbEnable, true);
+ metadata.set(controls::ColourGains, {
+ static_cast<float>(context.activeState.awb.gains.red),
+ static_cast<float>(context.activeState.awb.gains.blue)
+ });
+ metadata.set(controls::ColourTemperature,
+ context.activeState.awb.temperatureK);
+}
+
+REGISTER_IPA_ALGORITHM(Awb, "Awb")
+
+} /* namespace ipa::ipu3::algorithms */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/algorithms/awb.h b/src/ipa/ipu3/algorithms/awb.h
new file mode 100644
index 00000000..1916990a
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/awb.h
@@ -0,0 +1,81 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Ideas On Board
+ *
+ * IPU3 AWB control algorithm
+ */
+
+#pragma once
+
+#include <vector>
+
+#include <linux/intel-ipu3.h>
+
+#include <libcamera/geometry.h>
+
+#include "libipa/vector.h"
+
+#include "algorithm.h"
+
+namespace libcamera {
+
+namespace ipa::ipu3::algorithms {
+
+/* Region size for the statistics generation algorithm */
+static constexpr uint32_t kAwbStatsSizeX = 16;
+static constexpr uint32_t kAwbStatsSizeY = 12;
+
+struct Accumulator {
+ unsigned int counted;
+ struct {
+ uint64_t red;
+ uint64_t green;
+ uint64_t blue;
+ } sum;
+};
+
+class Awb : public Algorithm
+{
+public:
+ Awb();
+ ~Awb();
+
+ int configure(IPAContext &context, const IPAConfigInfo &configInfo) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ ipu3_uapi_params *params) override;
+ void process(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ const ipu3_uapi_stats_3a *stats,
+ ControlList &metadata) override;
+
+private:
+ struct AwbStatus {
+ double temperatureK;
+ double redGain;
+ double greenGain;
+ double blueGain;
+ };
+
+private:
+ void calculateWBGains(const ipu3_uapi_stats_3a *stats);
+ void generateZones();
+ void generateAwbStats(const ipu3_uapi_stats_3a *stats);
+ void clearAwbStats();
+ void awbGreyWorld();
+ static constexpr uint16_t threshold(float value);
+ static constexpr uint16_t gainValue(double gain);
+
+ std::vector<RGB<double>> zones_;
+ Accumulator awbStats_[kAwbStatsSizeX * kAwbStatsSizeY];
+ AwbStatus asyncResults_;
+
+ uint32_t stride_;
+ uint32_t cellsPerZoneX_;
+ uint32_t cellsPerZoneY_;
+ uint32_t cellsPerZoneThreshold_;
+};
+
+} /* namespace ipa::ipu3::algorithms */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/algorithms/blc.cpp b/src/ipa/ipu3/algorithms/blc.cpp
new file mode 100644
index 00000000..35748fb2
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/blc.cpp
@@ -0,0 +1,71 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google inc.
+ *
+ * IPU3 Black Level Correction control
+ */
+
+#include "blc.h"
+
+/**
+ * \file blc.h
+ * \brief IPU3 Black Level Correction control
+ */
+
+namespace libcamera {
+
+namespace ipa::ipu3::algorithms {
+
+/**
+ * \class BlackLevelCorrection
+ * \brief A class to handle black level correction
+ *
+ * The pixels output by the camera normally include a black level, because
+ * sensors do not always report a signal level of '0' for black. Pixels at or
+ * below this level should be considered black. To achieve that, the ImgU BLC
+ * algorithm subtracts a configurable offset from all pixels.
+ *
+ * The black level can be measured at runtime from an optical dark region of the
+ * camera sensor, or measured during the camera tuning process. The first option
+ * isn't currently supported.
+ */
+
+BlackLevelCorrection::BlackLevelCorrection()
+{
+}
+
+/**
+ * \brief Fill in the parameter structure, and enable black level correction
+ * \param[in] context The shared IPA context
+ * \param[in] frame The frame context sequence number
+ * \param[in] frameContext The FrameContext for this frame
+ * \param[out] params The IPU3 parameters
+ *
+ * Populate the IPU3 parameter structure with the correction values for each
+ * channel and enable the corresponding ImgU block processing.
+ */
+void BlackLevelCorrection::prepare([[maybe_unused]] IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
+ ipu3_uapi_params *params)
+{
+ /*
+ * The Optical Black Level correction values
+ * \todo The correction values should come from sensor specific
+ * tuning processes. This is a first rough approximation.
+ */
+ params->obgrid_param.gr = 64;
+ params->obgrid_param.r = 64;
+ params->obgrid_param.b = 64;
+ params->obgrid_param.gb = 64;
+
+ /* Enable the custom black level correction processing */
+ params->use.obgrid = 1;
+ params->use.obgrid_param = 1;
+}
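+
+/*
+ * For scale (assuming the 10-bit Bayer formats the IPU3 CIO2 captures): the
+ * fixed offset of 64 above corresponds to 64 / 1023, i.e. roughly 6% of
+ * full scale being treated as the black level.
+ */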
+
+REGISTER_IPA_ALGORITHM(BlackLevelCorrection, "BlackLevelCorrection")
+
+} /* namespace ipa::ipu3::algorithms */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/algorithms/blc.h b/src/ipa/ipu3/algorithms/blc.h
new file mode 100644
index 00000000..62748045
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/blc.h
@@ -0,0 +1,28 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google inc.
+ *
+ * IPU3 Black Level Correction control
+ */
+
+#pragma once
+
+#include "algorithm.h"
+
+namespace libcamera {
+
+namespace ipa::ipu3::algorithms {
+
+class BlackLevelCorrection : public Algorithm
+{
+public:
+ BlackLevelCorrection();
+
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ ipu3_uapi_params *params) override;
+};
+
+} /* namespace ipa::ipu3::algorithms */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/algorithms/meson.build b/src/ipa/ipu3/algorithms/meson.build
new file mode 100644
index 00000000..b70a551c
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/meson.build
@@ -0,0 +1,9 @@
+# SPDX-License-Identifier: CC0-1.0
+
+ipu3_ipa_algorithms = files([
+ 'af.cpp',
+ 'agc.cpp',
+ 'awb.cpp',
+ 'blc.cpp',
+ 'tone_mapping.cpp',
+])
diff --git a/src/ipa/ipu3/algorithms/tone_mapping.cpp b/src/ipa/ipu3/algorithms/tone_mapping.cpp
new file mode 100644
index 00000000..160338c1
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/tone_mapping.cpp
@@ -0,0 +1,120 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google inc.
+ *
+ * IPU3 ToneMapping and Gamma control
+ */
+
+#include "tone_mapping.h"
+
+#include <cmath>
+#include <string.h>
+
+/**
+ * \file tone_mapping.h
+ */
+
+namespace libcamera {
+
+namespace ipa::ipu3::algorithms {
+
+/**
+ * \class ToneMapping
+ * \brief A class to handle tone mapping based on gamma
+ *
+ * This algorithm improves the image dynamic range using a look-up table that
+ * is generated from a gamma parameter.
+ */
+
+ToneMapping::ToneMapping()
+ : gamma_(1.0)
+{
+}
+
+/**
+ * \brief Configure the tone mapping given a configInfo
+ * \param[in] context The shared IPA context
+ * \param[in] configInfo The IPA configuration data
+ *
+ * \return 0
+ */
+int ToneMapping::configure(IPAContext &context,
+ [[maybe_unused]] const IPAConfigInfo &configInfo)
+{
+ /* Initialise tone mapping gamma value. */
+ context.activeState.toneMapping.gamma = 0.0;
+
+ return 0;
+}
+
+/**
+ * \brief Fill in the parameter structure, and enable gamma control
+ * \param[in] context The shared IPA context
+ * \param[in] frame The frame context sequence number
+ * \param[in] frameContext The FrameContext for this frame
+ * \param[out] params The IPU3 parameters
+ *
+ * Populate the IPU3 parameter structure with our tone mapping look up table and
+ * enable the gamma control module in the processing blocks.
+ */
+void ToneMapping::prepare(IPAContext &context,
+ [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
+ ipu3_uapi_params *params)
+{
+ /* Copy the calculated LUT into the parameters buffer. */
+ memcpy(params->acc_param.gamma.gc_lut.lut,
+ context.activeState.toneMapping.gammaCorrection.lut,
+ IPU3_UAPI_GAMMA_CORR_LUT_ENTRIES *
+ sizeof(params->acc_param.gamma.gc_lut.lut[0]));
+
+ /* Enable the custom gamma table. */
+ params->use.acc_gamma = 1;
+ params->acc_param.gamma.gc_ctrl.enable = 1;
+}
+
+/**
+ * \brief Calculate the tone mapping look up table
+ * \param[in] context The shared IPA context
+ * \param[in] frame The current frame sequence number
+ * \param[in] frameContext The current frame context
+ * \param[in] stats The IPU3 statistics and ISP results
+ * \param[out] metadata Metadata for the frame, to be filled by the algorithm
+ *
+ * The tone mapping look up table is generated as an inverse power curve from
+ * our gamma setting.
+ */
+void ToneMapping::process(IPAContext &context, [[maybe_unused]] const uint32_t frame,
+ [[maybe_unused]] IPAFrameContext &frameContext,
+ [[maybe_unused]] const ipu3_uapi_stats_3a *stats,
+ [[maybe_unused]] ControlList &metadata)
+{
+ /*
+ * Hardcode gamma to 1.1 as a default for now.
+ *
+ * \todo Expose gamma control setting through the libcamera control API
+ */
+ gamma_ = 1.1;
+
+ if (context.activeState.toneMapping.gamma == gamma_)
+ return;
+
+ struct ipu3_uapi_gamma_corr_lut &lut =
+ context.activeState.toneMapping.gammaCorrection;
+
+ for (uint32_t i = 0; i < std::size(lut.lut); i++) {
+ double j = static_cast<double>(i) / (std::size(lut.lut) - 1);
+ double gamma = std::pow(j, 1.0 / gamma_);
+
+ /* The output value is expressed on 13 bits. */
+ lut.lut[i] = gamma * 8191;
+ }
+
+ context.activeState.toneMapping.gamma = gamma_;
+}
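+
+/*
+ * Worked example of the LUT generation above: with gamma_ = 1.1 and the
+ * 256-entry LUT, the mid-scale entry i = 128 gives j = 128 / 255 ~ 0.502
+ * and lut[128] = pow(0.502, 1 / 1.1) * 8191 ~ 4377, slightly brighter than
+ * the linear value of 0.502 * 8191 ~ 4112.
+ */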
+
+REGISTER_IPA_ALGORITHM(ToneMapping, "ToneMapping")
+
+} /* namespace ipa::ipu3::algorithms */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/algorithms/tone_mapping.h b/src/ipa/ipu3/algorithms/tone_mapping.h
new file mode 100644
index 00000000..b2b38010
--- /dev/null
+++ b/src/ipa/ipu3/algorithms/tone_mapping.h
@@ -0,0 +1,35 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google inc.
+ *
+ * IPU3 ToneMapping and Gamma control
+ */
+
+#pragma once
+
+#include "algorithm.h"
+
+namespace libcamera {
+
+namespace ipa::ipu3::algorithms {
+
+class ToneMapping : public Algorithm
+{
+public:
+ ToneMapping();
+
+ int configure(IPAContext &context, const IPAConfigInfo &configInfo) override;
+ void prepare(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext, ipu3_uapi_params *params) override;
+ void process(IPAContext &context, const uint32_t frame,
+ IPAFrameContext &frameContext,
+ const ipu3_uapi_stats_3a *stats,
+ ControlList &metadata) override;
+
+private:
+ double gamma_;
+};
+
+} /* namespace ipa::ipu3::algorithms */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/data/meson.build b/src/ipa/ipu3/data/meson.build
new file mode 100644
index 00000000..0f7cd5c6
--- /dev/null
+++ b/src/ipa/ipu3/data/meson.build
@@ -0,0 +1,9 @@
+# SPDX-License-Identifier: CC0-1.0
+
+conf_files = files([
+ 'uncalibrated.yaml',
+])
+
+install_data(conf_files,
+ install_dir : ipa_data_dir / 'ipu3',
+ install_tag : 'runtime')
diff --git a/src/ipa/ipu3/data/uncalibrated.yaml b/src/ipa/ipu3/data/uncalibrated.yaml
new file mode 100644
index 00000000..794ab3ed
--- /dev/null
+++ b/src/ipa/ipu3/data/uncalibrated.yaml
@@ -0,0 +1,11 @@
+# SPDX-License-Identifier: CC0-1.0
+%YAML 1.1
+---
+version: 1
+algorithms:
+ - Af:
+ - Agc:
+ - Awb:
+ - BlackLevelCorrection:
+ - ToneMapping:
+...
diff --git a/src/ipa/ipu3/ipa_context.cpp b/src/ipa/ipu3/ipa_context.cpp
new file mode 100644
index 00000000..3b22f791
--- /dev/null
+++ b/src/ipa/ipu3/ipa_context.cpp
@@ -0,0 +1,190 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * IPU3 IPA Context
+ */
+
+#include "ipa_context.h"
+
+/**
+ * \file ipa_context.h
+ * \brief Context and state information shared between the algorithms
+ */
+
+namespace libcamera::ipa::ipu3 {
+
+/**
+ * \struct IPASessionConfiguration
+ * \brief Session configuration for the IPA module
+ *
+ * The session configuration contains all IPA configuration parameters that
+ * remain constant during the capture session, from IPA module start to stop.
+ * It is typically set during the configure() operation of the IPA module, but
+ * may also be updated in the start() operation.
+ */
+
+/**
+ * \struct IPAActiveState
+ * \brief The active state of the IPA algorithms
+ *
+ * The IPA is fed with the statistics generated from the latest frame captured
+ * by the hardware. The statistics are then processed by the IPA algorithms to
+ * compute ISP parameters required for the next frame capture. The current state
+ * of the algorithms is reflected through the IPAActiveState to store the values
+ * most recently computed by the IPA algorithms.
+ */
+
+/**
+ * \struct IPAContext
+ * \brief Global IPA context data shared between all algorithms
+ *
+ * \fn IPAContext::IPAContext
+ * \brief Initialize the instance with the given number of frame contexts
+ * \param[in] frameContextSize Size of the frame context ring buffer
+ *
+ * \var IPAContext::configuration
+ * \brief The IPA session configuration, immutable during the session
+ *
+ * \var IPAContext::frameContexts
+ * \brief Ring buffer of the IPAFrameContext(s)
+ *
+ * \var IPAContext::activeState
+ * \brief The current state of IPA algorithms
+ *
+ * \var IPAContext::ctrlMap
+ * \brief A ControlInfoMap::Map of controls populated by the algorithms
+ */
+
+/**
+ * \var IPASessionConfiguration::grid
+ * \brief Grid configuration of the IPA
+ *
+ * \var IPASessionConfiguration::grid.bdsGrid
+ * \brief Bayer Down Scaler grid plane config used by the kernel
+ *
+ * \var IPASessionConfiguration::grid.bdsOutputSize
+ * \brief BDS output size configured by the pipeline handler
+ *
+ * \var IPASessionConfiguration::grid.stride
+ * \brief Number of cells on one line including the ImgU padding
+ */
+
+/**
+ * \var IPASessionConfiguration::af
+ * \brief AF grid configuration of the IPA
+ *
+ * \var IPASessionConfiguration::af.afGrid
+ * \brief AF scene grid configuration
+ */
+
+/**
+ * \var IPAActiveState::af
+ * \brief Context for the Automatic Focus algorithm
+ *
+ * \var IPAActiveState::af.focus
+ * \brief Current position of the lens
+ *
+ * \var IPAActiveState::af.maxVariance
+ * \brief The maximum variance of the current image
+ *
+ * \var IPAActiveState::af.stable
+ * \brief Set to true when the best focus is found
+ */
+
+/**
+ * \var IPASessionConfiguration::agc
+ * \brief AGC parameters configuration of the IPA
+ *
+ * \var IPASessionConfiguration::agc.minExposureTime
+ * \brief Minimum exposure time supported with the configured sensor
+ *
+ * \var IPASessionConfiguration::agc.maxExposureTime
+ * \brief Maximum exposure time supported with the configured sensor
+ *
+ * \var IPASessionConfiguration::agc.minAnalogueGain
+ * \brief Minimum analogue gain supported with the configured sensor
+ *
+ * \var IPASessionConfiguration::agc.maxAnalogueGain
+ * \brief Maximum analogue gain supported with the configured sensor
+ */
+
+/**
+ * \var IPASessionConfiguration::sensor
+ * \brief Sensor-specific configuration of the IPA
+ *
+ * \var IPASessionConfiguration::sensor.lineDuration
+ * \brief Line duration in microseconds
+ *
+ * \var IPASessionConfiguration::sensor.defVBlank
+ * \brief The default vblank value of the sensor
+ *
+ * \var IPASessionConfiguration::sensor.size
+ * \brief Sensor output resolution
+ */
+
+/**
+ * \var IPAActiveState::agc
+ * \brief Context for the Automatic Gain Control algorithm
+ *
+ * The exposure and gain determined are expected to be applied to the sensor
+ * at the earliest opportunity.
+ *
+ * \var IPAActiveState::agc.exposure
+ * \brief Exposure time expressed as a number of lines
+ *
+ * \var IPAActiveState::agc.gain
+ * \brief Analogue gain multiplier
+ *
+ * The gain should be adapted to the sensor specific gain code before applying.
+ */
+
+/**
+ * \var IPAActiveState::awb
+ * \brief Context for the Automatic White Balance algorithm
+ *
+ * \var IPAActiveState::awb.gains
+ * \brief White balance gains
+ *
+ * \var IPAActiveState::awb.gains.red
+ * \brief White balance gain for R channel
+ *
+ * \var IPAActiveState::awb.gains.green
+ * \brief White balance gain for G channel
+ *
+ * \var IPAActiveState::awb.gains.blue
+ * \brief White balance gain for B channel
+ *
+ * \var IPAActiveState::awb.temperatureK
+ * \brief Estimated color temperature
+ */
+
+/**
+ * \var IPAActiveState::toneMapping
+ * \brief Context for ToneMapping and Gamma control
+ *
+ * \var IPAActiveState::toneMapping.gamma
+ * \brief Gamma value for the LUT
+ *
+ * \var IPAActiveState::toneMapping.gammaCorrection
+ * \brief Per-pixel tone mapping implemented as a LUT
+ *
+ * The LUT structure is defined by the IPU3 kernel interface. See
+ * <linux/intel-ipu3.h> struct ipu3_uapi_gamma_corr_lut for further details.
+ */
+
+/**
+ * \struct IPAFrameContext
+ * \brief IPU3-specific FrameContext
+ *
+ * \var IPAFrameContext::sensor
+ * \brief Effective sensor values that were applied for the frame
+ *
+ * \var IPAFrameContext::sensor.exposure
+ * \brief Exposure time expressed as a number of lines
+ *
+ * \var IPAFrameContext::sensor.gain
+ * \brief Analogue gain multiplier
+ */
+
+} /* namespace libcamera::ipa::ipu3 */
diff --git a/src/ipa/ipu3/ipa_context.h b/src/ipa/ipu3/ipa_context.h
new file mode 100644
index 00000000..97fcf06c
--- /dev/null
+++ b/src/ipa/ipu3/ipa_context.h
@@ -0,0 +1,102 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * IPU3 IPA Context
+ */
+
+#pragma once
+
+#include <linux/intel-ipu3.h>
+
+#include <libcamera/base/utils.h>
+
+#include <libcamera/controls.h>
+#include <libcamera/geometry.h>
+
+#include <libipa/fc_queue.h>
+
+namespace libcamera {
+
+namespace ipa::ipu3 {
+
+struct IPASessionConfiguration {
+ struct {
+ ipu3_uapi_grid_config bdsGrid;
+ Size bdsOutputSize;
+ uint32_t stride;
+ } grid;
+
+ struct {
+ ipu3_uapi_grid_config afGrid;
+ } af;
+
+ struct {
+ utils::Duration minExposureTime;
+ utils::Duration maxExposureTime;
+ double minAnalogueGain;
+ double maxAnalogueGain;
+ } agc;
+
+ struct {
+ int32_t defVBlank;
+ utils::Duration lineDuration;
+ Size size;
+ } sensor;
+};
+
+struct IPAActiveState {
+ struct {
+ uint32_t focus;
+ double maxVariance;
+ bool stable;
+ } af;
+
+ struct {
+ uint32_t exposure;
+ double gain;
+ uint32_t constraintMode;
+ uint32_t exposureMode;
+ } agc;
+
+ struct {
+ struct {
+ double red;
+ double green;
+ double blue;
+ } gains;
+
+ double temperatureK;
+ } awb;
+
+ struct {
+ double gamma;
+ struct ipu3_uapi_gamma_corr_lut gammaCorrection;
+ } toneMapping;
+};
+
+struct IPAFrameContext : public FrameContext {
+ struct {
+ uint32_t exposure;
+ double gain;
+ } sensor;
+};
+
+struct IPAContext {
+ IPAContext(unsigned int frameContextSize)
+ : frameContexts(frameContextSize)
+ {
+ }
+
+ IPASessionConfiguration configuration;
+ IPAActiveState activeState;
+
+ FCQueue<IPAFrameContext> frameContexts;
+
+ ControlInfoMap::Map ctrlMap;
+};
+
+} /* namespace ipa::ipu3 */
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/ipu3-ipa-design-guide.rst b/src/ipa/ipu3/ipu3-ipa-design-guide.rst
new file mode 100644
index 00000000..85d735c6
--- /dev/null
+++ b/src/ipa/ipu3/ipu3-ipa-design-guide.rst
@@ -0,0 +1,162 @@
+.. SPDX-License-Identifier: CC-BY-SA-4.0
+
+IPU3 IPA Architecture Design and Overview
+=========================================
+
+The IPU3 IPA is built as a modular and extensible framework, with an
+upper layer that manages the interactions with the pipeline handler and
+the image processing algorithms split into components, each
+compartmentalising the processing required for one processing block and
+making use of the fixed-function accelerators provided by the ImgU ISP.
+
+The core IPU3 class is responsible for initialisation and construction
+of the algorithm components, processing controls set by the requests
+from applications, and managing events from the pipeline handler.
+
+::
+
+ ┌───────────────────────────────────────────┐
+ │ IPU3 Pipeline Handler │
+ │ ┌────────┐ ┌────────┐ ┌────────┐ │
+ │ │ │ │ │ │ │ │
+ │ │ Sensor ├───►│ CIO2 ├───►│ ImgU ├──►
+ │ │ │ │ │ │ │ │
+ │ └────────┘ └────────┘ └─▲────┬─┘ │ P: Parameter Buffer
+ │ │P │ │ S: Statistics Buffer
+ │ │ │S │
+ └─┬───┬───┬──────┬────┬────┬────┬─┴────▼─┬──┘ 1: init()
+ │ │ │ │ ▲ │ ▲ │ ▲ │ ▲ │ 2: configure()
+ │1 │2 │3 │4│ │4│ │4│ │4│ │5 3: mapBuffers(), start()
+ │ │ │ │ │ │ │ │ │ │ │ │ 4: (▼) queueRequest(), computeParams(), processStats()
+ ▼ ▼ ▼ ▼ │ ▼ │ ▼ │ ▼ │ ▼ (▲) setSensorControls, paramsComputed, metadataReady Signals
+ ┌──────────────────┴────┴────┴────┴─────────┐ 5: stop(), unmapBuffers()
+ │ IPU3 IPA │
+ │ ┌───────────────────────┐ │
+ │ ┌───────────┐ │ Algorithms │ │
+ │ │IPAContext │ │ ┌─────────┐ │ │
+ │ │ ┌───────┐ │ │ │ ... │ │ │
+ │ │ │ │ │ │ ┌─┴───────┐ │ │ │
+ │ │ │ SC │ │ │ │ Tonemap ├─┘ │ │
+ │ │ │ │ ◄───► ┌─┴───────┐ │ │ │
+ │ │ ├───────┤ │ │ │ AWB ├─┘ │ │
+ │ │ │ │ │ │ ┌─┴───────┐ │ │ │
+ │ │ │ FC │ │ │ │ AGC ├─┘ │ │
+ │ │ │ │ │ │ │ │ │ │
+ │ │ └───────┘ │ │ └─────────┘ │ │
+ │ └───────────┘ └───────────────────────┘ │
+ └───────────────────────────────────────────┘
+ SC: IPASessionConfiguration
+ FC: IPAFrameContext(s)
+
+The IPA instance is constructed and initialised at the point a Camera is
+created by the IPU3 pipeline handler. The initialisation call provides
+details about which camera sensor is being used, and the controls that
+it has available, along with their default values and ranges.
+
+Buffers
+~~~~~~~
+
+The IPA will have Parameter and Statistics buffers shared with it by
+the IPU3 pipeline handler. These buffers will be passed to the IPA using
+the ``mapBuffers()`` call before the ``start()`` operation occurs.
+
+The IPA will map the buffers into CPU-accessible memory, associated with
+a buffer ID, and further events for sending or receiving parameter and
+statistics buffers will reference the ID to avoid expensive memory
+mapping operations, or the passing of file handles during streaming.
+
+After the ``stop()`` operation occurs, these buffers will be unmapped
+when requested by the pipeline handler using the ``unmapBuffers()`` call
+and no further access to the buffers is permitted.
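+
+The expected call sequence, condensed from the description above
+(illustrative, not a verbatim trace):
+
+::
+
+   mapBuffers(buffers)    /* 1: map parameter and statistics buffers */
+   start()                /* 2: begin streaming */
+       computeParams()    /*    buffers referenced by ID ... */
+       processStats()     /*    ... while streaming */
+   stop()                 /* 3: stop streaming */
+   unmapBuffers(ids)      /* 4: unmap, no further access permitted */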
+
+Context
+~~~~~~~
+
+Algorithm calls will always have the ``IPAContext`` available to them.
+This context comprises two parts:
+
+- IPA Session Configuration
+- IPA Frame Context
+
+The session configuration structure ``IPASessionConfiguration``
+represents constant parameters determined during ``configure()``,
+before streaming commences.
+
+The IPA Frame Context provides the storage used by the algorithms for a
+single frame operation.
+
+The ``IPAFrameContext`` structures are stored in a ring buffer (the
+``FCQueue``) to retain historical state for each frame, allowing
+algorithms to obtain and reference results of calculations which are
+deeply pipelined. This is required when an algorithm needs to know the
+context that was applied at the frame the statistics were produced for,
+rather than the previous or current frame.
+
+A frame context is allocated from the queue when a request is queued
+with ``queueRequest()``, and retrieved again by the subsequent
+``computeParams()`` and ``processStats()`` calls for the same frame.
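+
+As a minimal sketch (``ExampleAlgorithm`` and ``computeGain()`` are
+hypothetical names, while the context members are those defined in
+``ipa_context.h``), an algorithm reads and updates the shared state as
+follows:
+
+::
+
+   void ExampleAlgorithm::process(IPAContext &context, const uint32_t frame,
+                                  IPAFrameContext &frameContext,
+                                  const ipu3_uapi_stats_3a *stats,
+                                  ControlList &metadata)
+   {
+           /* Session-constant limit, set during configure(). */
+           utils::Duration maxTime = context.configuration.agc.maxExposureTime;
+
+           /* Effective sensor values that were applied for this frame. */
+           uint32_t exposure = frameContext.sensor.exposure;
+
+           /* Share the most recently computed result with other algorithms. */
+           context.activeState.agc.gain = computeGain(stats, exposure, maxTime);
+   }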
+
+Operating
+~~~~~~~~~
+
+There are four main interactions with the algorithms for the IPU3 IPA
+to operate when running:
+
+- configure()
+- queueRequest()
+- computeParams()
+- processStats()
+
+The configuration phase allows the pipeline handler to inform the IPA of
+the current stream configuration, which is then passed into each
+algorithm to provide an opportunity to identify and track the state of
+the hardware, such as the image size or ImgU pipeline configuration.
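+
+Condensed from ``IPAIPU3::configure()``, each algorithm is given the
+opportunity to prepare the session configuration:
+
+::
+
+   for (auto const &algo : algorithms()) {
+           int ret = algo->configure(context_, configInfo);
+           if (ret)
+                   return ret;
+   }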
+
+Pre-frame preparation
+~~~~~~~~~~~~~~~~~~~~~
+
+When configured, the IPA is notified by the pipeline handler of the
+Camera ``start()`` event, after which incoming requests will be queued
+for processing, requiring a parameter buffer (``ipu3_uapi_params``) to
+be populated for the ImgU. This is given to the IPA through
+``computeParams()``, and then passed directly to each algorithm
+through the ``prepare()`` call allowing the ISP configuration to be
+updated for the needs of each component that the algorithm is
+responsible for.
+
+The algorithm should set the use flag (``ipu3_uapi_flags``) for any
+structure that it modifies, and it should take care to ensure that any
+structure set by a use flag is fully initialised to suitable values.
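+
+As an illustration, a ``prepare()`` implementation along the lines of
+the tone mapping algorithm would fully populate the gamma structure
+before flagging it (a condensed sketch following the <linux/intel-ipu3.h>
+UAPI layout, not the verbatim implementation):
+
+::
+
+   /* Initialise the complete structure covered by the use flag. */
+   params->acc_param.gamma.gc_ctrl.enable = 1;
+   params->acc_param.gamma.gc_lut =
+           context.activeState.toneMapping.gammaCorrection;
+
+   /* Instruct the ImgU to consume the newly written structure. */
+   params->use.acc_gamma = 1;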
+
+The parameter buffer is returned to the pipeline handler through the
+``paramsComputed`` signal, and from there queued to the ImgU along
+with a raw frame captured with the CIO2.
+
+Post-frame completion
+~~~~~~~~~~~~~~~~~~~~~
+
+When the capture of an image is completed, and successfully processed
+through the ImgU, the generated statistics buffer
+(``ipu3_uapi_stats_3a``) is given to the IPA through
+``processStats()``. This provides the IPA with an opportunity to
+examine the results of the ISP and run the calculations required by each
+algorithm on the new data. The algorithms may require context from the
+operations of other algorithms, for example, the AWB might choose to use
+a scene brightness determined by the AGC. It is important that the
+algorithms are ordered to ensure that required results are determined
+before they are needed.
+
+The ordering of the algorithm processing is determined by their
+placement in the ``IPU3::algorithms_`` ordered list, which is populated
+in the order defined by the tuning data file.
+
+Finally, the IPA metadata for the completed frame is returned via the
+``metadataReady`` signal.
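+
+Condensed from ``IPAIPU3::processStats()``, the statistics handling
+sequence is:
+
+::
+
+   ControlList metadata(controls::controls);
+
+   /* Run each algorithm, in order, on the new statistics. */
+   for (auto const &algo : algorithms())
+           algo->process(context_, frame, frameContext, stats, metadata);
+
+   /* Request updated sensor settings (see Sensor Controls below). */
+   setControls(frame);
+
+   metadataReady.emit(frame, metadata);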
+
+Sensor Controls
+~~~~~~~~~~~~~~~
+
+The AutoExposure and AutoGain (AGC) algorithm differs slightly from the
+others as it requires operating directly on the sensor, as opposed to
+through the ImgU ISP. To support this, there is a ``setSensorControls``
+signal to allow the IPA to request controls to be set on the camera
+sensor through the pipeline handler.
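+
+Condensed from ``IPAIPU3::setControls()``, the IPA emits the signal with
+the desired sensor and lens control values (``exposure``, ``gainCode``
+and ``focus`` stand in for values taken from the active state):
+
+::
+
+   ControlList ctrls(sensorCtrls_);
+   ctrls.set(V4L2_CID_EXPOSURE, exposure);
+   ctrls.set(V4L2_CID_ANALOGUE_GAIN, gainCode);
+
+   ControlList lensCtrls(lensCtrls_);
+   lensCtrls.set(V4L2_CID_FOCUS_ABSOLUTE, focus);
+
+   setSensorControls.emit(frame, ctrls, lensCtrls);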
diff --git a/src/ipa/ipu3/ipu3.cpp b/src/ipa/ipu3/ipu3.cpp
new file mode 100644
index 00000000..1cae08bf
--- /dev/null
+++ b/src/ipa/ipu3/ipu3.cpp
@@ -0,0 +1,692 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * IPU3 Image Processing Algorithms
+ */
+
+#include <algorithm>
+#include <array>
+#include <cmath>
+#include <limits>
+#include <map>
+#include <memory>
+#include <stdint.h>
+#include <utility>
+#include <vector>
+
+#include <linux/intel-ipu3.h>
+#include <linux/v4l2-controls.h>
+
+#include <libcamera/base/file.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
+
+#include <libcamera/control_ids.h>
+#include <libcamera/controls.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/geometry.h>
+#include <libcamera/request.h>
+
+#include <libcamera/ipa/ipa_interface.h>
+#include <libcamera/ipa/ipa_module_info.h>
+#include <libcamera/ipa/ipu3_ipa_interface.h>
+
+#include "libcamera/internal/mapped_framebuffer.h"
+#include "libcamera/internal/yaml_parser.h"
+
+#include "libipa/camera_sensor_helper.h"
+
+#include "ipa_context.h"
+#include "module.h"
+
+/* Minimum grid width, expressed as a number of cells */
+static constexpr uint32_t kMinGridWidth = 16;
+/* Maximum grid width, expressed as a number of cells */
+static constexpr uint32_t kMaxGridWidth = 80;
+/* Minimum grid height, expressed as a number of cells */
+static constexpr uint32_t kMinGridHeight = 16;
+/* Maximum grid height, expressed as a number of cells */
+static constexpr uint32_t kMaxGridHeight = 60;
+/* log2 of the minimum grid cell width and height, in pixels */
+static constexpr uint32_t kMinCellSizeLog2 = 3;
+/* log2 of the maximum grid cell width and height, in pixels */
+static constexpr uint32_t kMaxCellSizeLog2 = 6;
+
+/* Maximum number of frame contexts to be held */
+static constexpr uint32_t kMaxFrameContexts = 16;
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(IPAIPU3)
+
+using namespace std::literals::chrono_literals;
+
+namespace ipa::ipu3 {
+
+/**
+ * \brief The IPU3 IPA implementation
+ *
+ * The IPU3 Pipeline defines an IPU3-specific interface for communication
+ * between the PipelineHandler and the IPA module.
+ *
+ * We extend the IPAIPU3Interface to implement our algorithms and handle
+ * calls from the IPU3 PipelineHandler to satisfy requests from the
+ * application.
+ *
+ * At initialisation time, a CameraSensorHelper is instantiated to support
+ * camera-specific calculations, while the default controls are computed, and
+ * the algorithms are instantiated from the tuning data file.
+ *
+ * The IPU3 ImgU operates with a grid layout to divide the overall frame into
+ * rectangular cells of pixels. When the IPA is configured, we determine the
+ * best grid for the statistics based on the pipeline handler Bayer Down Scaler
+ * output size.
+ *
+ * Two main events are then handled to operate the IPU3 ImgU by populating its
+ * parameter buffer, and adapting the settings of the sensor attached to the
+ * IPU3 CIO2 through sensor-specific V4L2 controls.
+ *
+ * In computeParams(), we populate the ImgU parameter buffer with
+ * settings to configure the device in preparation for handling the frame
+ * queued in the Request.
+ *
+ * When the frame has completed processing, the ImgU will generate a statistics
+ * buffer which is given to the IPA with processStats(). In this we run the
+ * algorithms to parse the statistics and cache any results for the next
+ * computeParams() call.
+ *
+ * The individual algorithms are split into modular components that are called
+ * iteratively to allow them to process statistics from the ImgU in the order
+ * defined in the tuning data file.
+ *
+ * The current implementation supports five core algorithms:
+ *
+ * - Auto focus (AF)
+ * - Automatic gain and exposure control (AGC)
+ * - Automatic white balance (AWB)
+ * - Black level correction (BLC)
+ * - Tone mapping (Gamma)
+ *
+ * AWB is implemented using a Greyworld algorithm, and calculates the red and
+ * blue gains to apply to generate a neutral grey frame overall.
+ *
+ * AGC is handled by calculating a histogram of the green channel to estimate an
+ * analogue gain and exposure time which will provide a well-exposed frame. A
+ * low-pass IIR filter is used to smooth the changes to the sensor to reduce
+ * perceivable steps.
+ *
+ * The tone mapping algorithm provides a gamma correction table to improve the
+ * contrast of the scene.
+ *
+ * The black level compensation algorithm subtracts a hardcoded black level from
+ * all pixels.
+ *
+ * The IPU3 ImgU has further processing blocks to support image quality
+ * improvements through Bayer and temporal noise reduction; however, those are
+ * not supported in the current implementation, and will use the default
+ * settings provided by the kernel driver.
+ *
+ * Demosaicing operates with the default parameters and could be further
+ * optimised to provide improved sharpening coefficients, checker artifact
+ * removal, and false color correction.
+ *
+ * Additional image enhancements can be made by providing lens and
+ * sensor-specific tuning to adapt for Black Level compensation (BLC), Lens
+ * shading correction (SHD) and Color correction (CCM).
+ */
+class IPAIPU3 : public IPAIPU3Interface, public Module
+{
+public:
+ IPAIPU3();
+
+ int init(const IPASettings &settings,
+ const IPACameraSensorInfo &sensorInfo,
+ const ControlInfoMap &sensorControls,
+ ControlInfoMap *ipaControls) override;
+
+ int start() override;
+ void stop() override;
+
+ int configure(const IPAConfigInfo &configInfo,
+ ControlInfoMap *ipaControls) override;
+
+ void mapBuffers(const std::vector<IPABuffer> &buffers) override;
+ void unmapBuffers(const std::vector<unsigned int> &ids) override;
+
+ void queueRequest(const uint32_t frame, const ControlList &controls) override;
+ void computeParams(const uint32_t frame, const uint32_t bufferId) override;
+ void processStats(const uint32_t frame, const int64_t frameTimestamp,
+ const uint32_t bufferId,
+ const ControlList &sensorControls) override;
+
+protected:
+ std::string logPrefix() const override;
+
+private:
+ void updateControls(const IPACameraSensorInfo &sensorInfo,
+ const ControlInfoMap &sensorControls,
+ ControlInfoMap *ipaControls);
+ void updateSessionConfiguration(const ControlInfoMap &sensorControls);
+
+ void setControls(unsigned int frame);
+ void calculateBdsGrid(const Size &bdsOutputSize);
+
+ std::map<unsigned int, MappedFrameBuffer> buffers_;
+
+ ControlInfoMap sensorCtrls_;
+ ControlInfoMap lensCtrls_;
+
+ IPACameraSensorInfo sensorInfo_;
+
+ /* Interface to the Camera Helper */
+ std::unique_ptr<CameraSensorHelper> camHelper_;
+
+ /* Local parameter storage */
+ struct IPAContext context_;
+};
+
+IPAIPU3::IPAIPU3()
+ : context_(kMaxFrameContexts)
+{
+}
+
+std::string IPAIPU3::logPrefix() const
+{
+ return "ipu3";
+}
+
+/**
+ * \brief Compute IPASessionConfiguration using the sensor information and the
+ * sensor V4L2 controls
+ */
+void IPAIPU3::updateSessionConfiguration(const ControlInfoMap &sensorControls)
+{
+ const ControlInfo vBlank = sensorControls.find(V4L2_CID_VBLANK)->second;
+ context_.configuration.sensor.defVBlank = vBlank.def().get<int32_t>();
+
+ const ControlInfo &v4l2Exposure = sensorControls.find(V4L2_CID_EXPOSURE)->second;
+ int32_t minExposure = v4l2Exposure.min().get<int32_t>();
+ int32_t maxExposure = v4l2Exposure.max().get<int32_t>();
+
+ const ControlInfo &v4l2Gain = sensorControls.find(V4L2_CID_ANALOGUE_GAIN)->second;
+ int32_t minGain = v4l2Gain.min().get<int32_t>();
+ int32_t maxGain = v4l2Gain.max().get<int32_t>();
+
+ /*
+ * When the AGC computes the new exposure values for a frame, it needs
+ * to know the limits for exposure time and analogue gain.
+ * As it depends on the sensor, update it with the controls.
+ *
+ * \todo take VBLANK into account for maximum exposure time
+ */
+ context_.configuration.agc.minExposureTime = minExposure * context_.configuration.sensor.lineDuration;
+ context_.configuration.agc.maxExposureTime = maxExposure * context_.configuration.sensor.lineDuration;
+ context_.configuration.agc.minAnalogueGain = camHelper_->gain(minGain);
+ context_.configuration.agc.maxAnalogueGain = camHelper_->gain(maxGain);
+}
+
+/**
+ * \brief Compute camera controls using the sensor information and the sensor
+ * V4L2 controls
+ *
+ * Some of the camera controls are computed by the pipeline handler, some others
+ * by the IPA module which is in charge of handling, for example, the exposure
+ * time and the frame duration.
+ *
+ * This function computes:
+ * - controls::ExposureTime
+ * - controls::FrameDurationLimits
+ */
+void IPAIPU3::updateControls(const IPACameraSensorInfo &sensorInfo,
+ const ControlInfoMap &sensorControls,
+ ControlInfoMap *ipaControls)
+{
+ ControlInfoMap::Map controls{};
+ double lineDuration = context_.configuration.sensor.lineDuration.get<std::micro>();
+
+ /*
+ * Compute exposure time limits by using line length and pixel rate
+ * converted to microseconds. Use the V4L2_CID_EXPOSURE control to get
+ * exposure min, max and default and convert it from lines to
+ * microseconds.
+ */
+ const ControlInfo &v4l2Exposure = sensorControls.find(V4L2_CID_EXPOSURE)->second;
+ int32_t minExposure = v4l2Exposure.min().get<int32_t>() * lineDuration;
+ int32_t maxExposure = v4l2Exposure.max().get<int32_t>() * lineDuration;
+ int32_t defExposure = v4l2Exposure.def().get<int32_t>() * lineDuration;
+ controls[&controls::ExposureTime] = ControlInfo(minExposure, maxExposure,
+ defExposure);
+
+ /*
+ * Compute the frame duration limits.
+ *
+ * The frame length is computed assuming a fixed line length combined
+ * with the vertical frame sizes.
+ */
+ const ControlInfo &v4l2HBlank = sensorControls.find(V4L2_CID_HBLANK)->second;
+ uint32_t hblank = v4l2HBlank.def().get<int32_t>();
+ uint32_t lineLength = sensorInfo.outputSize.width + hblank;
+
+ const ControlInfo &v4l2VBlank = sensorControls.find(V4L2_CID_VBLANK)->second;
+ std::array<uint32_t, 3> frameHeights{
+ v4l2VBlank.min().get<int32_t>() + sensorInfo.outputSize.height,
+ v4l2VBlank.max().get<int32_t>() + sensorInfo.outputSize.height,
+ v4l2VBlank.def().get<int32_t>() + sensorInfo.outputSize.height,
+ };
+
+ std::array<int64_t, 3> frameDurations;
+ for (unsigned int i = 0; i < frameHeights.size(); ++i) {
+ uint64_t frameSize = lineLength * frameHeights[i];
+ frameDurations[i] = frameSize / (sensorInfo.pixelRate / 1000000U);
+ }
+
+ controls[&controls::FrameDurationLimits] = ControlInfo(frameDurations[0],
+ frameDurations[1],
+ frameDurations[2]);
+
+ controls.merge(context_.ctrlMap);
+ *ipaControls = ControlInfoMap(std::move(controls), controls::controls);
+}
+
+/**
+ * \brief Initialize the IPA module and its controls
+ *
+ * This function receives the camera sensor information from the pipeline
+ * handler, computes the limits of the controls it handles and returns
+ * them in the \a ipaControls output parameter.
+ */
+int IPAIPU3::init(const IPASettings &settings,
+ const IPACameraSensorInfo &sensorInfo,
+ const ControlInfoMap &sensorControls,
+ ControlInfoMap *ipaControls)
+{
+ camHelper_ = CameraSensorHelperFactoryBase::create(settings.sensorModel);
+ if (camHelper_ == nullptr) {
+ LOG(IPAIPU3, Error)
+ << "Failed to create camera sensor helper for "
+ << settings.sensorModel;
+ return -ENODEV;
+ }
+
+ /* Clean context */
+ context_.configuration = {};
+ context_.configuration.sensor.lineDuration =
+ sensorInfo.minLineLength * 1.0s / sensorInfo.pixelRate;
+
+ /* Load the tuning data file. */
+ File file(settings.configurationFile);
+ if (!file.open(File::OpenModeFlag::ReadOnly)) {
+ int ret = file.error();
+ LOG(IPAIPU3, Error)
+ << "Failed to open configuration file "
+ << settings.configurationFile << ": " << strerror(-ret);
+ return ret;
+ }
+
+ std::unique_ptr<libcamera::YamlObject> data = YamlParser::parse(file);
+ if (!data)
+ return -EINVAL;
+
+ unsigned int version = (*data)["version"].get<uint32_t>(0);
+ if (version != 1) {
+ LOG(IPAIPU3, Error)
+ << "Invalid tuning file version " << version;
+ return -EINVAL;
+ }
+
+ if (!data->contains("algorithms")) {
+ LOG(IPAIPU3, Error)
+ << "Tuning file doesn't contain any algorithm";
+ return -EINVAL;
+ }
+
+ int ret = createAlgorithms(context_, (*data)["algorithms"]);
+ if (ret)
+ return ret;
+
+ /* Initialize controls. */
+ updateControls(sensorInfo, sensorControls, ipaControls);
+
+ return 0;
+}
+
+/**
+ * \brief Perform any processing required before the first frame
+ */
+int IPAIPU3::start()
+{
+ /*
+ * Set the sensors V4L2 controls before the first frame to ensure that
+ * we have an expected and known configuration from the start.
+ */
+ setControls(0);
+
+ return 0;
+}
+
+/**
+ * \brief Ensure that all processing has completed
+ */
+void IPAIPU3::stop()
+{
+ context_.frameContexts.clear();
+}
+
+/**
+ * \brief Calculate a grid for the AWB statistics
+ *
+ * This function calculates a grid for the AWB algorithm in the IPU3 firmware.
+ * Its input is the BDS output size calculated in the ImgU.
+ * It is limited for now to the simplest method: find the grid width and
+ * height, and respective log2 cell width and height, that minimise the
+ * error against the BDS output size.
+ *
+ * \todo The frame is divided into cells which can be 8x8 => 64x64.
+ * As a smaller cell improves the algorithm precision, adapting the
+ * x_start and y_start parameters of the grid would cause a loss of
+ * some pixels but would also result in more accurate algorithms.
+ */
+void IPAIPU3::calculateBdsGrid(const Size &bdsOutputSize)
+{
+ Size best;
+ Size bestLog2;
+
+ /* Set the BDS output size in the IPAConfiguration structure */
+ context_.configuration.grid.bdsOutputSize = bdsOutputSize;
+
+ uint32_t minError = std::numeric_limits<uint32_t>::max();
+ for (uint32_t shift = kMinCellSizeLog2; shift <= kMaxCellSizeLog2; ++shift) {
+ uint32_t width = std::clamp(bdsOutputSize.width >> shift,
+ kMinGridWidth,
+ kMaxGridWidth);
+
+ width = width << shift;
+ uint32_t error = utils::abs_diff(width, bdsOutputSize.width);
+ if (error >= minError)
+ continue;
+
+ minError = error;
+ best.width = width;
+ bestLog2.width = shift;
+ }
+
+ minError = std::numeric_limits<uint32_t>::max();
+ for (uint32_t shift = kMinCellSizeLog2; shift <= kMaxCellSizeLog2; ++shift) {
+ uint32_t height = std::clamp(bdsOutputSize.height >> shift,
+ kMinGridHeight,
+ kMaxGridHeight);
+
+ height = height << shift;
+ uint32_t error = utils::abs_diff(height, bdsOutputSize.height);
+ if (error >= minError)
+ continue;
+
+ minError = error;
+ best.height = height;
+ bestLog2.height = shift;
+ }
+
+ struct ipu3_uapi_grid_config &bdsGrid = context_.configuration.grid.bdsGrid;
+ bdsGrid.x_start = 0;
+ bdsGrid.y_start = 0;
+ bdsGrid.width = best.width >> bestLog2.width;
+ bdsGrid.block_width_log2 = bestLog2.width;
+ bdsGrid.height = best.height >> bestLog2.height;
+ bdsGrid.block_height_log2 = bestLog2.height;
+
+ /* The ImgU pads the lines to a multiple of 4 cells. */
+ context_.configuration.grid.stride = utils::alignUp(bdsGrid.width, 4);
+
+ LOG(IPAIPU3, Debug) << "Best grid found is: ("
+ << (int)bdsGrid.width << " << " << (int)bdsGrid.block_width_log2 << ") x ("
+ << (int)bdsGrid.height << " << " << (int)bdsGrid.block_height_log2 << ")";
+}
+
+/**
+ * \brief Configure the IPU3 IPA
+ * \param[in] configInfo The IPA configuration data, received from the pipeline
+ * handler
+ * \param[out] ipaControls The IPA controls to update
+ *
+ * Calculate the best grid for the statistics based on the pipeline handler BDS
+ * output, and parse the minimum and maximum exposure and analogue gain control
+ * values.
+ *
+ * \todo Document what the BDS is, ideally in a block diagram of the ImgU.
+ *
+ * All algorithm modules are called to allow them to prepare the
+ * \a IPASessionConfiguration structure for the \a IPAContext.
+ */
+int IPAIPU3::configure(const IPAConfigInfo &configInfo,
+ ControlInfoMap *ipaControls)
+{
+ if (configInfo.sensorControls.empty()) {
+ LOG(IPAIPU3, Error) << "No sensor controls provided";
+ return -ENODATA;
+ }
+
+ sensorInfo_ = configInfo.sensorInfo;
+
+ lensCtrls_ = configInfo.lensControls;
+
+ /* Clear the IPA context for the new streaming session. */
+ context_.activeState = {};
+ context_.configuration = {};
+ context_.frameContexts.clear();
+
+ /* Initialise the sensor configuration. */
+ context_.configuration.sensor.lineDuration =
+ sensorInfo_.minLineLength * 1.0s / sensorInfo_.pixelRate;
+ context_.configuration.sensor.size = sensorInfo_.outputSize;
+
+ /*
+ * Compute the sensor V4L2 controls to be used by the algorithms and
+ * to be set on the sensor.
+ */
+ sensorCtrls_ = configInfo.sensorControls;
+
+ calculateBdsGrid(configInfo.bdsOutputSize);
+
+ /* Update the camera controls using the new sensor settings. */
+ updateControls(sensorInfo_, sensorCtrls_, ipaControls);
+
+ /* Update the IPASessionConfiguration using the sensor settings. */
+ updateSessionConfiguration(sensorCtrls_);
+
+ for (auto const &algo : algorithms()) {
+ int ret = algo->configure(context_, configInfo);
+ if (ret)
+ return ret;
+ }
+
+ return 0;
+}
+
+/**
+ * \brief Map the parameters and stats buffers allocated in the pipeline handler
+ * \param[in] buffers The buffers to map
+ */
+void IPAIPU3::mapBuffers(const std::vector<IPABuffer> &buffers)
+{
+ for (const IPABuffer &buffer : buffers) {
+ const FrameBuffer fb(buffer.planes);
+ buffers_.emplace(buffer.id,
+ MappedFrameBuffer(&fb, MappedFrameBuffer::MapFlag::ReadWrite));
+ }
+}
+
+/**
+ * \brief Unmap the parameters and stats buffers
+ * \param[in] ids The IDs of the buffers to unmap
+ */
+void IPAIPU3::unmapBuffers(const std::vector<unsigned int> &ids)
+{
+ for (unsigned int id : ids) {
+ auto it = buffers_.find(id);
+ if (it == buffers_.end())
+ continue;
+
+ buffers_.erase(it);
+ }
+}
+
+/**
+ * \brief Fill and return a buffer with ISP processing parameters for a frame
+ * \param[in] frame The frame number
+ * \param[in] bufferId ID of the parameter buffer to fill
+ *
+ * Algorithms are expected to fill the IPU3 parameter buffer for the next
+ * frame given their most recent processing of the ImgU statistics.
+ */
+void IPAIPU3::computeParams(const uint32_t frame, const uint32_t bufferId)
+{
+ auto it = buffers_.find(bufferId);
+ if (it == buffers_.end()) {
+ LOG(IPAIPU3, Error) << "Could not find param buffer!";
+ return;
+ }
+
+ Span<uint8_t> mem = it->second.planes()[0];
+ ipu3_uapi_params *params =
+ reinterpret_cast<ipu3_uapi_params *>(mem.data());
+
+ /*
+ * The incoming params buffer may contain uninitialised data, or the
+ * parameters of previously queued frames. Clearing the entire buffer
+ * may be an expensive operation, and the kernel will only read from
+ * structures which have their associated use-flag set.
+ *
+ * It is the responsibility of the algorithms to set the use flags
+ * accordingly for any data structure they update during prepare().
+ */
+ params->use = {};
+
+ IPAFrameContext &frameContext = context_.frameContexts.get(frame);
+
+ for (auto const &algo : algorithms())
+ algo->prepare(context_, frame, frameContext, params);
+
+ paramsComputed.emit(frame);
+}
+
+/**
+ * \brief Process the statistics generated by the ImgU
+ * \param[in] frame The frame number
+ * \param[in] frameTimestamp Timestamp of the frame
+ * \param[in] bufferId ID of the statistics buffer
+ * \param[in] sensorControls Sensor controls
+ *
+ * Parse the most recently processed image statistics from the ImgU. The
+ * statistics are passed to each algorithm module to run their calculations and
+ * update their state accordingly.
+ */
+void IPAIPU3::processStats(const uint32_t frame,
+ [[maybe_unused]] const int64_t frameTimestamp,
+ const uint32_t bufferId, const ControlList &sensorControls)
+{
+ auto it = buffers_.find(bufferId);
+ if (it == buffers_.end()) {
+ LOG(IPAIPU3, Error) << "Could not find stats buffer!";
+ return;
+ }
+
+ Span<uint8_t> mem = it->second.planes()[0];
+ const ipu3_uapi_stats_3a *stats =
+ reinterpret_cast<ipu3_uapi_stats_3a *>(mem.data());
+
+ IPAFrameContext &frameContext = context_.frameContexts.get(frame);
+
+ frameContext.sensor.exposure = sensorControls.get(V4L2_CID_EXPOSURE).get<int32_t>();
+ frameContext.sensor.gain = camHelper_->gain(sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>());
+
+ ControlList metadata(controls::controls);
+
+ for (auto const &algo : algorithms())
+ algo->process(context_, frame, frameContext, stats, metadata);
+
+ setControls(frame);
+
+ /*
+	 * \todo The metadata provides a path to getting extended data
+	 * out to the application. Further data such as a simplified histogram
+	 * might have value to be exposed; however, such data may be
+	 * difficult to report in a generically parsable way and we
+	 * likely want to avoid putting platform-specific metadata in.
+ */
+
+ metadataReady.emit(frame, metadata);
+}
+
+/**
+ * \brief Queue a request and process the control list from the application
+ * \param[in] frame The number of the frame which will be processed next
+ * \param[in] controls The controls for the \a frame
+ *
+ * Parse the request to handle any IPA-managed controls that were set from the
+ * application such as manual sensor settings.
+ */
+void IPAIPU3::queueRequest(const uint32_t frame, const ControlList &controls)
+{
+ IPAFrameContext &frameContext = context_.frameContexts.alloc(frame);
+
+ for (auto const &algo : algorithms())
+ algo->queueRequest(context_, frame, frameContext, controls);
+}
+
+/**
+ * \brief Handle sensor controls for a given \a frame number
+ * \param[in] frame The frame on which the sensor controls should be set
+ *
+ * Send the desired sensor control values to the pipeline handler to request
+ * that they are applied on the camera sensor.
+ */
+void IPAIPU3::setControls(unsigned int frame)
+{
+ int32_t exposure = context_.activeState.agc.exposure;
+ int32_t gain = camHelper_->gainCode(context_.activeState.agc.gain);
+
+ ControlList ctrls(sensorCtrls_);
+ ctrls.set(V4L2_CID_EXPOSURE, exposure);
+ ctrls.set(V4L2_CID_ANALOGUE_GAIN, gain);
+
+ ControlList lensCtrls(lensCtrls_);
+ lensCtrls.set(V4L2_CID_FOCUS_ABSOLUTE,
+ static_cast<int32_t>(context_.activeState.af.focus));
+
+ setSensorControls.emit(frame, ctrls, lensCtrls);
+}
+
+} /* namespace ipa::ipu3 */
+
+/**
+ * \brief External IPA module interface
+ *
+ * The IPAModuleInfo is required to match an IPA module against the pipeline
+ * handler it is intended for. The API and pipeline handler
+ * versions must match the corresponding IPA interface and pipeline handler.
+ *
+ * \sa struct IPAModuleInfo
+ */
+extern "C" {
+const struct IPAModuleInfo ipaModuleInfo = {
+ IPA_MODULE_API_VERSION,
+ 1,
+ "ipu3",
+ "ipu3",
+};
+
+/**
+ * \brief Create an instance of the IPA interface
+ *
+ * This function is the entry point of the IPA module. It is called by the IPA
+ * manager to create an instance of the IPA interface for each camera. When
+ * matched with a pipeline handler, the IPAManager will construct an IPA
+ * instance for each associated Camera.
+ */
+IPAInterface *ipaCreate()
+{
+ return new ipa::ipu3::IPAIPU3();
+}
+}
+
+} /* namespace libcamera */
diff --git a/src/ipa/ipu3/meson.build b/src/ipa/ipu3/meson.build
new file mode 100644
index 00000000..34de6213
--- /dev/null
+++ b/src/ipa/ipu3/meson.build
@@ -0,0 +1,31 @@
+# SPDX-License-Identifier: CC0-1.0
+
+subdir('algorithms')
+subdir('data')
+
+ipa_name = 'ipa_ipu3'
+
+ipu3_ipa_sources = files([
+ 'ipa_context.cpp',
+ 'ipu3.cpp',
+])
+
+ipu3_ipa_sources += ipu3_ipa_algorithms
+
+mod = shared_module(ipa_name, ipu3_ipa_sources,
+ name_prefix : '',
+ include_directories : [ipa_includes],
+ dependencies : [libcamera_private, libipa_dep],
+ install : true,
+ install_dir : ipa_install_dir)
+
+if ipa_sign_module
+ custom_target(ipa_name + '.so.sign',
+ input : mod,
+ output : ipa_name + '.so.sign',
+ command : [ipa_sign, ipa_priv_key, '@INPUT@', '@OUTPUT@'],
+ install : false,
+ build_by_default : true)
+endif
+
+ipa_names += ipa_name
diff --git a/src/ipa/ipu3/module.h b/src/ipa/ipu3/module.h
new file mode 100644
index 00000000..60f65cc4
--- /dev/null
+++ b/src/ipa/ipu3/module.h
@@ -0,0 +1,27 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2022, Ideas On Board
+ *
+ * IPU3 IPA Module
+ */
+
+#pragma once
+
+#include <linux/intel-ipu3.h>
+
+#include <libcamera/ipa/ipu3_ipa_interface.h>
+
+#include <libipa/module.h>
+
+#include "ipa_context.h"
+
+namespace libcamera {
+
+namespace ipa::ipu3 {
+
+using Module = ipa::Module<IPAContext, IPAFrameContext, IPAConfigInfo,
+ ipu3_uapi_params, ipu3_uapi_stats_3a>;
+
+} /* namespace ipa::ipu3 */
+
+} /* namespace libcamera */