-rw-r--r--  src/ipa/ipu3/algorithms/agc.cpp | 11
-rw-r--r--  src/ipa/ipu3/algorithms/agc.h   |  4
-rw-r--r--  src/ipa/ipu3/ipa_context.cpp    | 26
-rw-r--r--  src/ipa/ipu3/ipa_context.h      | 14
-rw-r--r--  src/ipa/ipu3/ipu3.cpp           | 24
5 files changed, 57 insertions, 22 deletions
diff --git a/src/ipa/ipu3/algorithms/agc.cpp b/src/ipa/ipu3/algorithms/agc.cpp
index 383a8deb..f16be534 100644
--- a/src/ipa/ipu3/algorithms/agc.cpp
+++ b/src/ipa/ipu3/algorithms/agc.cpp
@@ -183,14 +183,13 @@ utils::Duration Agc::filterExposure(utils::Duration exposureValue)
  * \param[in] yGain The gain calculated based on the relative luminance target
  * \param[in] iqMeanGain The gain calculated based on the relative luminance target
  */
-void Agc::computeExposure(IPAContext &context, double yGain,
-			  double iqMeanGain)
+void Agc::computeExposure(IPAContext &context, IPAFrameContext *frameContext,
+			  double yGain, double iqMeanGain)
 {
 	const IPASessionConfiguration &configuration = context.configuration;
-	IPAFrameContext &frameContext = context.frameContext;
 	/* Get the effective exposure and gain applied on the sensor. */
-	uint32_t exposure = frameContext.sensor.exposure;
-	double analogueGain = frameContext.sensor.gain;
+	uint32_t exposure = frameContext->sensor.exposure;
+	double analogueGain = frameContext->sensor.gain;
 
 	/* Use the highest of the two gain estimates. */
 	double evGain = std::max(yGain, iqMeanGain);
@@ -360,7 +359,7 @@ void Agc::process(IPAContext &context, [[maybe_unused]] IPAFrameContext *frameCo
 		break;
 	}
 
-	computeExposure(context, yGain, iqMeanGain);
+	computeExposure(context, frameContext, yGain, iqMeanGain);
 	frameCount_++;
 }
 
diff --git a/src/ipa/ipu3/algorithms/agc.h b/src/ipa/ipu3/algorithms/agc.h
index 219a1a96..105ae0f2 100644
--- a/src/ipa/ipu3/algorithms/agc.h
+++ b/src/ipa/ipu3/algorithms/agc.h
@@ -35,8 +35,8 @@ private:
 	double measureBrightness(const ipu3_uapi_stats_3a *stats,
 				 const ipu3_uapi_grid_config &grid) const;
 	utils::Duration filterExposure(utils::Duration currentExposure);
-	void computeExposure(IPAContext &context, double yGain,
-			     double iqMeanGain);
+	void computeExposure(IPAContext &context, IPAFrameContext *frameContext,
+			     double yGain, double iqMeanGain);
 	double estimateLuminance(IPAActiveState &activeState,
				 const ipu3_uapi_grid_config &grid,
				 const ipu3_uapi_stats_3a *stats,
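Note: the agc.cpp and agc.h hunks above switch computeExposure() from reading context.frameContext to dereferencing a frame context passed in by the caller. The following minimal, self-contained sketch illustrates that access pattern; SensorValues and FrameCtx are simplified stand-in types invented here, not the real libcamera structs, and the gain values are arbitrary.

// Sketch of the new calling convention, with stand-in types.
#include <algorithm>
#include <cstdint>
#include <iostream>

struct SensorValues {
	uint32_t exposure;
	double gain;
};

struct FrameCtx {
	SensorValues sensor;
};

// The gains are computed by the caller; only the context access changes here.
static void computeExposure(const FrameCtx *frameContext, double yGain,
			    double iqMeanGain)
{
	// Effective sensor values now come from the caller-supplied per-frame
	// context rather than a single shared frameContext member.
	uint32_t exposure = frameContext->sensor.exposure;
	double analogueGain = frameContext->sensor.gain;
	double evGain = std::max(yGain, iqMeanGain);

	std::cout << "exposure=" << exposure << " gain=" << analogueGain
		  << " evGain=" << evGain << "\n";
}

int main()
{
	FrameCtx frame{ { 1024, 2.0 } };
	// process() would forward its frameContext pointer like this:
	computeExposure(&frame, 1.5, 1.2);
	return 0;
}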
diff --git a/src/ipa/ipu3/ipa_context.cpp b/src/ipa/ipu3/ipa_context.cpp
index 383c2e37..13cdb835 100644
--- a/src/ipa/ipu3/ipa_context.cpp
+++ b/src/ipa/ipu3/ipa_context.cpp
@@ -58,13 +58,11 @@ namespace libcamera::ipa::ipu3 {
  * \var IPAContext::configuration
  * \brief The IPA session configuration, immutable during the session
  *
- * \var IPAContext::frameContext
- * \brief The frame context for the frame being processed
+ * \var IPAContext::frameContexts
+ * \brief Ring buffer of the IPAFrameContext(s)
  *
  * \var IPAContext::activeState
  * \brief The current state of IPA algorithms
- *
- * \todo The frame context needs to be turned into real per-frame data storage.
  */
 
 /**
@@ -183,6 +181,26 @@ namespace libcamera::ipa::ipu3 {
  */
 
 /**
+ * \brief Default constructor for IPAFrameContext
+ */
+IPAFrameContext::IPAFrameContext() = default;
+
+/**
+ * \brief Construct a IPAFrameContext instance
+ */
+IPAFrameContext::IPAFrameContext(uint32_t id, const ControlList &reqControls)
+	: frame(id), frameControls(reqControls)
+{
+	sensor = {};
+}
+
+/**
+ * \var IPAFrameContext::frame
+ * \brief The frame number
+ *
+ * \var IPAFrameContext::frameControls
+ * \brief Controls sent in by the application while queuing the request
+ *
  * \var IPAFrameContext::sensor
  * \brief Effective sensor values that were applied for the frame
  *
diff --git a/src/ipa/ipu3/ipa_context.h b/src/ipa/ipu3/ipa_context.h
index 8d681131..42e11141 100644
--- a/src/ipa/ipu3/ipa_context.h
+++ b/src/ipa/ipu3/ipa_context.h
@@ -8,16 +8,22 @@
 
 #pragma once
 
+#include <array>
+
 #include <linux/intel-ipu3.h>
 
 #include <libcamera/base/utils.h>
 
+#include <libcamera/controls.h>
 #include <libcamera/geometry.h>
 
 namespace libcamera {
 
 namespace ipa::ipu3 {
 
+/* Maximum number of frame contexts to be held */
+static constexpr uint32_t kMaxFrameContexts = 16;
+
 struct IPASessionConfiguration {
 	struct {
 		ipu3_uapi_grid_config bdsGrid;
@@ -71,17 +77,23 @@ struct IPAActiveState {
 };
 
 struct IPAFrameContext {
+	IPAFrameContext();
+	IPAFrameContext(uint32_t id, const ControlList &reqControls);
+
 	struct {
 		uint32_t exposure;
 		double gain;
 	} sensor;
+
+	uint32_t frame;
+	ControlList frameControls;
 };
 
 struct IPAContext {
 	IPASessionConfiguration configuration;
 	IPAActiveState activeState;
-	IPAFrameContext frameContext;
+	std::array<IPAFrameContext, kMaxFrameContexts> frameContexts;
 };
 
 } /* namespace ipa::ipu3 */
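Note: std::array requires IPAFrameContext to be default-constructible, which is why the header gains an explicit default constructor alongside the (id, controls) constructor, and why configure() can reset the whole ring with fill(). The sketch below mirrors that shape under simplified assumptions: Controls stands in for libcamera::ControlList and FrameCtx for IPAFrameContext; it is an illustration, not the project's code.

// Sketch of the ring-buffer storage added to IPAContext, with stand-in types.
#include <array>
#include <cstdint>

static constexpr uint32_t kMaxFrameContexts = 16;

struct Controls {
	int64_t exposureTime = 0; // stand-in for a queued control value
};

struct FrameCtx {
	FrameCtx() = default;
	FrameCtx(uint32_t id, const Controls &reqControls)
		: frame(id), frameControls(reqControls)
	{
		sensor = {};
	}

	struct {
		uint32_t exposure;
		double gain;
	} sensor;

	uint32_t frame = 0;
	Controls frameControls;
};

int main()
{
	// configure(): reset every slot to a default-constructed context.
	std::array<FrameCtx, kMaxFrameContexts> frameContexts;
	frameContexts.fill(FrameCtx());

	// queueRequest(): store the request data in the slot for this frame.
	Controls controls{ 10000 };
	frameContexts[7 % kMaxFrameContexts] = FrameCtx(7, controls);

	return frameContexts[7].frame == 7 ? 0 : 1;
}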
diff --git a/src/ipa/ipu3/ipu3.cpp b/src/ipa/ipu3/ipu3.cpp
index 16e5028f..2f6bb672 100644
--- a/src/ipa/ipu3/ipu3.cpp
+++ b/src/ipa/ipu3/ipu3.cpp
@@ -313,7 +313,7 @@ int IPAIPU3::init(const IPASettings &settings,
 	}
 
 	/* Clean context */
-	context_ = {};
+	context_.configuration = {};
 	context_.configuration.sensor.lineDuration = sensorInfo.lineLength * 1.0s / sensorInfo.pixelRate;
 
 	/* Construct our Algorithms */
@@ -456,7 +456,8 @@ int IPAIPU3::configure(const IPAConfigInfo &configInfo,
 
 	/* Clean IPAActiveState at each reconfiguration. */
 	context_.activeState = {};
-	context_.frameContext = {};
+	IPAFrameContext initFrameContext;
+	context_.frameContexts.fill(initFrameContext);
 
 	if (!validateSensorControls()) {
 		LOG(IPAIPU3, Error) << "Sensor control validation failed.";
@@ -568,15 +569,20 @@ void IPAIPU3::processStatsBuffer(const uint32_t frame,
 	const ipu3_uapi_stats_3a *stats =
 		reinterpret_cast<ipu3_uapi_stats_3a *>(mem.data());
 
-	context_.frameContext.sensor.exposure = sensorControls.get(V4L2_CID_EXPOSURE).get<int32_t>();
-	context_.frameContext.sensor.gain = camHelper_->gain(sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>());
+	IPAFrameContext &frameContext = context_.frameContexts[frame % kMaxFrameContexts];
+
+	if (frameContext.frame != frame)
+		LOG(IPAIPU3, Warning) << "Frame " << frame << " does not match its frame context";
+
+	frameContext.sensor.exposure = sensorControls.get(V4L2_CID_EXPOSURE).get<int32_t>();
+	frameContext.sensor.gain = camHelper_->gain(sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>());
 
 	double lineDuration = context_.configuration.sensor.lineDuration.get<std::micro>();
 	int32_t vBlank = context_.configuration.sensor.defVBlank;
 	ControlList ctrls(controls::controls);
 
 	for (auto const &algo : algorithms_)
-		algo->process(context_, nullptr, stats);
+		algo->process(context_, &frameContext, stats);
 
 	setControls(frame);
 
@@ -584,11 +590,11 @@ void IPAIPU3::processStatsBuffer(const uint32_t frame,
 	int64_t frameDuration = (vBlank + sensorInfo_.outputSize.height) * lineDuration;
 	ctrls.set(controls::FrameDuration, frameDuration);
 
-	ctrls.set(controls::AnalogueGain, context_.frameContext.sensor.gain);
+	ctrls.set(controls::AnalogueGain, frameContext.sensor.gain);
 
 	ctrls.set(controls::ColourTemperature, context_.activeState.awb.temperatureK);
 
-	ctrls.set(controls::ExposureTime, context_.frameContext.sensor.exposure * lineDuration);
+	ctrls.set(controls::ExposureTime, frameContext.sensor.exposure * lineDuration);
 
 	/*
 	 * \todo The Metadata provides a path to getting extended data
@@ -609,10 +615,10 @@ void IPAIPU3::processStatsBuffer(const uint32_t frame,
  * Parse the request to handle any IPA-managed controls that were set from the
  * application such as manual sensor settings.
  */
-void IPAIPU3::queueRequest([[maybe_unused]] const uint32_t frame,
-			   [[maybe_unused]] const ControlList &controls)
+void IPAIPU3::queueRequest(const uint32_t frame, const ControlList &controls)
 {
 	/* \todo Start processing for 'frame' based on 'controls'. */
+	context_.frameContexts[frame % kMaxFrameContexts] = { frame, controls };
 }
 
 /**
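Note: the slot for a frame is simply frame % kMaxFrameContexts, so a slot is silently reused once more than kMaxFrameContexts requests are in flight; the new Warning in processStatsBuffer() detects that mismatch. Below is a small standalone illustration of the lookup and of the wraparound case, again using a simplified stand-in FrameCtx type rather than the real IPAFrameContext.

// Illustration of the frame-to-slot mapping and the wraparound mismatch.
#include <array>
#include <cstdint>
#include <iostream>

static constexpr uint32_t kMaxFrameContexts = 16;

struct FrameCtx {
	uint32_t frame = 0;
};

int main()
{
	std::array<FrameCtx, kMaxFrameContexts> frameContexts{};

	// queueRequest() side: frame N lands in slot N % kMaxFrameContexts.
	for (uint32_t frame = 0; frame < 40; frame++)
		frameContexts[frame % kMaxFrameContexts] = { frame };

	// processStatsBuffer() side: looking up an old frame after the ring
	// has wrapped finds a slot that was already overwritten.
	uint32_t frame = 5;
	const FrameCtx &frameContext = frameContexts[frame % kMaxFrameContexts];
	if (frameContext.frame != frame)
		std::cout << "Frame " << frame
			  << " does not match its frame context (slot holds frame "
			  << frameContext.frame << ")\n";

	return 0;
}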