Diffstat (limited to 'src')
-rw-r--r--  src/gstreamer/gstlibcamera-controls.cpp.in    |  13
-rw-r--r--  src/ipa/libipa/histogram.cpp                  |  42
-rw-r--r--  src/ipa/rkisp1/rkisp1.cpp                     |   3
-rw-r--r--  src/ipa/simple/algorithms/agc.cpp             |  11
-rw-r--r--  src/ipa/simple/algorithms/awb.cpp             |  21
-rw-r--r--  src/ipa/simple/algorithms/awb.h               |   6
-rw-r--r--  src/ipa/simple/algorithms/blc.cpp             |  21
-rw-r--r--  src/ipa/simple/algorithms/blc.h               |   2
-rw-r--r--  src/ipa/simple/algorithms/lut.cpp             |  15
-rw-r--r--  src/ipa/simple/algorithms/lut.h               |   5
-rw-r--r--  src/ipa/simple/ipa_context.h                  |  15
-rw-r--r--  src/ipa/simple/soft_simple.cpp                |  24
-rw-r--r--  src/libcamera/camera_manager.cpp              |   7
-rw-r--r--  src/libcamera/pipeline/rpi/pisp/pisp.cpp      |   4
-rw-r--r--  src/libcamera/pipeline/rpi/vc4/vc4.cpp        |   4
-rw-r--r--  src/libcamera/pipeline/simple/simple.cpp      | 162
-rw-r--r--  src/libcamera/pipeline/uvcvideo/uvcvideo.cpp  | 119
-rw-r--r--  src/libcamera/software_isp/software_isp.cpp   |  29
-rw-r--r--  src/libcamera/v4l2_device.cpp                 |  22
-rw-r--r--  src/py/libcamera/py_camera_manager.h          |   2
20 files changed, 415 insertions, 112 deletions
diff --git a/src/gstreamer/gstlibcamera-controls.cpp.in b/src/gstreamer/gstlibcamera-controls.cpp.in
index d937b19e..89c530da 100644
--- a/src/gstreamer/gstlibcamera-controls.cpp.in
+++ b/src/gstreamer/gstlibcamera-controls.cpp.in
@@ -223,7 +223,6 @@ bool GstCameraControls::setProperty(guint propId, const GValue *value,
 {%- for ctrl in ctrls %}
     case controls::{{ ctrl.namespace }}{{ ctrl.name|snake_case|upper }}: {
-        ControlValue control;
 {%- if ctrl.is_array %}
         size_t size = gst_value_array_get_size(value);
 {%- if ctrl.size != 0 %}
@@ -254,12 +253,9 @@ bool GstCameraControls::setProperty(guint propId, const GValue *value,
         }
 {%- if ctrl.size == 0 %}
-        control.set(Span<const {{ ctrl.element_type }}>(values.data(),
-                                                        size));
+        Span<const {{ ctrl.element_type }}> val(values.data(), size);
 {%- else %}
-        control.set(Span<const {{ ctrl.element_type }},
-                         {{ ctrl.size }}>(values.data(),
-                                          {{ ctrl.size }}));
+        Span<const {{ ctrl.element_type }}, {{ ctrl.size }}> val(values.data(), size);
 {%- endif %}
 {%- else %}
 {%- if ctrl.is_rectangle %}
@@ -273,10 +269,9 @@ bool GstCameraControls::setProperty(guint propId, const GValue *value,
 {%- else %}
         auto val = g_value_get_{{ ctrl.gtype }}(value);
 {%- endif %}
-        control.set(val);
 {%- endif %}
-        controls_.set(propId, control);
-        controls_acc_.set(propId, control);
+        controls_.set(controls::{{ ctrl.namespace }}{{ ctrl.name }}, val);
+        controls_acc_.set(controls::{{ ctrl.namespace }}{{ ctrl.name }}, val);
         return true;
     }
 {%- endfor %}
diff --git a/src/ipa/libipa/histogram.cpp b/src/ipa/libipa/histogram.cpp
index 10e44b54..bcf26390 100644
--- a/src/ipa/libipa/histogram.cpp
+++ b/src/ipa/libipa/histogram.cpp
@@ -130,7 +130,8 @@ double Histogram::quantile(double q, uint32_t first, uint32_t last) const
     if (cumulative_[first + 1] == cumulative_[first])
         frac = 0;
     else
-        frac = (item - cumulative_[first]) / (cumulative_[first + 1] - cumulative_[first]);
+        frac = (q * total() - cumulative_[first])
+             / (cumulative_[first + 1] - cumulative_[first]);
 
     return first + frac;
 }
@@ -148,26 +149,37 @@ double Histogram::quantile(double q, uint32_t first, uint32_t last) const
 double Histogram::interQuantileMean(double lowQuantile, double highQuantile) const
 {
     ASSERT(highQuantile > lowQuantile);
-    /* Proportion of pixels which lies below lowQuantile */
-    double lowPoint = quantile(lowQuantile);
-    /* Proportion of pixels which lies below highQuantile */
-    double highPoint = quantile(highQuantile, static_cast<uint32_t>(lowPoint));
-    double sumBinFreq = 0, cumulFreq = 0;
-
-    for (double p_next = floor(lowPoint) + 1.0;
-         p_next <= ceil(highPoint);
-         lowPoint = p_next, p_next += 1.0) {
-        int bin = floor(lowPoint);
+
+    /* Proportion of pixels which lies below lowQuantile and highQuantile. */
+    const double lowPoint = quantile(lowQuantile);
+    const double highPoint = quantile(highQuantile, static_cast<uint32_t>(lowPoint));
+
+    double sumBinFreq = 0;
+    double cumulFreq = 0;
+
+    /*
+     * Calculate the mean pixel value between the low and high points by
+     * summing all the pixels between the two points, and dividing the sum
+     * by the number of pixels. Given the discrete nature of the histogram
+     * data, the sum of the pixels is approximated by accumulating the
+     * product of the bin values (calculated as the mid point of the bin) by
+     * the number of pixels they contain, for each bin in the interval.
+     */
+    for (unsigned bin = std::floor(lowPoint); bin < std::ceil(highPoint); bin++) {
+        const double lowBound = std::max<double>(bin, lowPoint);
+        const double highBound = std::min<double>(bin + 1, highPoint);
+
         double freq = (cumulative_[bin + 1] - cumulative_[bin])
-            * (std::min(p_next, highPoint) - lowPoint);
+                    * (highBound - lowBound);
 
         /* Accumulate weighted bin */
-        sumBinFreq += bin * freq;
+        sumBinFreq += (highBound + lowBound) / 2 * freq;
+
         /* Accumulate weights */
         cumulFreq += freq;
     }
-    /* add 0.5 to give an average for bin mid-points */
-    return sumBinFreq / cumulFreq + 0.5;
+
+    return sumBinFreq / cumulFreq;
 }
 
 } /* namespace ipa */
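The rework above replaces the old fixed +0.5 bin-centre correction with an explicit weighting of each bin's mid-point by the fraction of its pixels that falls between the two quantile points. A minimal standalone sketch of the same accumulation, using a hypothetical plain cumulative histogram in place of the cumulative_ member:

    #include <algorithm>
    #include <cmath>
    #include <vector>

    /* Mean bin position between two fractional bin positions, weighted by bin counts. */
    double interQuantileMean(const std::vector<double> &cumulative,
                             double lowPoint, double highPoint)
    {
        double sumBinFreq = 0.0;
        double cumulFreq = 0.0;

        for (unsigned bin = std::floor(lowPoint); bin < std::ceil(highPoint); bin++) {
            /* Portion of this bin that lies inside [lowPoint, highPoint]. */
            const double lowBound = std::max<double>(bin, lowPoint);
            const double highBound = std::min<double>(bin + 1, highPoint);
            /* Number of pixels of this bin that fall inside the interval. */
            const double freq = (cumulative[bin + 1] - cumulative[bin])
                              * (highBound - lowBound);

            sumBinFreq += (highBound + lowBound) / 2 * freq;
            cumulFreq += freq;
        }

        return sumBinFreq / cumulFreq;
    }

For example, with cumulative = {0, 4, 10, 12} (bins of 4, 6 and 2 pixels), lowPoint = 0.5 and highPoint = 2.5, the contributions are 2 pixels at 0.75, 6 pixels at 1.5 and 1 pixel at 2.25, giving 12.75 / 9 ≈ 1.42.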
diff --git a/src/ipa/rkisp1/rkisp1.cpp b/src/ipa/rkisp1/rkisp1.cpp
index cb487ae5..70ce0cba 100644
--- a/src/ipa/rkisp1/rkisp1.cpp
+++ b/src/ipa/rkisp1/rkisp1.cpp
@@ -211,8 +211,7 @@ int IPARkISP1::init(const IPASettings &settings, unsigned int hwRevision,
 
 int IPARkISP1::start()
 {
-    setControls(0);
-
+    /* \todo Properly handle startup controls. */
     return 0;
 }
 
diff --git a/src/ipa/simple/algorithms/agc.cpp b/src/ipa/simple/algorithms/agc.cpp
index 72aade14..c46bb0eb 100644
--- a/src/ipa/simple/algorithms/agc.cpp
+++ b/src/ipa/simple/algorithms/agc.cpp
@@ -11,6 +11,8 @@
 
 #include <libcamera/base/log.h>
 
+#include "control_ids.h"
+
 namespace libcamera {
 
 LOG_DEFINE_CATEGORY(IPASoftExposure)
@@ -97,10 +99,15 @@ void Agc::updateExposure(IPAContext &context, IPAFrameContext &frameContext, dou
 
 void Agc::process(IPAContext &context,
                   [[maybe_unused]] const uint32_t frame,
-                  [[maybe_unused]] IPAFrameContext &frameContext,
+                  IPAFrameContext &frameContext,
                   const SwIspStats *stats,
-                  [[maybe_unused]] ControlList &metadata)
+                  ControlList &metadata)
 {
+    utils::Duration exposureTime =
+        context.configuration.agc.lineDuration * frameContext.sensor.exposure;
+    metadata.set(controls::ExposureTime, exposureTime.get<std::micro>());
+    metadata.set(controls::AnalogueGain, frameContext.sensor.gain);
+
     /*
      * Calculate Mean Sample Value (MSV) according to formula from:
      * https://www.araa.asn.au/acra/acra2007/papers/paper84final.pdf
diff --git a/src/ipa/simple/algorithms/awb.cpp b/src/ipa/simple/algorithms/awb.cpp
index ec77c6e5..55719059 100644
--- a/src/ipa/simple/algorithms/awb.cpp
+++ b/src/ipa/simple/algorithms/awb.cpp
@@ -17,6 +17,8 @@
 #include "libipa/colours.h"
 #include "simple/ipa_context.h"
 
+#include "control_ids.h"
+
 namespace libcamera {
 
 LOG_DEFINE_CATEGORY(IPASoftAwb)
@@ -32,15 +34,32 @@ int Awb::configure(IPAContext &context,
     return 0;
 }
 
+void Awb::prepare(IPAContext &context,
+                  [[maybe_unused]] const uint32_t frame,
+                  IPAFrameContext &frameContext,
+                  [[maybe_unused]] DebayerParams *params)
+{
+    auto &gains = context.activeState.awb.gains;
+    frameContext.gains.red = gains.r();
+    frameContext.gains.blue = gains.b();
+}
+
 void Awb::process(IPAContext &context,
                   [[maybe_unused]] const uint32_t frame,
-                  [[maybe_unused]] IPAFrameContext &frameContext,
+                  IPAFrameContext &frameContext,
                   const SwIspStats *stats,
                   ControlList &metadata)
 {
     const SwIspStats::Histogram &histogram = stats->yHistogram;
     const uint8_t blackLevel = context.activeState.blc.level;
 
+    const float maxGain = 1024.0;
+    const float mdGains[] = {
+        static_cast<float>(frameContext.gains.red / maxGain),
+        static_cast<float>(frameContext.gains.blue / maxGain)
+    };
+    metadata.set(controls::ColourGains, mdGains);
+
     /*
      * Black level must be subtracted to get the correct AWB ratios, they
      * would be off if they were computed from the whole brightness range
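The comment that closes the hunk above is the reason the gains cannot be derived from the raw channel statistics: the black level offsets every channel equally, so ratios computed over the whole brightness range are biased towards 1.0. A small worked illustration with hypothetical 8-bit channel averages, assuming the usual grey-world G/R and G/B ratios:

    /* Hypothetical 8-bit channel averages and black level. */
    const double black = 16.0;
    const double avgR = 80.0, avgG = 112.0, avgB = 48.0;

    /* With the black level subtracted first. */
    const double gainR = (avgG - black) / (avgR - black); /* 96 / 64 = 1.50 */
    const double gainB = (avgG - black) / (avgB - black); /* 96 / 32 = 3.00 */

    /* Without the subtraction the ratios are pulled towards 1.0. */
    const double offR = avgG / avgR; /* 112 / 80 = 1.40 */
    const double offB = avgG / avgB; /* 112 / 48 ≈ 2.33 */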
diff --git a/src/ipa/simple/algorithms/awb.h b/src/ipa/simple/algorithms/awb.h
index db1496cd..ad993f39 100644
--- a/src/ipa/simple/algorithms/awb.h
+++ b/src/ipa/simple/algorithms/awb.h
@@ -1,6 +1,6 @@
 /* SPDX-License-Identifier: LGPL-2.1-or-later */
 /*
- * Copyright (C) 2024, Red Hat Inc.
+ * Copyright (C) 2024-2025 Red Hat Inc.
  *
  * Auto white balance
  */
@@ -20,6 +20,10 @@ public:
     ~Awb() = default;
 
     int configure(IPAContext &context, const IPAConfigInfo &configInfo) override;
+    void prepare(IPAContext &context,
+                 const uint32_t frame,
+                 IPAFrameContext &frameContext,
+                 DebayerParams *params) override;
     void process(IPAContext &context,
                  const uint32_t frame,
                  IPAFrameContext &frameContext,
diff --git a/src/ipa/simple/algorithms/blc.cpp b/src/ipa/simple/algorithms/blc.cpp
index 1d7d370b..8c1e9ed0 100644
--- a/src/ipa/simple/algorithms/blc.cpp
+++ b/src/ipa/simple/algorithms/blc.cpp
@@ -1,6 +1,6 @@
 /* SPDX-License-Identifier: LGPL-2.1-or-later */
 /*
- * Copyright (C) 2024, Red Hat Inc.
+ * Copyright (C) 2024-2025, Red Hat Inc.
 *
 * Black level handling
 */
@@ -11,6 +11,8 @@
 
 #include <libcamera/base/log.h>
 
+#include "control_ids.h"
+
 namespace libcamera {
 
 namespace ipa::soft::algorithms {
@@ -49,13 +51,20 @@ void BlackLevel::process(IPAContext &context,
                          [[maybe_unused]] const uint32_t frame,
                          IPAFrameContext &frameContext,
                          const SwIspStats *stats,
-                         [[maybe_unused]] ControlList &metadata)
+                         ControlList &metadata)
 {
+    /* Assign each of the R G G B channels as the same black level. */
+    const int32_t blackLevel = context.activeState.blc.level * 256;
+    const int32_t blackLevels[] = {
+        blackLevel, blackLevel, blackLevel, blackLevel
+    };
+    metadata.set(controls::SensorBlackLevels, blackLevels);
+
     if (context.configuration.black.level.has_value())
         return;
 
-    if (frameContext.sensor.exposure == exposure_ &&
-        frameContext.sensor.gain == gain_) {
+    if (frameContext.sensor.exposure == context.activeState.blc.lastExposure &&
+        frameContext.sensor.gain == context.activeState.blc.lastGain) {
         return;
     }
 
@@ -79,8 +88,8 @@ void BlackLevel::process(IPAContext &context,
         seen += histogram[i];
         if (seen >= pixelThreshold) {
             context.activeState.blc.level = i * histogramRatio;
-            exposure_ = frameContext.sensor.exposure;
-            gain_ = frameContext.sensor.gain;
+            context.activeState.blc.lastExposure = frameContext.sensor.exposure;
+            context.activeState.blc.lastGain = frameContext.sensor.gain;
             LOG(IPASoftBL, Debug)
                 << "Auto-set black level: "
                 << i << "/" << SwIspStats::kYHistogramSize
diff --git a/src/ipa/simple/algorithms/blc.h b/src/ipa/simple/algorithms/blc.h
index 52d59cab..db9e6d63 100644
--- a/src/ipa/simple/algorithms/blc.h
+++ b/src/ipa/simple/algorithms/blc.h
@@ -30,8 +30,6 @@ public:
                  ControlList &metadata) override;
 
 private:
-    int32_t exposure_;
-    double gain_;
     std::optional<uint8_t> definedLevel_;
 };
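For context on the * 256 factor in blc.cpp above: the software ISP estimates the black level on the 8-bit luminance histogram, while the SensorBlackLevels metadata is conventionally expressed in the 16-bit sample domain and carries one value per Bayer channel (R, Gr, Gb, B), so the 8-bit estimate is scaled by 2^8 and repeated four times. A short sketch of that conversion (hypothetical helper, not part of the patch):

    #include <array>
    #include <cstdint>

    /* Report an 8-bit black level estimate in 16-bit SensorBlackLevels form. */
    constexpr std::array<int32_t, 4> sensorBlackLevels(uint8_t level8)
    {
        const int32_t level16 = static_cast<int32_t>(level8) * 256;
        /* Same value for each of the R, Gr, Gb and B channels. */
        return { level16, level16, level16, level16 };
    }

    /* e.g. an 8-bit level of 16 is reported as 4096 for all four channels. */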
diff --git a/src/ipa/simple/algorithms/lut.cpp b/src/ipa/simple/algorithms/lut.cpp
index a06cdeba..e8638f27 100644
--- a/src/ipa/simple/algorithms/lut.cpp
+++ b/src/ipa/simple/algorithms/lut.cpp
@@ -87,9 +87,11 @@ int16_t Lut::ccmValue(unsigned int i, float ccm) const
 
 void Lut::prepare(IPAContext &context,
                   [[maybe_unused]] const uint32_t frame,
-                  [[maybe_unused]] IPAFrameContext &frameContext,
+                  IPAFrameContext &frameContext,
                   DebayerParams *params)
 {
+    frameContext.contrast = context.activeState.knobs.contrast;
+
     /*
      * Update the gamma table if needed. This means if black level changes
      * and since the black level gets updated only if a lower value is
@@ -139,6 +141,17 @@ void Lut::prepare(IPAContext &context,
     }
 }
 
+void Lut::process([[maybe_unused]] IPAContext &context,
+                  [[maybe_unused]] const uint32_t frame,
+                  [[maybe_unused]] IPAFrameContext &frameContext,
+                  [[maybe_unused]] const SwIspStats *stats,
+                  ControlList &metadata)
+{
+    const auto &contrast = frameContext.contrast;
+    if (contrast)
+        metadata.set(controls::Contrast, contrast.value());
+}
+
 REGISTER_IPA_ALGORITHM(Lut, "Lut")
 
 } /* namespace ipa::soft::algorithms */
diff --git a/src/ipa/simple/algorithms/lut.h b/src/ipa/simple/algorithms/lut.h
index 77324800..ba8b9021 100644
--- a/src/ipa/simple/algorithms/lut.h
+++ b/src/ipa/simple/algorithms/lut.h
@@ -30,6 +30,11 @@ public:
                  const uint32_t frame,
                  IPAFrameContext &frameContext,
                  DebayerParams *params) override;
+    void process(IPAContext &context,
+                 const uint32_t frame,
+                 IPAFrameContext &frameContext,
+                 const SwIspStats *stats,
+                 ControlList &metadata) override;
 
 private:
     void updateGammaTable(IPAContext &context);
diff --git a/src/ipa/simple/ipa_context.h b/src/ipa/simple/ipa_context.h
index 17bcd4ca..88cc6c35 100644
--- a/src/ipa/simple/ipa_context.h
+++ b/src/ipa/simple/ipa_context.h
@@ -1,6 +1,6 @@
 /* SPDX-License-Identifier: LGPL-2.1-or-later */
 /*
- * Copyright (C) 2024 Red Hat, Inc.
+ * Copyright (C) 2024-2025 Red Hat, Inc.
 *
 * Simple pipeline IPA Context
 */
@@ -18,6 +18,8 @@
 
 #include <libipa/fc_queue.h>
 
+#include "core_ipa_interface.h"
+
 namespace libcamera {
 
 namespace ipa::soft {
@@ -27,6 +29,7 @@ struct IPASessionConfiguration {
     struct {
         int32_t exposureMin, exposureMax;
         double againMin, againMax, againMinStep;
+        utils::Duration lineDuration;
     } agc;
     struct {
         std::optional<uint8_t> level;
@@ -36,6 +39,8 @@ struct IPASessionConfiguration {
 struct IPAActiveState {
     struct {
         uint8_t level;
+        int32_t lastExposure;
+        double lastGain;
     } blc;
 
     struct {
@@ -70,6 +75,11 @@ struct IPAFrameContext : public FrameContext {
         int32_t exposure;
         double gain;
     } sensor;
+    struct {
+        double red;
+        double blue;
+    } gains;
+    std::optional<double> contrast;
 };
 
 struct IPAContext {
@@ -78,11 +88,12 @@ struct IPAContext {
     {
     }
 
+    IPACameraSensorInfo sensorInfo;
     IPASessionConfiguration configuration;
     IPAActiveState activeState;
     FCQueue<IPAFrameContext> frameContexts;
     ControlInfoMap::Map ctrlMap;
-    bool ccmEnabled;
+    bool ccmEnabled = false;
 };
 
 } /* namespace ipa::soft */
diff --git a/src/ipa/simple/soft_simple.cpp b/src/ipa/simple/soft_simple.cpp
index a87c6cdd..c94c4cd5 100644
--- a/src/ipa/simple/soft_simple.cpp
+++ b/src/ipa/simple/soft_simple.cpp
@@ -5,6 +5,7 @@
 * Simple Software Image Processing Algorithm module
 */
 
+#include <chrono>
 #include <stdint.h>
 #include <sys/mman.h>
 
@@ -32,6 +33,8 @@ namespace libcamera {
 
 LOG_DEFINE_CATEGORY(IPASoft)
 
+using namespace std::literals::chrono_literals;
+
 namespace ipa::soft {
 
 /* Maximum number of frame contexts to be held */
@@ -50,7 +53,8 @@ public:
     int init(const IPASettings &settings,
              const SharedFD &fdStats,
             const SharedFD &fdParams,
-            const ControlInfoMap &sensorInfoMap,
+            const IPACameraSensorInfo &sensorInfo,
+            const ControlInfoMap &sensorControls,
             ControlInfoMap *ipaControls,
             bool *ccmEnabled) override;
     int configure(const IPAConfigInfo &configInfo) override;
@@ -89,7 +93,8 @@ IPASoftSimple::~IPASoftSimple()
 int IPASoftSimple::init(const IPASettings &settings,
                         const SharedFD &fdStats,
                         const SharedFD &fdParams,
-                        const ControlInfoMap &sensorInfoMap,
+                        const IPACameraSensorInfo &sensorInfo,
+                        const ControlInfoMap &sensorControls,
                         ControlInfoMap *ipaControls,
                         bool *ccmEnabled)
 {
@@ -100,6 +105,8 @@ int IPASoftSimple::init(const IPASettings &settings,
                 << settings.sensorModel;
     }
 
+    context_.sensorInfo = sensorInfo;
+
     /* Load the tuning data file */
     File file(settings.configurationFile);
     if (!file.open(File::OpenModeFlag::ReadOnly)) {
@@ -173,12 +180,12 @@ int IPASoftSimple::init(const IPASettings &settings,
     * Don't save the min and max control values yet, as e.g. the limits
     * for V4L2_CID_EXPOSURE depend on the configured sensor resolution.
     */
-    if (sensorInfoMap.find(V4L2_CID_EXPOSURE) == sensorInfoMap.end()) {
+    if (sensorControls.find(V4L2_CID_EXPOSURE) == sensorControls.end()) {
        LOG(IPASoft, Error) << "Don't have exposure control";
        return -EINVAL;
    }
 
-    if (sensorInfoMap.find(V4L2_CID_ANALOGUE_GAIN) == sensorInfoMap.end()) {
+    if (sensorControls.find(V4L2_CID_ANALOGUE_GAIN) == sensorControls.end()) {
        LOG(IPASoft, Error) << "Don't have gain control";
        return -EINVAL;
    }
@@ -198,6 +205,8 @@ int IPASoftSimple::configure(const IPAConfigInfo &configInfo)
     context_.activeState = {};
     context_.frameContexts.clear();
 
+    context_.configuration.agc.lineDuration =
+        context_.sensorInfo.minLineLength * 1.0s / context_.sensorInfo.pixelRate;
     context_.configuration.agc.exposureMin = exposureInfo.min().get<int32_t>();
     context_.configuration.agc.exposureMax = exposureInfo.max().get<int32_t>();
     if (!context_.configuration.agc.exposureMin) {
@@ -299,15 +308,10 @@ void IPASoftSimple::processStats(const uint32_t frame,
     int32_t again = sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
     frameContext.sensor.gain = camHelper_ ? camHelper_->gain(again) : again;
 
-    /*
-     * Software ISP currently does not produce any metadata. Use an empty
-     * ControlList for now.
-     *
-     * \todo Implement proper metadata handling
-     */
     ControlList metadata(controls::controls);
     for (auto const &algo : algorithms())
         algo->process(context_, frame, frameContext, stats_, metadata);
+    metadataReady.emit(frame, metadata);
 
     /* Sanity check */
     if (!sensorControls.contains(V4L2_CID_EXPOSURE) ||
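The lineDuration value computed in configure() above is what makes the ExposureTime metadata in agc.cpp meaningful: the sensor exposure is expressed in lines, so lineDuration = minLineLength / pixelRate and exposureTime = exposure * lineDuration. A small sketch with hypothetical sensor timings:

    #include <cstdint>
    #include <iostream>

    int main()
    {
        /* Hypothetical sensor timings. */
        const uint32_t minLineLength = 3448;  /* pixels per line */
        const uint64_t pixelRate = 297000000; /* pixels per second */
        const int32_t exposure = 1000;        /* exposure, in lines */

        const double lineDurationUs = 1e6 * minLineLength / pixelRate;
        const double exposureTimeUs = exposure * lineDurationUs;

        /* 3448 / 297 MHz ≈ 11.6 us per line, so 1000 lines ≈ 11.6 ms. */
        std::cout << exposureTimeUs << " us\n";
        return 0;
    }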
diff --git a/src/libcamera/camera_manager.cpp b/src/libcamera/camera_manager.cpp
index 400109f1..e62e7193 100644
--- a/src/libcamera/camera_manager.cpp
+++ b/src/libcamera/camera_manager.cpp
@@ -239,10 +239,7 @@ void CameraManager::Private::removeCamera(std::shared_ptr<Camera> camera)
 {
     MutexLocker locker(mutex_);
 
-    auto iter = std::find_if(cameras_.begin(), cameras_.end(),
-                             [camera](std::shared_ptr<Camera> &c) {
-                                 return c.get() == camera.get();
-                             });
+    auto iter = std::find(cameras_.begin(), cameras_.end(), camera);
     if (iter == cameras_.end())
         return;
 
@@ -384,7 +381,7 @@ std::vector<std::shared_ptr<Camera>> CameraManager::cameras() const
 *
 * \return Shared pointer to Camera object or nullptr if camera not found
 */
-std::shared_ptr<Camera> CameraManager::get(const std::string &id)
+std::shared_ptr<Camera> CameraManager::get(std::string_view id)
 {
     Private *const d = _d();
 
diff --git a/src/libcamera/pipeline/rpi/pisp/pisp.cpp b/src/libcamera/pipeline/rpi/pisp/pisp.cpp
index 42ca7c80..91e7f4c9 100644
--- a/src/libcamera/pipeline/rpi/pisp/pisp.cpp
+++ b/src/libcamera/pipeline/rpi/pisp/pisp.cpp
@@ -1350,9 +1350,9 @@ int PiSPCameraData::platformPipelineConfigure(const std::unique_ptr<YamlObject>
     }
 
     std::optional<std::string> target = (*root)["target"].get<std::string>();
-    if (!target || *target != "pisp") {
+    if (target != "pisp") {
        LOG(RPI, Error) << "Unexpected target reported: expected \"pisp\", got "
-                       << *target;
+                       << (target ? target->c_str() : "(unknown)");
        return -EINVAL;
    }
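The simplified check relies on C++17's mixed comparison operators for std::optional: an empty optional never compares equal to a value, so `target != "pisp"` covers both the missing and the mismatching target in a single expression (the same pattern is applied to the vc4 variant below). A minimal illustration:

    #include <cassert>
    #include <optional>
    #include <string>

    int main()
    {
        std::optional<std::string> target;

        /* Disengaged optional: compares unequal to any value. */
        assert(target != "pisp");

        /* Engaged optional: compares against the contained value. */
        target = "pisp";
        assert(target == "pisp");
        return 0;
    }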
diff --git a/src/libcamera/pipeline/rpi/vc4/vc4.cpp b/src/libcamera/pipeline/rpi/vc4/vc4.cpp
index fd8d84b1..fe910bdf 100644
--- a/src/libcamera/pipeline/rpi/vc4/vc4.cpp
+++ b/src/libcamera/pipeline/rpi/vc4/vc4.cpp
@@ -510,9 +510,9 @@ int Vc4CameraData::platformPipelineConfigure(const std::unique_ptr<YamlObject> &
     }
 
     std::optional<std::string> target = (*root)["target"].get<std::string>();
-    if (!target || *target != "bcm2835") {
+    if (target != "bcm2835") {
        LOG(RPI, Error) << "Unexpected target reported: expected \"bcm2835\", got "
-                       << *target;
+                       << (target ? target->c_str() : "(unknown)");
        return -EINVAL;
    }
 
diff --git a/src/libcamera/pipeline/simple/simple.cpp b/src/libcamera/pipeline/simple/simple.cpp
index 6e039bf3..efb07051 100644
--- a/src/libcamera/pipeline/simple/simple.cpp
+++ b/src/libcamera/pipeline/simple/simple.cpp
@@ -181,6 +181,56 @@ LOG_DEFINE_CATEGORY(SimplePipeline)
 
 class SimplePipelineHandler;
 
+struct SimpleFrameInfo {
+    SimpleFrameInfo(uint32_t f, Request *r, bool m)
+        : frame(f), request(r), metadataRequired(m), metadataProcessed(false)
+    {
+    }
+
+    uint32_t frame;
+    Request *request;
+    bool metadataRequired;
+    bool metadataProcessed;
+};
+
+class SimpleFrames
+{
+public:
+    void create(Request *request, bool metadataRequested);
+    void destroy(uint32_t frame);
+    void clear();
+
+    SimpleFrameInfo *find(uint32_t frame);
+
+private:
+    std::map<uint32_t, SimpleFrameInfo> frameInfo_;
+};
+
+void SimpleFrames::create(Request *request, bool metadataRequired)
+{
+    const uint32_t frame = request->sequence();
+    auto [it, inserted] = frameInfo_.try_emplace(frame, frame, request, metadataRequired);
+    ASSERT(inserted);
+}
+
+void SimpleFrames::destroy(uint32_t frame)
+{
+    frameInfo_.erase(frame);
+}
+
+void SimpleFrames::clear()
+{
+    frameInfo_.clear();
+}
+
+SimpleFrameInfo *SimpleFrames::find(uint32_t frame)
+{
+    auto info = frameInfo_.find(frame);
+    if (info == frameInfo_.end())
+        return nullptr;
+    return &info->second;
+}
+
 struct SimplePipelineInfo {
     const char *driver;
     /*
@@ -277,6 +327,7 @@ public:
     std::list<Entity> entities_;
     std::unique_ptr<CameraSensor> sensor_;
     V4L2VideoDevice *video_;
+    V4L2Subdevice *frameStartEmitter_;
 
     std::vector<Configuration> configs_;
     std::map<PixelFormat, std::vector<const Configuration *>> formats_;
@@ -293,15 +344,18 @@ public:
     std::unique_ptr<Converter> converter_;
     std::unique_ptr<SoftwareIsp> swIsp_;
+    SimpleFrames frameInfo_;
 
 private:
     void tryPipeline(unsigned int code, const Size &size);
     static std::vector<const MediaPad *> routedSourcePads(MediaPad *sink);
 
+    void tryCompleteRequest(Request *request);
     void conversionInputDone(FrameBuffer *buffer);
     void conversionOutputDone(FrameBuffer *buffer);
     void ispStatsReady(uint32_t frame, uint32_t bufferId);
+    void metadataReady(uint32_t frame, const ControlList &metadata);
     void setSensorControls(const ControlList &sensorControls);
 };
 
@@ -488,6 +542,13 @@ SimpleCameraData::SimpleCameraData(SimplePipelineHandler *pipe,
     if (!sensor_)
         return;
 
+    const CameraSensorProperties::SensorDelays &delays = sensor_->sensorDelays();
+    std::unordered_map<uint32_t, DelayedControls::ControlParams> params = {
+        { V4L2_CID_ANALOGUE_GAIN, { delays.gainDelay, false } },
+        { V4L2_CID_EXPOSURE, { delays.exposureDelay, false } },
+    };
+    delayedCtrls_ = std::make_unique<DelayedControls>(sensor_->device(), params);
+
     LOG(SimplePipeline, Debug)
         << "Found pipeline: "
         << utils::join(entities_, " -> ",
@@ -540,6 +601,7 @@ int SimpleCameraData::init()
             swIsp_->inputBufferReady.connect(this, &SimpleCameraData::conversionInputDone);
             swIsp_->outputBufferReady.connect(this, &SimpleCameraData::conversionOutputDone);
             swIsp_->ispStatsReady.connect(this, &SimpleCameraData::ispStatsReady);
+            swIsp_->metadataReady.connect(this, &SimpleCameraData::metadataReady);
             swIsp_->setSensorControls.connect(this, &SimpleCameraData::setSensorControls);
         }
     }
@@ -579,6 +641,20 @@ int SimpleCameraData::init()
 
     properties_ = sensor_->properties();
 
+    /* Find the first subdev that can generate a frame start signal, if any. */
+    frameStartEmitter_ = nullptr;
+    for (const Entity &entity : entities_) {
+        V4L2Subdevice *sd = pipe->subdev(entity.entity);
+        if (!sd || !sd->supportsFrameStartEvent())
+            continue;
+
+        LOG(SimplePipeline, Debug)
+            << "Using frameStart signal from '"
+            << entity.entity->name() << "'";
+        frameStartEmitter_ = sd;
+        break;
+    }
+
     return 0;
 }
 
@@ -785,7 +861,7 @@ void SimpleCameraData::imageBufferReady(FrameBuffer *buffer)
         /* No conversion, just complete the request. */
         Request *request = buffer->request();
         pipe->completeBuffer(request, buffer);
-        pipe->completeRequest(request);
+        tryCompleteRequest(request);
         return;
     }
 
@@ -803,7 +879,10 @@ void SimpleCameraData::imageBufferReady(FrameBuffer *buffer)
         const RequestOutputs &outputs = conversionQueue_.front();
         for (auto &[stream, buf] : outputs.outputs)
             pipe->completeBuffer(outputs.request, buf);
-        pipe->completeRequest(outputs.request);
+        SimpleFrameInfo *info = frameInfo_.find(outputs.request->sequence());
+        if (info)
+            info->metadataRequired = false;
+        tryCompleteRequest(outputs.request);
         conversionQueue_.pop();
 
         return;
@@ -861,7 +940,7 @@ void SimpleCameraData::imageBufferReady(FrameBuffer *buffer)
 
     /* Otherwise simply complete the request. */
     pipe->completeBuffer(request, buffer);
-    pipe->completeRequest(request);
+    tryCompleteRequest(request);
 }
 
 void SimpleCameraData::clearIncompleteRequests()
@@ -872,6 +951,24 @@ void SimpleCameraData::clearIncompleteRequests()
     }
 }
 
+void SimpleCameraData::tryCompleteRequest(Request *request)
+{
+    if (request->hasPendingBuffers())
+        return;
+
+    SimpleFrameInfo *info = frameInfo_.find(request->sequence());
+    if (!info) {
+        /* Something is really wrong, let's return. */
+        return;
+    }
+
+    if (info->metadataRequired && !info->metadataProcessed)
+        return;
+
+    frameInfo_.destroy(info->frame);
+    pipe()->completeRequest(request);
+}
+
 void SimpleCameraData::conversionInputDone(FrameBuffer *buffer)
 {
     /* Queue the input buffer back for capture. */
@@ -885,7 +982,7 @@ void SimpleCameraData::conversionOutputDone(FrameBuffer *buffer)
     /* Complete the buffer and the request. */
     Request *request = buffer->request();
     if (pipe->completeBuffer(request, buffer))
-        pipe->completeRequest(request);
+        tryCompleteRequest(request);
 }
 
 void SimpleCameraData::ispStatsReady(uint32_t frame, uint32_t bufferId)
@@ -894,11 +991,32 @@ void SimpleCameraData::ispStatsReady(uint32_t frame, uint32_t bufferId)
                        delayedCtrls_->get(frame));
 }
 
+void SimpleCameraData::metadataReady(uint32_t frame, const ControlList &metadata)
+{
+    SimpleFrameInfo *info = frameInfo_.find(frame);
+    if (!info)
+        return;
+
+    info->request->metadata().merge(metadata);
+    info->metadataProcessed = true;
+    tryCompleteRequest(info->request);
+}
+
 void SimpleCameraData::setSensorControls(const ControlList &sensorControls)
 {
     delayedCtrls_->push(sensorControls);
-    ControlList ctrls(sensorControls);
-    sensor_->setControls(&ctrls);
+    /*
+     * Directly apply controls now if there is no frameStart signal.
+     *
+     * \todo Applying controls directly not only increases the risk of
+     * applying them to the wrong frame (or across a frame boundary),
+     * but it also bypasses delayedCtrls_, creating AGC regulation issues.
+     * Both problems should be fixed.
+     */
+    if (!frameStartEmitter_) {
+        ControlList ctrls(sensorControls);
+        sensor_->setControls(&ctrls);
+    }
 }
 
 /* Retrieve all source pads connected to a sink pad through active routes. */
@@ -1277,17 +1395,6 @@ int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
     if (outputCfgs.empty())
         return 0;
 
-    const CameraSensorProperties::SensorDelays &delays = data->sensor_->sensorDelays();
-    std::unordered_map<uint32_t, DelayedControls::ControlParams> params = {
-        { V4L2_CID_ANALOGUE_GAIN, { delays.gainDelay, false } },
-        { V4L2_CID_EXPOSURE, { delays.exposureDelay, false } },
-    };
-    data->delayedCtrls_ =
-        std::make_unique<DelayedControls>(data->sensor_->device(),
-                                          params);
-    data->video_->frameStart.connect(data->delayedCtrls_.get(),
-                                     &DelayedControls::applyControls);
-
     StreamConfiguration inputCfg;
     inputCfg.pixelFormat = pipeConfig->captureFormat;
     inputCfg.size = pipeConfig->captureSize;
@@ -1325,6 +1432,7 @@ int SimplePipelineHandler::start(Camera *camera, [[maybe_unused]] const ControlL
 {
     SimpleCameraData *data = cameraData(camera);
     V4L2VideoDevice *video = data->video_;
+    V4L2Subdevice *frameStartEmitter = data->frameStartEmitter_;
     int ret;
 
     const MediaPad *pad = acquirePipeline(data);
@@ -1354,6 +1462,17 @@ int SimplePipelineHandler::start(Camera *camera, [[maybe_unused]] const ControlL
 
     video->bufferReady.connect(data, &SimpleCameraData::imageBufferReady);
 
+    data->delayedCtrls_->reset();
+    if (frameStartEmitter) {
+        ret = frameStartEmitter->setFrameStartEnabled(true);
+        if (ret) {
+            stop(camera);
+            return ret;
+        }
+        frameStartEmitter->frameStart.connect(data->delayedCtrls_.get(),
+                                              &DelayedControls::applyControls);
+    }
+
     ret = video->streamOn();
     if (ret < 0) {
         stop(camera);
@@ -1385,6 +1504,13 @@ void SimplePipelineHandler::stopDevice(Camera *camera)
 {
     SimpleCameraData *data = cameraData(camera);
     V4L2VideoDevice *video = data->video_;
+    V4L2Subdevice *frameStartEmitter = data->frameStartEmitter_;
+
+    if (frameStartEmitter) {
+        frameStartEmitter->setFrameStartEnabled(false);
+        frameStartEmitter->frameStart.disconnect(data->delayedCtrls_.get(),
+                                                 &DelayedControls::applyControls);
+    }
 
     if (data->useConversion_) {
         if (data->converter_)
@@ -1398,6 +1524,7 @@ void SimplePipelineHandler::stopDevice(Camera *camera)
 
     video->bufferReady.disconnect(data, &SimpleCameraData::imageBufferReady);
 
+    data->frameInfo_.clear();
     data->clearIncompleteRequests();
     data->conversionBuffers_.clear();
 
@@ -1426,6 +1553,7 @@ int SimplePipelineHandler::queueRequestDevice(Camera *camera, Request *request)
         }
     }
 
+    data->frameInfo_.create(request, !!data->swIsp_);
     if (data->useConversion_) {
         data->conversionQueue_.push({ request, std::move(buffers) });
         if (data->swIsp_)
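The \todo above is about losing the DelayedControls bookkeeping when no frameStart signal is available: the intended flow is that queued sensor controls are written only when a frame actually starts, so that get(frame) later returns the values that were in effect for that exact frame. A rough sketch of that flow, assuming the DelayedControls push()/applyControls()/get() interface used in this file (fragment for illustration, not a complete program):

    #include <libcamera/controls.h>

    #include "libcamera/internal/delayed_controls.h"

    using namespace libcamera;

    /* Queued on setSensorControls(): nothing is written to the sensor yet. */
    void queueControls(DelayedControls &delayed, const ControlList &ctrls)
    {
        delayed.push(ctrls);
    }

    /* Driven by the frameStart signal: write whatever is due for this frame. */
    void onFrameStart(DelayedControls &delayed, uint32_t sequence)
    {
        delayed.applyControls(sequence);
    }

    /* When statistics arrive: fetch the values that were active for that frame. */
    ControlList controlsForFrame(DelayedControls &delayed, uint32_t frame)
    {
        return delayed.get(frame);
    }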
diff --git a/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp b/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
index 7470b562..5adc89fd 100644
--- a/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
+++ b/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
@@ -6,10 +6,12 @@
 */
 
 #include <algorithm>
+#include <bitset>
 #include <cmath>
 #include <fstream>
 #include <map>
 #include <memory>
+#include <optional>
 #include <set>
 #include <string>
 #include <vector>
@@ -56,6 +58,9 @@ public:
     Stream stream_;
     std::map<PixelFormat, std::vector<SizeRange>> formats_;
 
+    std::optional<v4l2_exposure_auto_type> autoExposureMode_;
+    std::optional<v4l2_exposure_auto_type> manualExposureMode_;
+
 private:
     bool generateId();
 
@@ -93,8 +98,8 @@ public:
     bool match(DeviceEnumerator *enumerator) override;
 
 private:
-    int processControl(ControlList *controls, unsigned int id,
-                       const ControlValue &value);
+    int processControl(const UVCCameraData *data, ControlList *controls,
+                       unsigned int id, const ControlValue &value);
     int processControls(UVCCameraData *data, Request *request);
 
     bool acquireDevice(Camera *camera) override;
@@ -106,6 +111,26 @@ private:
     }
 };
 
+namespace {
+
+std::optional<controls::ExposureTimeModeEnum> v4l2ToExposureMode(int32_t x)
+{
+    using namespace controls;
+
+    switch (x) {
+    case V4L2_EXPOSURE_AUTO:
+    case V4L2_EXPOSURE_APERTURE_PRIORITY:
+        return ExposureTimeModeAuto;
+    case V4L2_EXPOSURE_MANUAL:
+    case V4L2_EXPOSURE_SHUTTER_PRIORITY:
+        return ExposureTimeModeManual;
+    default:
+        return {};
+    }
+}
+
+} /* namespace */
+
 UVCCameraConfiguration::UVCCameraConfiguration(UVCCameraData *data)
     : CameraConfiguration(), data_(data)
 {
@@ -287,8 +312,8 @@ void PipelineHandlerUVC::stopDevice(Camera *camera)
     data->video_->releaseBuffers();
 }
 
-int PipelineHandlerUVC::processControl(ControlList *controls, unsigned int id,
-                                       const ControlValue &value)
+int PipelineHandlerUVC::processControl(const UVCCameraData *data, ControlList *controls,
+                                       unsigned int id, const ControlValue &value)
 {
     uint32_t cid;
 
@@ -332,10 +357,21 @@ int PipelineHandlerUVC::processControl(ControlList *controls, unsigned int id,
     }
 
     case V4L2_CID_EXPOSURE_AUTO: {
-        int32_t ivalue = value.get<bool>()
-                       ? V4L2_EXPOSURE_APERTURE_PRIORITY
-                       : V4L2_EXPOSURE_MANUAL;
-        controls->set(V4L2_CID_EXPOSURE_AUTO, ivalue);
+        std::optional<v4l2_exposure_auto_type> mode;
+
+        switch (value.get<int32_t>()) {
+        case controls::ExposureTimeModeAuto:
+            mode = data->autoExposureMode_;
+            break;
+        case controls::ExposureTimeModeManual:
+            mode = data->manualExposureMode_;
+            break;
+        }
+
+        if (!mode)
+            return -EINVAL;
+
+        controls->set(V4L2_CID_EXPOSURE_AUTO, static_cast<int32_t>(*mode));
         break;
     }
 
@@ -373,7 +409,7 @@ int PipelineHandlerUVC::processControls(UVCCameraData *data, Request *request)
     ControlList controls(data->video_->controls());
 
     for (const auto &[id, value] : request->controls())
-        processControl(&controls, id, value);
+        processControl(data, &controls, id, value);
 
     for (const auto &ctrl : controls)
         LOG(UVC, Debug)
@@ -723,25 +759,52 @@ void UVCCameraData::addControl(uint32_t cid, const ControlInfo &v4l2Info,
         *   ExposureTimeModeManual = { V4L2_EXPOSURE_MANUAL,
         *                              V4L2_EXPOSURE_SHUTTER_PRIORITY }
         */
-        std::array<int32_t, 2> values{};
-
-        auto it = std::find_if(v4l2Values.begin(), v4l2Values.end(),
-                               [&](const ControlValue &val) {
-                                   return (val.get<int32_t>() == V4L2_EXPOSURE_APERTURE_PRIORITY ||
-                                           val.get<int32_t>() == V4L2_EXPOSURE_AUTO) ? true : false;
-                               });
-        if (it != v4l2Values.end())
-            values.back() = static_cast<int32_t>(controls::ExposureTimeModeAuto);
-
-        it = std::find_if(v4l2Values.begin(), v4l2Values.end(),
-                          [&](const ControlValue &val) {
-                              return (val.get<int32_t>() == V4L2_EXPOSURE_SHUTTER_PRIORITY ||
-                                      val.get<int32_t>() == V4L2_EXPOSURE_MANUAL) ? true : false;
-                          });
-        if (it != v4l2Values.end())
-            values.back() = static_cast<int32_t>(controls::ExposureTimeModeManual);
-
-        info = ControlInfo{Span<int32_t>{values}, values[0]};
+
+        std::bitset<
+            std::max(V4L2_EXPOSURE_AUTO,
+                     std::max(V4L2_EXPOSURE_APERTURE_PRIORITY,
+                              std::max(V4L2_EXPOSURE_MANUAL,
+                                       V4L2_EXPOSURE_SHUTTER_PRIORITY))) + 1
+        > exposureModes;
+        std::optional<controls::ExposureTimeModeEnum> lcDef;
+
+        for (const ControlValue &value : v4l2Values) {
+            const auto x = value.get<int32_t>();
+
+            if (0 <= x && static_cast<std::size_t>(x) < exposureModes.size()) {
+                exposureModes[x] = true;
+
+                if (x == def)
+                    lcDef = v4l2ToExposureMode(x);
+            }
+        }
+
+        if (exposureModes[V4L2_EXPOSURE_AUTO])
+            autoExposureMode_ = V4L2_EXPOSURE_AUTO;
+        else if (exposureModes[V4L2_EXPOSURE_APERTURE_PRIORITY])
+            autoExposureMode_ = V4L2_EXPOSURE_APERTURE_PRIORITY;
+
+        if (exposureModes[V4L2_EXPOSURE_SHUTTER_PRIORITY])
+            manualExposureMode_ = V4L2_EXPOSURE_SHUTTER_PRIORITY;
+        else if (exposureModes[V4L2_EXPOSURE_MANUAL])
+            manualExposureMode_ = V4L2_EXPOSURE_MANUAL;
+
+        std::array<ControlValue, 2> values;
+        std::size_t count = 0;
+
+        if (autoExposureMode_)
+            values[count++] = controls::ExposureTimeModeAuto;
+
+        if (manualExposureMode_)
+            values[count++] = controls::ExposureTimeModeManual;
+
+        if (count == 0)
+            return;
+
+        info = ControlInfo{
+            Span<const ControlValue>{ values.data(), count },
+            !lcDef ? values.front() : *lcDef,
+        };
         break;
     }
     case V4L2_CID_EXPOSURE_ABSOLUTE:
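The net effect of the selection logic above is a per-camera choice of which V4L2 mode backs each ExposureTimeMode value, preferring the fully automatic and shutter-priority modes when the camera offers them. For a hypothetical UVC camera whose V4L2_CID_EXPOSURE_AUTO menu only lists aperture-priority and manual, the outcome works out as sketched below:

    #include <linux/v4l2-controls.h>
    #include <optional>

    std::optional<v4l2_exposure_auto_type> autoMode;
    std::optional<v4l2_exposure_auto_type> manualMode;

    void selectModes(bool hasAuto, bool hasAperturePriority,
                     bool hasShutterPriority, bool hasManual)
    {
        /* ExposureTimeModeAuto: prefer full auto, else aperture priority. */
        if (hasAuto)
            autoMode = V4L2_EXPOSURE_AUTO;
        else if (hasAperturePriority)
            autoMode = V4L2_EXPOSURE_APERTURE_PRIORITY;

        /* ExposureTimeModeManual: prefer shutter priority, else full manual. */
        if (hasShutterPriority)
            manualMode = V4L2_EXPOSURE_SHUTTER_PRIORITY;
        else if (hasManual)
            manualMode = V4L2_EXPOSURE_MANUAL;
    }

    /* selectModes(false, true, false, true) leaves autoMode = APERTURE_PRIORITY and
     * manualMode = MANUAL, so both ExposureTimeMode values get registered. */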
diff --git a/src/libcamera/software_isp/software_isp.cpp b/src/libcamera/software_isp/software_isp.cpp
index 4a74dcb6..28e2a360 100644
--- a/src/libcamera/software_isp/software_isp.cpp
+++ b/src/libcamera/software_isp/software_isp.cpp
@@ -56,6 +56,11 @@ LOG_DEFINE_CATEGORY(SoftwareIsp)
 */
 
 /**
+ * \var SoftwareIsp::metadataReady
+ * \brief A signal emitted when the metadata for IPA is ready
+ */
+
+/**
 * \var SoftwareIsp::setSensorControls
 * \brief A signal emitted when the values to write to the sensor controls are
 * ready
@@ -128,12 +133,20 @@ SoftwareIsp::SoftwareIsp(PipelineHandler *pipe, const CameraSensor *sensor,
     std::string ipaTuningFile =
         ipa_->configurationFile(sensor->model() + ".yaml", "uncalibrated.yaml");
 
-    int ret = ipa_->init(IPASettings{ ipaTuningFile, sensor->model() },
-                         debayer_->getStatsFD(),
-                         sharedParams_.fd(),
-                         sensor->controls(),
-                         ipaControls,
-                         &ccmEnabled_);
+    IPACameraSensorInfo sensorInfo{};
+    int ret = sensor->sensorInfo(&sensorInfo);
+    if (ret) {
+        LOG(SoftwareIsp, Error) << "Camera sensor information not available";
+        return;
+    }
+
+    ret = ipa_->init(IPASettings{ ipaTuningFile, sensor->model() },
+                     debayer_->getStatsFD(),
+                     sharedParams_.fd(),
+                     sensorInfo,
+                     sensor->controls(),
+                     ipaControls,
+                     &ccmEnabled_);
     if (ret) {
         LOG(SoftwareIsp, Error) << "IPA init failed";
         debayer_.reset();
@@ -141,6 +154,10 @@ SoftwareIsp::SoftwareIsp(PipelineHandler *pipe, const CameraSensor *sensor,
     }
 
     ipa_->setIspParams.connect(this, &SoftwareIsp::saveIspParams);
+    ipa_->metadataReady.connect(this,
+        [this](uint32_t frame, const ControlList &metadata) {
+            metadataReady.emit(frame, metadata);
+        });
     ipa_->setSensorControls.connect(this, &SoftwareIsp::setSensorCtrls);
 
     debayer_->moveToThread(&ispWorkerThread_);
diff --git a/src/libcamera/v4l2_device.cpp b/src/libcamera/v4l2_device.cpp
index 2f65a43a..0db92c19 100644
--- a/src/libcamera/v4l2_device.cpp
+++ b/src/libcamera/v4l2_device.cpp
@@ -450,6 +450,28 @@ std::string V4L2Device::devicePath() const
 }
 
 /**
+ * \brief Check if frame start event is supported
+ *
+ * Due to limitations in the kernel API, this function may disable the frame
+ * start event as a side effect. It should only be called during initialization,
+ * before enabling the frame start event with setFrameStartEnabled().
+ *
+ * \return True if frame start event is supported, false otherwise
+ */
+bool V4L2Device::supportsFrameStartEvent()
+{
+    struct v4l2_event_subscription event{};
+    event.type = V4L2_EVENT_FRAME_SYNC;
+
+    int ret = ioctl(VIDIOC_SUBSCRIBE_EVENT, &event);
+    if (ret)
+        return false;
+
+    ioctl(VIDIOC_UNSUBSCRIBE_EVENT, &event);
+    return true;
+}
+
+/**
 * \brief Enable or disable frame start event notification
 * \param[in] enable True to enable frame start events, false to disable them
 *
diff --git a/src/py/libcamera/py_camera_manager.h b/src/py/libcamera/py_camera_manager.h
index 3574db23..af69b915 100644
--- a/src/py/libcamera/py_camera_manager.h
+++ b/src/py/libcamera/py_camera_manager.h
@@ -20,7 +20,7 @@ public:
     ~PyCameraManager();
 
     pybind11::list cameras();
-    std::shared_ptr<Camera> get(const std::string &name) { return cameraManager_->get(name); }
+    std::shared_ptr<Camera> get(std::string_view name) { return cameraManager_->get(name); }
 
     static const std::string &version() { return CameraManager::version(); }