Diffstat (limited to 'src')
-rw-r--r--  src/ipa/libipa/histogram.cpp               42
-rw-r--r--  src/libcamera/pipeline/rpi/pisp/pisp.cpp    4
-rw-r--r--  src/libcamera/pipeline/rpi/vc4/vc4.cpp      4
-rw-r--r--  src/libcamera/pipeline/simple/simple.cpp   66
-rw-r--r--  src/libcamera/v4l2_device.cpp              22
5 files changed, 106 insertions, 32 deletions
diff --git a/src/ipa/libipa/histogram.cpp b/src/ipa/libipa/histogram.cpp
index 10e44b54..bcf26390 100644
--- a/src/ipa/libipa/histogram.cpp
+++ b/src/ipa/libipa/histogram.cpp
@@ -130,7 +130,8 @@ double Histogram::quantile(double q, uint32_t first, uint32_t last) const
if (cumulative_[first + 1] == cumulative_[first])
frac = 0;
else
- frac = (item - cumulative_[first]) / (cumulative_[first + 1] - cumulative_[first]);
+ frac = (q * total() - cumulative_[first])
+ / (cumulative_[first + 1] - cumulative_[first]);
return first + frac;
}
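
For context, a standalone sketch (not part of the patch) of what this hunk fixes, assuming item is the integer pixel count computed earlier in quantile() from q * total(): interpolating from the truncated count drops the fractional part that the new expression keeps. All numbers below are made up.

#include <cstdint>
#include <iostream>

int main()
{
	/* Made-up numbers: 100-pixel histogram, q = 0.275, bin boundaries 25/50. */
	const double q = 0.275, total = 100;
	const double lo = 25, hi = 50;

	const uint64_t item = q * total;			/* truncates to 27 */
	const double oldFrac = (item - lo) / (hi - lo);		/* 0.08 */
	const double newFrac = (q * total - lo) / (hi - lo);	/* 0.1 */

	std::cout << oldFrac << " vs " << newFrac << "\n";
	return 0;
}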
@@ -148,26 +149,37 @@ double Histogram::quantile(double q, uint32_t first, uint32_t last) const
double Histogram::interQuantileMean(double lowQuantile, double highQuantile) const
{
ASSERT(highQuantile > lowQuantile);
- /* Proportion of pixels which lies below lowQuantile */
- double lowPoint = quantile(lowQuantile);
- /* Proportion of pixels which lies below highQuantile */
- double highPoint = quantile(highQuantile, static_cast<uint32_t>(lowPoint));
- double sumBinFreq = 0, cumulFreq = 0;
-
- for (double p_next = floor(lowPoint) + 1.0;
- p_next <= ceil(highPoint);
- lowPoint = p_next, p_next += 1.0) {
- int bin = floor(lowPoint);
+
+	/* Histogram points below which the lowQuantile and highQuantile proportions of pixels lie. */
+ const double lowPoint = quantile(lowQuantile);
+ const double highPoint = quantile(highQuantile, static_cast<uint32_t>(lowPoint));
+
+ double sumBinFreq = 0;
+ double cumulFreq = 0;
+
+ /*
+ * Calculate the mean pixel value between the low and high points by
+ * summing all the pixels between the two points, and dividing the sum
+ * by the number of pixels. Given the discrete nature of the histogram
+ * data, the sum of the pixels is approximated by accumulating the
+ * product of the bin values (calculated as the mid point of the bin) by
+	 * the number of pixels they contain, for each bin in the interval.
+ */
+ for (unsigned bin = std::floor(lowPoint); bin < std::ceil(highPoint); bin++) {
+ const double lowBound = std::max<double>(bin, lowPoint);
+ const double highBound = std::min<double>(bin + 1, highPoint);
+
double freq = (cumulative_[bin + 1] - cumulative_[bin])
- * (std::min(p_next, highPoint) - lowPoint);
+ * (highBound - lowBound);
/* Accumulate weighted bin */
- sumBinFreq += bin * freq;
+ sumBinFreq += (highBound + lowBound) / 2 * freq;
+
/* Accumulate weights */
cumulFreq += freq;
}
- /* add 0.5 to give an average for bin mid-points */
- return sumBinFreq / cumulFreq + 0.5;
+
+ return sumBinFreq / cumulFreq;
}
} /* namespace ipa */
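
As a hedged sanity check of the new weighting (made-up data, not a libcamera test): for a uniform histogram of four bins with 25 pixels each, quantile(0.25) and quantile(0.75) would give 1.0 and 3.0, and the reworked loop returns 2.0, the midpoint of that range, with no flat +0.5 correction needed.

#include <algorithm>
#include <cassert>
#include <cmath>
#include <vector>

int main()
{
	/* cumulative[i] = number of pixels in bins [0, i); four bins of 25 pixels. */
	const std::vector<double> cumulative = { 0, 25, 50, 75, 100 };
	const double lowPoint = 1.0, highPoint = 3.0;

	double sumBinFreq = 0, cumulFreq = 0;
	for (unsigned int bin = std::floor(lowPoint); bin < std::ceil(highPoint); bin++) {
		const double lowBound = std::max<double>(bin, lowPoint);
		const double highBound = std::min<double>(bin + 1, highPoint);
		const double freq = (cumulative[bin + 1] - cumulative[bin])
				  * (highBound - lowBound);
		sumBinFreq += (highBound + lowBound) / 2 * freq;
		cumulFreq += freq;
	}

	/* (1.5 * 25 + 2.5 * 25) / 50 == 2.0, the midpoint of [1.0, 3.0]. */
	assert(sumBinFreq / cumulFreq == 2.0);
	return 0;
}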
diff --git a/src/libcamera/pipeline/rpi/pisp/pisp.cpp b/src/libcamera/pipeline/rpi/pisp/pisp.cpp
index 42ca7c80..91e7f4c9 100644
--- a/src/libcamera/pipeline/rpi/pisp/pisp.cpp
+++ b/src/libcamera/pipeline/rpi/pisp/pisp.cpp
@@ -1350,9 +1350,9 @@ int PiSPCameraData::platformPipelineConfigure(const std::unique_ptr<YamlObject>
}
std::optional<std::string> target = (*root)["target"].get<std::string>();
- if (!target || *target != "pisp") {
+ if (target != "pisp") {
LOG(RPI, Error) << "Unexpected target reported: expected \"pisp\", got "
- << *target;
+ << (target ? target->c_str() : "(unknown)");
return -EINVAL;
}
diff --git a/src/libcamera/pipeline/rpi/vc4/vc4.cpp b/src/libcamera/pipeline/rpi/vc4/vc4.cpp
index fd8d84b1..fe910bdf 100644
--- a/src/libcamera/pipeline/rpi/vc4/vc4.cpp
+++ b/src/libcamera/pipeline/rpi/vc4/vc4.cpp
@@ -510,9 +510,9 @@ int Vc4CameraData::platformPipelineConfigure(const std::unique_ptr<YamlObject> &
}
std::optional<std::string> target = (*root)["target"].get<std::string>();
- if (!target || *target != "bcm2835") {
+ if (target != "bcm2835") {
LOG(RPI, Error) << "Unexpected target reported: expected \"bcm2835\", got "
- << *target;
+ << (target ? target->c_str() : "(unknown)");
return -EINVAL;
}
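
Both the pisp and vc4 hunks rely on std::optional's mixed comparison operators: comparing a disengaged optional against a value is well defined and never equal, so the explicit has-value check is redundant. A minimal illustration (hypothetical snippet, not pipeline code):

#include <cassert>
#include <optional>
#include <string>

int main()
{
	std::optional<std::string> target;

	/* A disengaged optional never compares equal to a value... */
	assert(target != "pisp");

	/* ...while an engaged one compares its contained string. */
	target = "pisp";
	assert(target == "pisp");
	return 0;
}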
diff --git a/src/libcamera/pipeline/simple/simple.cpp b/src/libcamera/pipeline/simple/simple.cpp
index fd0ccdca..efb07051 100644
--- a/src/libcamera/pipeline/simple/simple.cpp
+++ b/src/libcamera/pipeline/simple/simple.cpp
@@ -327,6 +327,7 @@ public:
std::list<Entity> entities_;
std::unique_ptr<CameraSensor> sensor_;
V4L2VideoDevice *video_;
+ V4L2Subdevice *frameStartEmitter_;
std::vector<Configuration> configs_;
std::map<PixelFormat, std::vector<const Configuration *>> formats_;
@@ -541,6 +542,13 @@ SimpleCameraData::SimpleCameraData(SimplePipelineHandler *pipe,
if (!sensor_)
return;
+ const CameraSensorProperties::SensorDelays &delays = sensor_->sensorDelays();
+ std::unordered_map<uint32_t, DelayedControls::ControlParams> params = {
+ { V4L2_CID_ANALOGUE_GAIN, { delays.gainDelay, false } },
+ { V4L2_CID_EXPOSURE, { delays.exposureDelay, false } },
+ };
+ delayedCtrls_ = std::make_unique<DelayedControls>(sensor_->device(), params);
+
LOG(SimplePipeline, Debug)
<< "Found pipeline: "
<< utils::join(entities_, " -> ",
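
Moving the DelayedControls construction into the constructor ties the gain and exposure delay parameters to the sensor from the start. As a rough illustration of what those per-control delays mean (hypothetical toy code, not the DelayedControls class): a value pushed while frame N is being captured only takes effect a fixed number of frames later.

#include <cstdint>
#include <iostream>
#include <vector>

int main()
{
	const unsigned int exposureDelay = 2;	/* made-up, stands in for delays.exposureDelay */
	const std::vector<int> queued = { 100, 200, 300, 400 };	/* values pushed at frames 0..3 */

	for (uint32_t frame = 0; frame < queued.size(); frame++)
		std::cout << "exposure " << queued[frame] << " pushed at frame " << frame
			  << " takes effect at frame " << frame + exposureDelay << "\n";

	return 0;
}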
@@ -633,6 +641,20 @@ int SimpleCameraData::init()
properties_ = sensor_->properties();
+ /* Find the first subdev that can generate a frame start signal, if any. */
+ frameStartEmitter_ = nullptr;
+ for (const Entity &entity : entities_) {
+ V4L2Subdevice *sd = pipe->subdev(entity.entity);
+ if (!sd || !sd->supportsFrameStartEvent())
+ continue;
+
+ LOG(SimplePipeline, Debug)
+ << "Using frameStart signal from '"
+ << entity.entity->name() << "'";
+ frameStartEmitter_ = sd;
+ break;
+ }
+
return 0;
}
@@ -983,8 +1005,18 @@ void SimpleCameraData::metadataReady(uint32_t frame, const ControlList &metadata
void SimpleCameraData::setSensorControls(const ControlList &sensorControls)
{
delayedCtrls_->push(sensorControls);
- ControlList ctrls(sensorControls);
- sensor_->setControls(&ctrls);
+ /*
+ * Directly apply controls now if there is no frameStart signal.
+ *
+ * \todo Applying controls directly not only increases the risk of
+ * applying them to the wrong frame (or across a frame boundary),
+ * but it also bypasses delayedCtrls_, creating AGC regulation issues.
+ * Both problems should be fixed.
+ */
+ if (!frameStartEmitter_) {
+ ControlList ctrls(sensorControls);
+ sensor_->setControls(&ctrls);
+ }
}
/* Retrieve all source pads connected to a sink pad through active routes. */
@@ -1363,17 +1395,6 @@ int SimplePipelineHandler::configure(Camera *camera, CameraConfiguration *c)
if (outputCfgs.empty())
return 0;
- const CameraSensorProperties::SensorDelays &delays = data->sensor_->sensorDelays();
- std::unordered_map<uint32_t, DelayedControls::ControlParams> params = {
- { V4L2_CID_ANALOGUE_GAIN, { delays.gainDelay, false } },
- { V4L2_CID_EXPOSURE, { delays.exposureDelay, false } },
- };
- data->delayedCtrls_ =
- std::make_unique<DelayedControls>(data->sensor_->device(),
- params);
- data->video_->frameStart.connect(data->delayedCtrls_.get(),
- &DelayedControls::applyControls);
-
StreamConfiguration inputCfg;
inputCfg.pixelFormat = pipeConfig->captureFormat;
inputCfg.size = pipeConfig->captureSize;
@@ -1411,6 +1432,7 @@ int SimplePipelineHandler::start(Camera *camera, [[maybe_unused]] const ControlL
{
SimpleCameraData *data = cameraData(camera);
V4L2VideoDevice *video = data->video_;
+ V4L2Subdevice *frameStartEmitter = data->frameStartEmitter_;
int ret;
const MediaPad *pad = acquirePipeline(data);
@@ -1440,6 +1462,17 @@ int SimplePipelineHandler::start(Camera *camera, [[maybe_unused]] const ControlL
video->bufferReady.connect(data, &SimpleCameraData::imageBufferReady);
+ data->delayedCtrls_->reset();
+ if (frameStartEmitter) {
+ ret = frameStartEmitter->setFrameStartEnabled(true);
+ if (ret) {
+ stop(camera);
+ return ret;
+ }
+ frameStartEmitter->frameStart.connect(data->delayedCtrls_.get(),
+ &DelayedControls::applyControls);
+ }
+
ret = video->streamOn();
if (ret < 0) {
stop(camera);
@@ -1471,6 +1504,13 @@ void SimplePipelineHandler::stopDevice(Camera *camera)
{
SimpleCameraData *data = cameraData(camera);
V4L2VideoDevice *video = data->video_;
+ V4L2Subdevice *frameStartEmitter = data->frameStartEmitter_;
+
+ if (frameStartEmitter) {
+ frameStartEmitter->setFrameStartEnabled(false);
+ frameStartEmitter->frameStart.disconnect(data->delayedCtrls_.get(),
+ &DelayedControls::applyControls);
+ }
if (data->useConversion_) {
if (data->converter_)
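
Taken together, start() now arms the frame start event and routes it into the delayed controls, and stopDevice() tears that wiring down again. A self-contained toy model of the signal fan-out (hypothetical types, not libcamera's Signal or DelayedControls):

#include <cstdint>
#include <functional>
#include <iostream>
#include <vector>

/* Hypothetical stand-in for a libcamera-style signal. */
struct ToySignal {
	std::vector<std::function<void(uint32_t)>> listeners;

	void connect(std::function<void(uint32_t)> fn) { listeners.push_back(std::move(fn)); }
	void emit(uint32_t seq) { for (auto &fn : listeners) fn(seq); }
};

int main()
{
	ToySignal frameStart;

	/* start(): hook the delayed-controls application to the frame start signal. */
	frameStart.connect([](uint32_t seq) {
		std::cout << "applyControls() for frame " << seq << "\n";
	});

	/* Each frame start event received by the subdev emits one notification. */
	for (uint32_t seq = 0; seq < 3; seq++)
		frameStart.emit(seq);

	/* stopDevice(): the real code disconnects and disables the event again. */
	return 0;
}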
diff --git a/src/libcamera/v4l2_device.cpp b/src/libcamera/v4l2_device.cpp
index 2f65a43a..0db92c19 100644
--- a/src/libcamera/v4l2_device.cpp
+++ b/src/libcamera/v4l2_device.cpp
@@ -450,6 +450,28 @@ std::string V4L2Device::devicePath() const
}
/**
+ * \brief Check if frame start event is supported
+ *
+ * Due to limitations in the kernel API, this function may disable the frame
+ * start event as a side effect. It should only be called during initialization,
+ * before enabling the frame start event with setFrameStartEnabled().
+ *
+ * \return True if frame start event is supported, false otherwise
+ */
+bool V4L2Device::supportsFrameStartEvent()
+{
+ struct v4l2_event_subscription event{};
+ event.type = V4L2_EVENT_FRAME_SYNC;
+
+ int ret = ioctl(VIDIOC_SUBSCRIBE_EVENT, &event);
+ if (ret)
+ return false;
+
+ ioctl(VIDIOC_UNSUBSCRIBE_EVENT, &event);
+ return true;
+}
+
+/**
* \brief Enable or disable frame start event notification
* \param[in] enable True to enable frame start events, false to disable them
*
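
The probe in supportsFrameStartEvent() works purely through event subscription, which is why it can tear down a subscription that is already active and must run before setFrameStartEnabled(). A standalone sketch of the same idea against a raw device node (the node path is a placeholder; error handling is minimal):

#include <fcntl.h>
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <unistd.h>

#include <iostream>

int main()
{
	/* Placeholder node; the real code goes through the V4L2Device wrapper. */
	int fd = open("/dev/v4l-subdev0", O_RDWR);
	if (fd < 0)
		return 1;

	struct v4l2_event_subscription sub = {};
	sub.type = V4L2_EVENT_FRAME_SYNC;

	/* If the kernel accepts the subscription, the event is supported... */
	bool supported = ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub) == 0;

	/* ...and unsubscribing immediately leaves no subscription behind. */
	if (supported)
		ioctl(fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);

	std::cout << (supported ? "frame start event supported" : "not supported") << "\n";
	close(fd);
	return 0;
}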