author    Naushir Patuck <naush@raspberrypi.com>    2021-03-02 15:11:08 +0000
committer Laurent Pinchart <laurent.pinchart@ideasonboard.com>    2021-03-04 04:14:54 +0200
commit    cd07b604baa62ab89265d54afeac968322a98a34 (patch)
tree      d02b60667e65172df822c3f60c7526127c754834
parent    f490a87fd339fc7443f5d8467ba56a35c750a5f7 (diff)
pipeline: ipa: raspberrypi: Pass exposure/gain values to IPA through controls
When running with sensors that have no embedded data, the pipeline handler would fill a dummy embedded data buffer with gain/exposure values and pass this buffer to the IPA together with the bayer buffer. The IPA would then extract these values for use in the controller algorithms.

Rework this logic entirely by adding a new RPiCameraData::BayerFrame queue to replace the existing bayer queue. In addition to storing the FrameBuffer pointer, this queue also stores, in a ControlList, all the controls tracked by DelayedControls for that frame. This includes the exposure and gain values.

On signalling the RPi::IPA_EVENT_SIGNAL_ISP_PREPARE IPA event, the pipeline handler now passes this ControlList from the RPiCameraData::BayerFrame queue. The IPA now extracts the gain and exposure values from the ControlList instead of using RPiController::MdParserRPi to parse the embedded data buffer.

Signed-off-by: Naushir Patuck <naush@raspberrypi.com>
Tested-by: David Plowman <david.plowman@raspberrypi.com>
Reviewed-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
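For context, the core of the change is pairing each bayer FrameBuffer with the sensor controls that DelayedControls reports for that frame, so the IPA can fill its DeviceStatus without an embedded data buffer. The standalone C++ sketch below illustrates that flow under simplified assumptions: FrameBuffer, ControlList and the control IDs are stand-ins rather than the libcamera API, and the exposure/gain conversion factors are invented for illustration.

/*
 * Standalone sketch (simplified stand-ins, not the libcamera API) of the
 * flow introduced by this commit: the pipeline handler queues the sensor
 * controls for a frame alongside its bayer buffer, and the IPA derives
 * DeviceStatus from those controls instead of parsing embedded data.
 */
#include <cstdint>
#include <iostream>
#include <map>
#include <queue>
#include <utility>

/* Stand-ins for the V4L2 exposure and analogue gain control IDs. */
constexpr uint32_t CID_EXPOSURE = 1;
constexpr uint32_t CID_ANALOGUE_GAIN = 2;

struct FrameBuffer { int id; };                  /* placeholder buffer */
using ControlList = std::map<uint32_t, int32_t>; /* placeholder controls */

/* Mirrors the new RPiCameraData::BayerFrame: a buffer plus its controls. */
struct BayerFrame {
    FrameBuffer *buffer;
    ControlList controls;
};

struct DeviceStatus {
    double shutterSpeed; /* microseconds */
    double analogueGain;
};

/*
 * IPA side: fill DeviceStatus from the queued controls. The conversion
 * factors here are invented; the real code delegates to the camera helper
 * (helper_->Exposure() / helper_->Gain()).
 */
DeviceStatus statusFromControls(const ControlList &ctrls)
{
    int32_t exposureLines = ctrls.at(CID_EXPOSURE);
    int32_t gainCode = ctrls.at(CID_ANALOGUE_GAIN);
    return { exposureLines * 18.9, gainCode / 16.0 };
}

int main()
{
    std::queue<BayerFrame> bayerQueue;
    FrameBuffer buf{ 0 };

    /*
     * Pipeline handler side: attach the controls that DelayedControls
     * reports for this frame's sequence number to the bayer buffer.
     */
    ControlList ctrls{ { CID_EXPOSURE, 1200 }, { CID_ANALOGUE_GAIN, 128 } };
    bayerQueue.push({ &buf, std::move(ctrls) });

    /* IPA side: consume the frame and its controls together. */
    BayerFrame frame = std::move(bayerQueue.front());
    bayerQueue.pop();

    DeviceStatus status = statusFromControls(frame.controls);
    std::cout << "shutter " << status.shutterSpeed << " us, gain "
              << status.analogueGain << "\n";
    return 0;
}

In the actual change below, ISPConfig::embeddedBufferPresent tells the IPA whether an embedded data buffer accompanies the frame; the ControlList path is used when it does not, or when parsing the embedded data fails.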
-rw-r--r--  include/libcamera/ipa/raspberrypi.mojom               2
-rw-r--r--  src/ipa/raspberrypi/raspberrypi.cpp                 132
-rw-r--r--  src/libcamera/pipeline/raspberrypi/raspberrypi.cpp   62
3 files changed, 111 insertions, 85 deletions
diff --git a/include/libcamera/ipa/raspberrypi.mojom b/include/libcamera/ipa/raspberrypi.mojom
index 5a27b1e4..f733a2cd 100644
--- a/include/libcamera/ipa/raspberrypi.mojom
+++ b/include/libcamera/ipa/raspberrypi.mojom
@@ -29,6 +29,8 @@ struct SensorConfig {
struct ISPConfig {
uint32 embeddedBufferId;
uint32 bayerBufferId;
+ bool embeddedBufferPresent;
+ ControlList controls;
};
struct ConfigInput {
diff --git a/src/ipa/raspberrypi/raspberrypi.cpp b/src/ipa/raspberrypi/raspberrypi.cpp
index 1a3716c3..6348d071 100644
--- a/src/ipa/raspberrypi/raspberrypi.cpp
+++ b/src/ipa/raspberrypi/raspberrypi.cpp
@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: BSD-2-Clause */
/*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Ltd.
*
* rpi.cpp - Raspberry Pi Image Processing Algorithms
*/
@@ -101,9 +101,11 @@ private:
bool validateIspControls();
void queueRequest(const ControlList &controls);
void returnEmbeddedBuffer(unsigned int bufferId);
- void prepareISP(unsigned int bufferId);
+ void prepareISP(const ipa::RPi::ISPConfig &data);
void reportMetadata();
bool parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &deviceStatus);
+ void fillDeviceStatus(uint32_t exposureLines, uint32_t gainCode,
+ struct DeviceStatus &deviceStatus);
void processStats(unsigned int bufferId);
void applyFrameDurations(double minFrameDuration, double maxFrameDuration);
void applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls);
@@ -447,7 +449,7 @@ void IPARPi::signalIspPrepare(const ipa::RPi::ISPConfig &data)
* avoid running the control algos for a few frames in case
* they are "unreliable".
*/
- prepareISP(data.embeddedBufferId);
+ prepareISP(data);
frameCount_++;
/* Ready to push the input buffer into the ISP. */
@@ -913,67 +915,84 @@ void IPARPi::returnEmbeddedBuffer(unsigned int bufferId)
embeddedComplete.emit(bufferId & ipa::RPi::MaskID);
}
-void IPARPi::prepareISP(unsigned int bufferId)
+void IPARPi::prepareISP(const ipa::RPi::ISPConfig &data)
{
struct DeviceStatus deviceStatus = {};
- bool success = parseEmbeddedData(bufferId, deviceStatus);
+ bool success = false;
- /* Done with embedded data now, return to pipeline handler asap. */
- returnEmbeddedBuffer(bufferId);
+ if (data.embeddedBufferPresent) {
+ /*
+ * Pipeline handler has supplied us with an embedded data buffer,
+ * so parse it and extract the exposure and gain.
+ */
+ success = parseEmbeddedData(data.embeddedBufferId, deviceStatus);
- if (success) {
- ControlList ctrls(ispCtrls_);
+ /* Done with embedded data now, return to pipeline handler asap. */
+ returnEmbeddedBuffer(data.embeddedBufferId);
+ }
- rpiMetadata_.Clear();
- rpiMetadata_.Set("device.status", deviceStatus);
- controller_.Prepare(&rpiMetadata_);
+ if (!success) {
+ /*
+ * Pipeline handler has not supplied an embedded data buffer,
+ * or embedded data buffer parsing has failed for some reason,
+ * so pull the exposure and gain values from the control list.
+ */
+ int32_t exposureLines = data.controls.get(V4L2_CID_EXPOSURE).get<int32_t>();
+ int32_t gainCode = data.controls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
+ fillDeviceStatus(exposureLines, gainCode, deviceStatus);
+ }
- /* Lock the metadata buffer to avoid constant locks/unlocks. */
- std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
+ ControlList ctrls(ispCtrls_);
- AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
- if (awbStatus)
- applyAWB(awbStatus, ctrls);
+ rpiMetadata_.Clear();
+ rpiMetadata_.Set("device.status", deviceStatus);
+ controller_.Prepare(&rpiMetadata_);
- CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
- if (ccmStatus)
- applyCCM(ccmStatus, ctrls);
+ /* Lock the metadata buffer to avoid constant locks/unlocks. */
+ std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
- AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
- if (dgStatus)
- applyDG(dgStatus, ctrls);
+ AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
+ if (awbStatus)
+ applyAWB(awbStatus, ctrls);
- AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
- if (lsStatus)
- applyLS(lsStatus, ctrls);
+ CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
+ if (ccmStatus)
+ applyCCM(ccmStatus, ctrls);
- ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
- if (contrastStatus)
- applyGamma(contrastStatus, ctrls);
+ AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
+ if (dgStatus)
+ applyDG(dgStatus, ctrls);
- BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
- if (blackLevelStatus)
- applyBlackLevel(blackLevelStatus, ctrls);
+ AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
+ if (lsStatus)
+ applyLS(lsStatus, ctrls);
- GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
- if (geqStatus)
- applyGEQ(geqStatus, ctrls);
+ ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
+ if (contrastStatus)
+ applyGamma(contrastStatus, ctrls);
- DenoiseStatus *denoiseStatus = rpiMetadata_.GetLocked<DenoiseStatus>("denoise.status");
- if (denoiseStatus)
- applyDenoise(denoiseStatus, ctrls);
+ BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
+ if (blackLevelStatus)
+ applyBlackLevel(blackLevelStatus, ctrls);
- SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
- if (sharpenStatus)
- applySharpen(sharpenStatus, ctrls);
+ GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
+ if (geqStatus)
+ applyGEQ(geqStatus, ctrls);
- DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
- if (dpcStatus)
- applyDPC(dpcStatus, ctrls);
+ DenoiseStatus *denoiseStatus = rpiMetadata_.GetLocked<DenoiseStatus>("denoise.status");
+ if (denoiseStatus)
+ applyDenoise(denoiseStatus, ctrls);
- if (!ctrls.empty())
- setIspControls.emit(ctrls);
- }
+ SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
+ if (sharpenStatus)
+ applySharpen(sharpenStatus, ctrls);
+
+ DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
+ if (dpcStatus)
+ applyDPC(dpcStatus, ctrls);
+
+ if (!ctrls.empty())
+ setIspControls.emit(ctrls);
}
bool IPARPi::parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &deviceStatus)
@@ -989,6 +1008,7 @@ bool IPARPi::parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &devic
RPiController::MdParser::Status status = helper_->Parser().Parse(mem.data());
if (status != RPiController::MdParser::Status::OK) {
LOG(IPARPI, Error) << "Embedded Buffer parsing failed, error " << status;
+ return false;
} else {
uint32_t exposureLines, gainCode;
if (helper_->Parser().GetExposureLines(exposureLines) != RPiController::MdParser::Status::OK) {
@@ -996,21 +1016,29 @@ bool IPARPi::parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &devic
return false;
}
- deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
if (helper_->Parser().GetGainCode(gainCode) != RPiController::MdParser::Status::OK) {
LOG(IPARPI, Error) << "Gain failed";
return false;
}
- deviceStatus.analogue_gain = helper_->Gain(gainCode);
- LOG(IPARPI, Debug) << "Metadata - Exposure : "
- << deviceStatus.shutter_speed << " Gain : "
- << deviceStatus.analogue_gain;
+ fillDeviceStatus(exposureLines, gainCode, deviceStatus);
}
return true;
}
+void IPARPi::fillDeviceStatus(uint32_t exposureLines, uint32_t gainCode,
+ struct DeviceStatus &deviceStatus)
+{
+ deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
+ deviceStatus.analogue_gain = helper_->Gain(gainCode);
+
+ LOG(IPARPI, Debug) << "Metadata - Exposure : "
+ << deviceStatus.shutter_speed
+ << " Gain : "
+ << deviceStatus.analogue_gain;
+}
+
void IPARPi::processStats(unsigned int bufferId)
{
auto it = buffers_.find(bufferId);
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index 46b7c6db..218da71f 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -1,6 +1,6 @@
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Ltd.
*
* raspberrypi.cpp - Pipeline handler for Raspberry Pi devices
*/
@@ -197,7 +197,13 @@ public:
*/
enum class State { Stopped, Idle, Busy, IpaComplete };
State state_;
- std::queue<FrameBuffer *> bayerQueue_;
+
+ struct BayerFrame {
+ FrameBuffer *buffer;
+ ControlList controls;
+ };
+
+ std::queue<BayerFrame> bayerQueue_;
std::queue<FrameBuffer *> embeddedQueue_;
std::deque<Request *> requestQueue_;
@@ -222,7 +228,7 @@ public:
private:
void checkRequestCompleted();
void tryRunPipeline();
- bool findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *&embeddedBuffer);
+ bool findMatchingBuffers(BayerFrame &bayerFrame, FrameBuffer *&embeddedBuffer);
unsigned int ispOutputCount_;
};
@@ -1356,7 +1362,7 @@ void RPiCameraData::setIspControls(const ControlList &controls)
void RPiCameraData::setDelayedControls(const ControlList &controls)
{
if (!delayedCtrls_->push(controls))
- LOG(RPI, Error) << "V4L2 staggered set failed";
+ LOG(RPI, Error) << "V4L2 DelayedControl set failed";
handleState();
}
@@ -1384,29 +1390,14 @@ void RPiCameraData::unicamBufferDequeue(FrameBuffer *buffer)
<< ", timestamp: " << buffer->metadata().timestamp;
if (stream == &unicam_[Unicam::Image]) {
- bayerQueue_.push(buffer);
- } else {
- embeddedQueue_.push(buffer);
-
- ControlList ctrl = delayedCtrls_->get(buffer->metadata().sequence);
-
/*
- * Sensor metadata is unavailable, so put the expected ctrl
- * values (accounting for the staggered delays) into the empty
- * metadata buffer.
+ * Lookup the sensor controls used for this frame sequence from
+ * DelayedControl and queue them along with the frame buffer.
*/
- if (!sensorMetadata_) {
- unsigned int bufferId = unicam_[Unicam::Embedded].getBufferId(buffer);
- auto it = mappedEmbeddedBuffers_.find(bufferId);
- if (it != mappedEmbeddedBuffers_.end()) {
- uint32_t *mem = reinterpret_cast<uint32_t *>(it->second.maps()[0].data());
- mem[0] = ctrl.get(V4L2_CID_EXPOSURE).get<int32_t>();
- mem[1] = ctrl.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
- } else {
- LOG(RPI, Warning) << "Failed to find embedded buffer "
- << bufferId;
- }
- }
+ ControlList ctrl = delayedCtrls_->get(buffer->metadata().sequence);
+ bayerQueue_.push({ buffer, std::move(ctrl) });
+ } else {
+ embeddedQueue_.push(buffer);
}
handleState();
@@ -1657,14 +1648,15 @@ void RPiCameraData::applyScalerCrop(const ControlList &controls)
void RPiCameraData::tryRunPipeline()
{
- FrameBuffer *bayerBuffer, *embeddedBuffer;
+ FrameBuffer *embeddedBuffer;
+ BayerFrame bayerFrame;
/* If any of our request or buffer queues are empty, we cannot proceed. */
if (state_ != State::Idle || requestQueue_.empty() ||
bayerQueue_.empty() || embeddedQueue_.empty())
return;
- if (!findMatchingBuffers(bayerBuffer, embeddedBuffer))
+ if (!findMatchingBuffers(bayerFrame, embeddedBuffer))
return;
/* Take the first request from the queue and action the IPA. */
@@ -1683,7 +1675,7 @@ void RPiCameraData::tryRunPipeline()
/* Set our state to say the pipeline is active. */
state_ = State::Busy;
- unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerBuffer);
+ unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerFrame.buffer);
unsigned int embeddedId = unicam_[Unicam::Embedded].getBufferId(embeddedBuffer);
LOG(RPI, Debug) << "Signalling signalIspPrepare:"
@@ -1693,17 +1685,19 @@ void RPiCameraData::tryRunPipeline()
ipa::RPi::ISPConfig ispPrepare;
ispPrepare.embeddedBufferId = ipa::RPi::MaskEmbeddedData | embeddedId;
ispPrepare.bayerBufferId = ipa::RPi::MaskBayerData | bayerId;
+ ispPrepare.embeddedBufferPresent = sensorMetadata_;
+ ispPrepare.controls = std::move(bayerFrame.controls);
ipa_->signalIspPrepare(ispPrepare);
}
-bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *&embeddedBuffer)
+bool RPiCameraData::findMatchingBuffers(BayerFrame &bayerFrame, FrameBuffer *&embeddedBuffer)
{
unsigned int embeddedRequeueCount = 0, bayerRequeueCount = 0;
/* Loop until we find a matching bayer and embedded data buffer. */
while (!bayerQueue_.empty()) {
/* Start with the front of the bayer queue. */
- bayerBuffer = bayerQueue_.front();
+ FrameBuffer *bayerBuffer = bayerQueue_.front().buffer;
/*
* Find the embedded data buffer with a matching timestamp to pass to
@@ -1740,7 +1734,7 @@ bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *
* the front of the queue. This buffer is now orphaned, so requeue
* it back to the device.
*/
- unicam_[Unicam::Image].queueBuffer(bayerQueue_.front());
+ unicam_[Unicam::Image].queueBuffer(bayerQueue_.front().buffer);
bayerQueue_.pop();
bayerRequeueCount++;
LOG(RPI, Warning) << "Dropping unmatched input frame in stream "
@@ -1758,7 +1752,7 @@ bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *
LOG(RPI, Warning) << "Flushing bayer stream!";
while (!bayerQueue_.empty()) {
- unicam_[Unicam::Image].queueBuffer(bayerQueue_.front());
+ unicam_[Unicam::Image].queueBuffer(bayerQueue_.front().buffer);
bayerQueue_.pop();
}
flushedBuffers = true;
@@ -1791,8 +1785,10 @@ bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *
} else {
/*
* We have found a matching bayer and embedded data buffer, so
- * nothing more to do apart from popping the buffers from the queue.
+ * nothing more to do apart from assigning the bayer frame and
+ * popping the buffers from the queue.
*/
+ bayerFrame = std::move(bayerQueue_.front());
bayerQueue_.pop();
embeddedQueue_.pop();
return true;