-rw-r--r--   src/libcamera/pipeline/rpi/common/pipeline_base.cpp   66
-rw-r--r--   src/libcamera/pipeline/rpi/common/pipeline_base.h      4
-rw-r--r--   src/libcamera/pipeline/rpi/vc4/vc4.cpp                 31
3 files changed, 87 insertions(+), 14 deletions(-)
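For context, the patch threads an application-supplied sensor configuration through validate() and configure(). A minimal, illustrative sketch of how an application might request one (assuming libcamera's public SensorConfiguration type and the sensorConfig member referenced in the hunks below; the 10-bit 1332x990 mode and helper name are hypothetical, not part of this patch) could look like:

#include <memory>
#include <libcamera/libcamera.h>

using namespace libcamera;

/*
 * Illustrative sketch only: ask for a specific sensor mode on an
 * already-acquired camera. validate() returns Invalid if the sensor
 * cannot produce the requested bit depth and output size unmodified,
 * as implemented in the hunks below.
 */
int configureWithSensorMode(std::shared_ptr<Camera> &camera)
{
	std::unique_ptr<CameraConfiguration> config =
		camera->generateConfiguration({ StreamRole::Viewfinder });
	if (!config)
		return -EINVAL;

	SensorConfiguration sensorConfig;
	sensorConfig.bitDepth = 10;
	sensorConfig.outputSize = { 1332, 990 };
	config->sensorConfig = sensorConfig;

	if (config->validate() == CameraConfiguration::Invalid)
		return -EINVAL;

	return camera->configure(config.get());
}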
diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
index 51fa1bbf..40630ddf 100644
--- a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
+++ b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
@@ -180,6 +180,15 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 	if (config_.empty())
 		return Invalid;
 
+	/*
+	 * Make sure that if a sensor configuration has been requested it
+	 * is valid.
+	 */
+	if (sensorConfig && !sensorConfig->isValid()) {
+		LOG(RPI, Error) << "Invalid sensor configuration request";
+		return Invalid;
+	}
+
 	status = validateColorSpaces(ColorSpaceFlag::StreamsShareColorSpace);
 
 	/*
@@ -207,19 +216,43 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 	std::sort(outStreams.begin(), outStreams.end(),
 		  [](auto &l, auto &r) { return l.cfg->size > r.cfg->size; });
 
-	/* Compute the sensor configuration. */
-	unsigned int bitDepth = defaultRawBitDepth;
-	if (!rawStreams.empty()) {
+	/* Compute the sensor's format then do any platform specific fixups. */
+	unsigned int bitDepth;
+	Size sensorSize;
+
+	if (sensorConfig) {
+		/* Use the application provided sensor configuration. */
+		bitDepth = sensorConfig->bitDepth;
+		sensorSize = sensorConfig->outputSize;
+	} else if (!rawStreams.empty()) {
+		/* Use the RAW stream format and size. */
 		BayerFormat bayerFormat = BayerFormat::fromPixelFormat(rawStreams[0].cfg->pixelFormat);
 		bitDepth = bayerFormat.bitDepth;
+		sensorSize = rawStreams[0].cfg->size;
+	} else {
+		bitDepth = defaultRawBitDepth;
+		sensorSize = outStreams[0].cfg->size;
 	}
 
-	sensorFormat_ = data_->findBestFormat(rawStreams.empty() ? outStreams[0].cfg->size
-							: rawStreams[0].cfg->size,
-					      bitDepth);
+	sensorFormat_ = data_->findBestFormat(sensorSize, bitDepth);
+
+	/*
+	 * If a sensor configuration has been requested, it should apply
+	 * without modifications.
+	 */
+	if (sensorConfig) {
+		BayerFormat bayer = BayerFormat::fromMbusCode(sensorFormat_.mbus_code);
+
+		if (bayer.bitDepth != sensorConfig->bitDepth ||
+		    sensorFormat_.size != sensorConfig->outputSize) {
+			LOG(RPI, Error) << "Invalid sensor configuration: "
+					<< "bitDepth/size mismatch";
+			return Invalid;
+		}
+	}
 
 	/* Do any platform specific fixups. */
-	status = data_->platformValidate(rawStreams, outStreams);
+	status = data_->platformValidate(this, rawStreams, outStreams);
 	if (status == Invalid)
 		return Invalid;
@@ -467,12 +500,25 @@ int PipelineHandlerBase::configure(Camera *camera, CameraConfiguration *config)
 	std::sort(ispStreams.begin(), ispStreams.end(),
 		  [](auto &l, auto &r) { return l.cfg->size > r.cfg->size; });
 
-	/* Apply the format on the sensor with any cached transform. */
+	/*
+	 * Apply the format on the sensor with any cached transform.
+	 *
+	 * If the application has provided a sensor configuration apply it
+	 * instead of just applying a format.
+	 */
 	const RPiCameraConfiguration *rpiConfig =
 		static_cast<const RPiCameraConfiguration *>(config);
-	V4L2SubdeviceFormat sensorFormat = rpiConfig->sensorFormat_;
+	V4L2SubdeviceFormat sensorFormat;
 
-	ret = data->sensor_->setFormat(&sensorFormat, rpiConfig->combinedTransform_);
+	if (rpiConfig->sensorConfig) {
+		ret = data->sensor_->applyConfiguration(*rpiConfig->sensorConfig,
+							rpiConfig->combinedTransform_,
+							&sensorFormat);
+	} else {
+		sensorFormat = rpiConfig->sensorFormat_;
+		ret = data->sensor_->setFormat(&sensorFormat,
+					       rpiConfig->combinedTransform_);
+	}
 	if (ret)
 		return ret;
diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.h b/src/libcamera/pipeline/rpi/common/pipeline_base.h
index dbabc61e..81b2b7d2 100644
--- a/src/libcamera/pipeline/rpi/common/pipeline_base.h
+++ b/src/libcamera/pipeline/rpi/common/pipeline_base.h
@@ -42,6 +42,7 @@ namespace RPi {
 /* Map of mbus codes to supported sizes reported by the sensor. */
 using SensorFormats = std::map<unsigned int, std::vector<Size>>;
 
+class RPiCameraConfiguration;
 class CameraData : public Camera::Private
 {
 public:
@@ -72,7 +73,8 @@ public:
 		V4L2VideoDevice *dev;
 	};
 
-	virtual CameraConfiguration::Status platformValidate(std::vector<StreamParams> &rawStreams,
+	virtual CameraConfiguration::Status platformValidate(RPiCameraConfiguration *rpiConfig,
+							      std::vector<StreamParams> &rawStreams,
 							      std::vector<StreamParams> &outStreams) const = 0;
 	virtual int platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
 				      std::optional<BayerFormat::Packing> packing,
diff --git a/src/libcamera/pipeline/rpi/vc4/vc4.cpp b/src/libcamera/pipeline/rpi/vc4/vc4.cpp
index 018cf488..46504636 100644
--- a/src/libcamera/pipeline/rpi/vc4/vc4.cpp
+++ b/src/libcamera/pipeline/rpi/vc4/vc4.cpp
@@ -65,7 +65,8 @@ public:
 	{
 	}
 
-	CameraConfiguration::Status platformValidate(std::vector<StreamParams> &rawStreams,
+	CameraConfiguration::Status platformValidate(RPi::RPiCameraConfiguration *rpiConfig,
+						     std::vector<StreamParams> &rawStreams,
 						     std::vector<StreamParams> &outStreams) const override;
 	int platformPipelineConfigure(const std::unique_ptr<YamlObject> &root) override;
@@ -394,7 +395,8 @@ int PipelineHandlerVc4::platformRegister(std::unique_ptr<RPi::CameraData> &camer
 	return 0;
 }
 
-CameraConfiguration::Status Vc4CameraData::platformValidate(std::vector<StreamParams> &rawStreams,
+CameraConfiguration::Status Vc4CameraData::platformValidate(RPi::RPiCameraConfiguration *rpiConfig,
+							     std::vector<StreamParams> &rawStreams,
 							     std::vector<StreamParams> &outStreams) const
 {
 	CameraConfiguration::Status status = CameraConfiguration::Status::Valid;
@@ -405,9 +407,27 @@ CameraConfiguration::Status Vc4CameraData::platformValidate(std::vector<StreamPa
 		return CameraConfiguration::Status::Invalid;
 	}
 
-	if (!rawStreams.empty())
+	if (!rawStreams.empty()) {
 		rawStreams[0].dev = unicam_[Unicam::Image].dev();
 
+		/* Adjust the RAW stream to match the computed sensor format. */
+		StreamConfiguration *rawStream = rawStreams[0].cfg;
+		BayerFormat rawBayer = BayerFormat::fromMbusCode(rpiConfig->sensorFormat_.mbus_code);
+
+		/* Handle flips to make sure to match the RAW stream format. */
+		if (flipsAlterBayerOrder_)
+			rawBayer = rawBayer.transform(rpiConfig->combinedTransform_);
+		PixelFormat rawFormat = rawBayer.toPixelFormat();
+
+		if (rawStream->pixelFormat != rawFormat ||
+		    rawStream->size != rpiConfig->sensorFormat_.size) {
+			rawStream->pixelFormat = rawFormat;
+			rawStream->size = rpiConfig->sensorFormat_.size;
+
+			status = CameraConfiguration::Adjusted;
+		}
+	}
+
 	/*
 	 * For the two ISP outputs, one stream must be equal or smaller than the
 	 * other in all dimensions.
@@ -417,6 +437,11 @@ CameraConfiguration::Status Vc4CameraData::platformValidate(std::vector<StreamPa
 	for (unsigned int i = 0; i < outStreams.size(); i++) {
 		Size size;
 
+		/*
+		 * \todo Should we warn if upscaling, as it reduces the image
+		 * quality and is usually undesired ?
+		 */
+
 		size.width = std::min(outStreams[i].cfg->size.width,
 				      outStreams[0].cfg->size.width);
 		size.height = std::min(outStreams[i].cfg->size.height,