 src/libcamera/pipeline/raspberrypi/raspberrypi.cpp | 116 ++++++++++++++++++++-
 1 file changed, 115 insertions(+), 1 deletion(-)
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index c912fd16..4323c98a 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -312,6 +312,7 @@ class RPiCameraConfiguration : public CameraConfiguration
public:
	RPiCameraConfiguration(const RPiCameraData *data);
+	CameraConfiguration::Status validateColorSpaces(ColorSpaceFlags flags);
	Status validate() override;
	/* Cache the combinedTransform_ that will be applied to the sensor */
@@ -319,6 +320,14 @@ public:
private:
	const RPiCameraData *data_;
+
+	/*
+	 * Store the colour spaces that all our streams will have. RGB format
+	 * streams will share the same colour space as the YUV streams, but with
+	 * the YCbCr encoding cleared and the range set to full.
+	 */
+	std::optional<ColorSpace> yuvColorSpace_;
+	std::optional<ColorSpace> rgbColorSpace_;
};
class PipelineHandlerRPi : public PipelineHandler
@@ -359,6 +368,105 @@ RPiCameraConfiguration::RPiCameraConfiguration(const RPiCameraData *data)
{
}
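+/* The non-raw colour spaces we can pass on to the Raspberry Pi drivers. */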
+static const std::vector<ColorSpace> validColorSpaces = {
+	ColorSpace::Sycc,
+	ColorSpace::Smpte170m,
+	ColorSpace::Rec709
+};
+
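+/*
+ * Match a requested colour space against the list above, comparing only the
+ * primaries and transfer function, and return std::nullopt if nothing fits.
+ */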
+static std::optional<ColorSpace> findValidColorSpace(const ColorSpace &colourSpace)
+{
+	for (auto cs : validColorSpaces) {
+		if (colourSpace.primaries == cs.primaries &&
+		    colourSpace.transferFunction == cs.transferFunction)
+			return cs;
+	}
+
+	return std::nullopt;
+}
+
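+/* Return true if the pixel format has an RGB colour encoding. */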
+static bool isRgb(const PixelFormat &pixFmt)
+{
+	const PixelFormatInfo &info = PixelFormatInfo::info(pixFmt);
+	return info.colourEncoding == PixelFormatInfo::ColourEncodingRGB;
+}
+
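+/* Return true if the pixel format has a YUV colour encoding (raw formats excluded). */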
+static bool isYuv(const PixelFormat &pixFmt)
+{
+	/* The code below would return true for raw mono streams, so weed those out first. */
+	if (isRaw(pixFmt))
+		return false;
+
+	const PixelFormatInfo &info = PixelFormatInfo::info(pixFmt);
+	return info.colourEncoding == PixelFormatInfo::ColourEncodingYUV;
+}
+
+/*
+ * The Raspberry Pi drivers expect the following colour spaces:
+ * - V4L2_COLORSPACE_RAW for raw streams.
+ * - One of V4L2_COLORSPACE_JPEG, V4L2_COLORSPACE_SMPTE170M or V4L2_COLORSPACE_REC709
+ *   for non-raw streams. Other fields such as the transfer function, YCbCr
+ *   encoding and quantisation are not used.
+ *
+ * The corresponding libcamera colour spaces that we wish to use are therefore:
+ * - ColorSpace::Raw for V4L2_COLORSPACE_RAW
+ * - ColorSpace::Sycc for V4L2_COLORSPACE_JPEG
+ * - ColorSpace::Smpte170m for V4L2_COLORSPACE_SMPTE170M
+ * - ColorSpace::Rec709 for V4L2_COLORSPACE_REC709
+ */
+
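+/*
+ * Force all non-raw streams to share one colour space: raw streams are given
+ * ColorSpace::Raw, the first valid colour space requested (or sYCC if none was
+ * given) becomes the YUV colour space, and RGB streams reuse it with the YCbCr
+ * encoding cleared and the range set to full.
+ */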
+CameraConfiguration::Status RPiCameraConfiguration::validateColorSpaces([[maybe_unused]] ColorSpaceFlags flags)
+{
+	Status status = Valid;
+	yuvColorSpace_.reset();
+
+	for (auto &cfg : config_) {
+		/* First fix up raw streams to have the "raw" colour space. */
+		if (isRaw(cfg.pixelFormat)) {
+			/* If there was no value here, that doesn't count as "adjusted". */
+			if (cfg.colorSpace && cfg.colorSpace != ColorSpace::Raw) {
+				status = Adjusted;
+				cfg.colorSpace = ColorSpace::Raw;
+			}
+			continue;
+		}
+
+		/* Next we need to find our shared colour space. The first valid one will do. */
+		if (cfg.colorSpace && !yuvColorSpace_)
+			yuvColorSpace_ = findValidColorSpace(cfg.colorSpace.value());
+	}
+
+	/* If no colour space was given anywhere, choose sYCC. */
+	if (!yuvColorSpace_)
+		yuvColorSpace_ = ColorSpace::Sycc;
+
+	/* Note the version of this colour space that any RGB streams will have to use. */
+	rgbColorSpace_ = yuvColorSpace_;
+	rgbColorSpace_->ycbcrEncoding = ColorSpace::YcbcrEncoding::None;
+	rgbColorSpace_->range = ColorSpace::Range::Full;
+
+	/* Go through the streams again and force everyone to the same colour space. */
+	for (auto &cfg : config_) {
+		if (cfg.colorSpace == ColorSpace::Raw)
+			continue;
+
+		if (isYuv(cfg.pixelFormat) && cfg.colorSpace != yuvColorSpace_) {
+			/* Again, no value means "not adjusted". */
+			if (cfg.colorSpace)
+				status = Adjusted;
+			cfg.colorSpace = yuvColorSpace_;
+		}
+		if (isRgb(cfg.pixelFormat) && cfg.colorSpace != rgbColorSpace_) {
+			/* Be nice, and let the YUV version count as non-adjusted too. */
+			if (cfg.colorSpace && cfg.colorSpace != yuvColorSpace_)
+				status = Adjusted;
+			cfg.colorSpace = rgbColorSpace_;
+		}
+	}
+
+	return status;
+}
+
CameraConfiguration::Status RPiCameraConfiguration::validate()
{
	Status status = Valid;
@@ -490,7 +598,8 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
		V4L2DeviceFormat format;
		format.fourcc = dev->toV4L2PixelFormat(cfg.pixelFormat);
		format.size = cfg.size;
-		format.colorSpace = cfg.colorSpace;
+		/* We want to send the associated YCbCr info through to the driver. */
+		format.colorSpace = yuvColorSpace_;
		LOG(RPI, Debug)
			<< "Try color space " << ColorSpace::toString(cfg.colorSpace);
@@ -499,6 +608,11 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
		if (ret)
			return Invalid;
+		/*
+		 * But for RGB streams, the YCbCr info gets overwritten on the way back,
+		 * so we must check against what the stream cfg says, not what we actually
+		 * requested (which carefully included the YCbCr info)!
+		 */
		if (cfg.colorSpace != format.colorSpace) {
			status = Adjusted;
			LOG(RPI, Debug)