-rw-r--r-- | src/libcamera/pipeline/raspberrypi/raspberrypi.cpp | 40 |
1 file changed, 40 insertions, 0 deletions
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index 9c171cea..b5c687da 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -96,6 +96,7 @@ V4L2DeviceFormat toV4L2DeviceFormat(const V4L2SubdeviceFormat &format,
 
 	deviceFormat.fourcc = V4L2PixelFormat::fromPixelFormat(pix);
 	deviceFormat.size = format.size;
+	deviceFormat.colorSpace = format.colorSpace;
 
 	return deviceFormat;
 }
@@ -132,6 +133,7 @@ V4L2SubdeviceFormat findBestFormat(const SensorFormats &formatsMap, const Size &
 {
 	double bestScore = std::numeric_limits<double>::max(), score;
 	V4L2SubdeviceFormat bestFormat;
+	bestFormat.colorSpace = ColorSpace::Raw;
 
 	constexpr float penaltyAr = 1500.0;
 	constexpr float penaltyBitDepth = 500.0;
@@ -329,6 +331,8 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 	if (config_.empty())
 		return Invalid;
 
+	status = validateColorSpaces(ColorSpaceFlag::StreamsShareColorSpace);
+
 	/*
 	 * What if the platform has a non-90 degree rotation? We can't even
 	 * "adjust" the configuration and carry on. Alternatively, raising an
@@ -496,11 +500,25 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 		V4L2DeviceFormat format;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
+		format.colorSpace = cfg.colorSpace;
+
+		LOG(RPI, Debug)
+			<< "Try color space " << ColorSpace::toString(cfg.colorSpace);
 
 		int ret = dev->tryFormat(&format);
 		if (ret)
 			return Invalid;
 
+		if (cfg.colorSpace != format.colorSpace) {
+			status = Adjusted;
+			LOG(RPI, Debug)
+				<< "Color space changed from "
+				<< ColorSpace::toString(cfg.colorSpace) << " to "
+				<< ColorSpace::toString(format.colorSpace);
+		}
+
+		cfg.colorSpace = format.colorSpace;
+
 		cfg.stride = format.planes[0].bpl;
 		cfg.frameSize = format.planes[0].size;
 
@@ -524,6 +542,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 	PixelFormat pixelFormat;
 	V4L2VideoDevice::Formats fmts;
 	Size size;
+	std::optional<ColorSpace> colorSpace;
 
 	if (roles.empty())
 		return config;
@@ -539,6 +558,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			pixelFormat = mbusCodeToPixelFormat(sensorFormat.mbus_code,
 							    BayerFormat::Packing::CSI2);
 			ASSERT(pixelFormat.isValid());
+			colorSpace = ColorSpace::Raw;
 			bufferCount = 2;
 			rawCount++;
 			break;
@@ -546,6 +566,12 @@
 		case StreamRole::StillCapture:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::NV12;
+			/*
+			 * Still image codecs usually expect the JPEG color space.
+			 * Even RGB codecs will be fine as the RGB we get with the
+			 * JPEG color space is the same as sRGB.
+			 */
+			colorSpace = ColorSpace::Jpeg;
 			/* Return the largest sensor resolution. */
 			size = sensorSize;
 			bufferCount = 1;
@@ -563,6 +589,11 @@
 			 */
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::YUV420;
+			/*
+			 * Choose a color space appropriate for video recording.
+			 * Rec.709 will be a good default for HD resolutions.
+			 */
+			colorSpace = ColorSpace::Rec709;
 			size = { 1920, 1080 };
 			bufferCount = 4;
 			outCount++;
@@ -571,6 +602,7 @@
 		case StreamRole::Viewfinder:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::ARGB8888;
+			colorSpace = ColorSpace::Jpeg;
 			size = { 800, 600 };
 			bufferCount = 4;
 			outCount++;
@@ -617,6 +649,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		StreamConfiguration cfg(formats);
 		cfg.size = size;
 		cfg.pixelFormat = pixelFormat;
+		cfg.colorSpace = colorSpace;
 		cfg.bufferCount = bufferCount;
 		config->addConfiguration(cfg);
 	}
@@ -724,6 +757,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		V4L2PixelFormat fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
 		format.fourcc = fourcc;
+		format.colorSpace = cfg.colorSpace;
 
 		LOG(RPI, Debug) << "Setting " << stream->name() << " to "
 				<< format.toString();
@@ -739,6 +773,10 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 			return -EINVAL;
 		}
 
+		LOG(RPI, Debug)
+			<< "Stream " << stream->name() << " has color space "
+			<< ColorSpace::toString(cfg.colorSpace);
+
 		cfg.setStream(stream);
 		stream->setExternal(true);
 
@@ -763,6 +801,8 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		format = {};
 		format.size = maxSize;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420);
+		/* No one asked for output, so the color space doesn't matter. */
+		format.colorSpace = ColorSpace::Jpeg;
 		ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
 		if (ret) {
 			LOG(RPI, Error)
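The code paths above are driven by the StreamConfiguration::colorSpace field that applications set before calling validate() and configure(). The following is a minimal usage sketch, not part of this patch: the helper name configureVideoStream and its error handling are illustrative assumptions, and camera acquisition / CameraManager setup are omitted; it simply requests Rec.709 on a video stream and accepts whatever validate() settles on.

/*
 * Illustrative sketch only (assumed helper, not from this patch): ask for
 * Rec.709 on a VideoRecording stream and let the pipeline handler's
 * validate() adjust the color space if the V4L2 device cannot honour it.
 */
#include <cerrno>
#include <memory>

#include <libcamera/libcamera.h>

using namespace libcamera;

int configureVideoStream(std::shared_ptr<Camera> camera)
{
	/* The RPi handler already defaults a VideoRecording stream to Rec.709. */
	std::unique_ptr<CameraConfiguration> config =
		camera->generateConfiguration({ StreamRole::VideoRecording });
	if (!config)
		return -EINVAL;

	StreamConfiguration &cfg = config->at(0);
	cfg.colorSpace = ColorSpace::Rec709;

	/* validate() returns Adjusted if the ISP output changed the request. */
	if (config->validate() == CameraConfiguration::Invalid)
		return -EINVAL;

	/* cfg.colorSpace now holds the color space the V4L2 device accepted. */
	return camera->configure(config.get());
}

When the adjustment path triggers, the Debug messages added in validate() above ("Try color space ..." and "Color space changed from ... to ...") report the requested and accepted color spaces.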