 src/libcamera/pipeline/rpi/common/pipeline_base.cpp | 48
 src/libcamera/pipeline/rpi/common/pipeline_base.h   |  5
 src/libcamera/pipeline/rpi/vc4/vc4.cpp              | 35
 3 files changed, 34 insertions(+), 54 deletions(-)
diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
index 154ea8ab..f4c83dad 100644
--- a/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
+++ b/src/libcamera/pipeline/rpi/common/pipeline_base.cpp
@@ -238,16 +238,10 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 
 	/* Further fixups on the RAW streams. */
 	for (auto &raw : rawStreams_) {
-		StreamConfiguration &cfg = config_.at(raw.index);
-
-		V4L2DeviceFormat rawFormat;
-		rawFormat.fourcc = raw.dev->toV4L2PixelFormat(cfg.pixelFormat);
-		rawFormat.size = cfg.size;
-		rawFormat.colorSpace = cfg.colorSpace;
-
-		int ret = raw.dev->tryFormat(&rawFormat);
+		int ret = raw.dev->tryFormat(&raw.format);
 		if (ret)
 			return Invalid;
+
 		/*
 		 * Some sensors change their Bayer order when they are h-flipped
 		 * or v-flipped, according to the transform. If this one does, we
@@ -255,23 +249,15 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 		 * Note how we must fetch the "native" (i.e. untransformed) Bayer
 		 * order, because the sensor may currently be flipped!
 		 */
-		V4L2PixelFormat fourcc = rawFormat.fourcc;
+		BayerFormat bayer = BayerFormat::fromPixelFormat(raw.cfg->pixelFormat);
 		if (data_->flipsAlterBayerOrder_) {
-			BayerFormat bayer = BayerFormat::fromV4L2PixelFormat(fourcc);
 			bayer.order = data_->nativeBayerOrder_;
 			bayer = bayer.transform(combinedTransform_);
-			fourcc = bayer.toV4L2PixelFormat();
 		}
+		raw.cfg->pixelFormat = bayer.toPixelFormat();
 
-		PixelFormat inputPixFormat = fourcc.toPixelFormat();
-		if (raw.cfg->size != rawFormat.size || raw.cfg->pixelFormat != inputPixFormat) {
-			raw.cfg->size = rawFormat.size;
-			raw.cfg->pixelFormat = inputPixFormat;
+		if (RPi::PipelineHandlerBase::updateStreamConfig(raw.cfg, raw.format))
 			status = Adjusted;
-		}
-
-		raw.cfg->stride = rawFormat.planes[0].bpl;
-		raw.cfg->frameSize = rawFormat.planes[0].size;
 	}
 
 	/* Further fixups on the ISP output streams. */
@@ -548,35 +534,25 @@ int PipelineHandlerBase::configure(Camera *camera, CameraConfiguration *config)
 	 * If the application has provided a sensor configuration apply it
 	 * instead of just applying a format.
 	 */
-	const RPiCameraConfiguration *rpiConfig =
-		static_cast<const RPiCameraConfiguration *>(config);
-	V4L2SubdeviceFormat sensorFormat;
+	RPiCameraConfiguration *rpiConfig = static_cast<RPiCameraConfiguration *>(config);
+	V4L2SubdeviceFormat *sensorFormat = &rpiConfig->sensorFormat_;
 
 	if (rpiConfig->sensorConfig) {
 		ret = data->sensor_->applyConfiguration(*rpiConfig->sensorConfig,
 							rpiConfig->combinedTransform_,
-							&sensorFormat);
+							sensorFormat);
 	} else {
-		sensorFormat = rpiConfig->sensorFormat_;
-		ret = data->sensor_->setFormat(&sensorFormat,
+		ret = data->sensor_->setFormat(sensorFormat,
 					       rpiConfig->combinedTransform_);
 	}
 	if (ret)
 		return ret;
 
-	/* Use the user requested packing/bit-depth. */
-	std::optional<BayerFormat::Packing> packing;
-	if (!rpiConfig->rawStreams_.empty()) {
-		BayerFormat bayerFormat =
-			BayerFormat::fromPixelFormat(rpiConfig->rawStreams_[0].cfg->pixelFormat);
-		packing = bayerFormat.packing;
-	}
-
 	/*
 	 * Platform specific internal stream configuration. This also assigns
 	 * external streams which get configured below.
 	 */
-	ret = data->platformConfigure(sensorFormat, packing, rpiConfig);
+	ret = data->platformConfigure(rpiConfig);
 	if (ret)
 		return ret;
 
@@ -640,11 +616,11 @@ int PipelineHandlerBase::configure(Camera *camera, CameraConfiguration *config)
 	 */
 	link->setEnabled(true);
 	const MediaPad *sinkPad = link->sink();
-	ret = device->setFormat(sinkPad->index(), &sensorFormat);
+	ret = device->setFormat(sinkPad->index(), sensorFormat);
 	if (ret) {
 		LOG(RPI, Error) << "Failed to set format on " << device->entity()->name()
 				<< " pad " << sinkPad->index()
-				<< " with format " << sensorFormat
+				<< " with format " << *sensorFormat
 				<< ": " << ret;
 		return ret;
 	}
diff --git a/src/libcamera/pipeline/rpi/common/pipeline_base.h b/src/libcamera/pipeline/rpi/common/pipeline_base.h
index 491c5e98..0c4a59b7 100644
--- a/src/libcamera/pipeline/rpi/common/pipeline_base.h
+++ b/src/libcamera/pipeline/rpi/common/pipeline_base.h
@@ -58,9 +58,7 @@ public:
 	}
 
 	virtual CameraConfiguration::Status platformValidate(RPiCameraConfiguration *rpiConfig) const = 0;
-	virtual int platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
-				      std::optional<BayerFormat::Packing> packing,
-				      const RPiCameraConfiguration *rpiConfig) = 0;
+	virtual int platformConfigure(const RPiCameraConfiguration *rpiConfig) = 0;
 	virtual void platformStart() = 0;
 	virtual void platformStop() = 0;
 
@@ -269,6 +267,7 @@ public:
 		unsigned int index;
 		StreamConfiguration *cfg;
 		V4L2VideoDevice *dev;
+		V4L2DeviceFormat format;
 	};
 
 	std::vector<StreamParams> rawStreams_;
diff --git a/src/libcamera/pipeline/rpi/vc4/vc4.cpp b/src/libcamera/pipeline/rpi/vc4/vc4.cpp
index 1c860d7f..785f0712 100644
--- a/src/libcamera/pipeline/rpi/vc4/vc4.cpp
+++ b/src/libcamera/pipeline/rpi/vc4/vc4.cpp
@@ -115,9 +115,7 @@ private:
 		isp_[Isp::Input].dev()->setSelection(V4L2_SEL_TGT_CROP, &ispCrop_);
 	}
 
-	int platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
-			      std::optional<BayerFormat::Packing> packing,
-			      const RPi::RPiCameraConfiguration *rpiConfig) override;
+	int platformConfigure(const RPi::RPiCameraConfiguration *rpiConfig) override;
 	int platformConfigureIpa(ipa::RPi::ConfigParams &params) override;
 	int platformInitIpa([[maybe_unused]] ipa::RPi::InitParams &params) override
 	{
@@ -416,6 +414,9 @@ CameraConfiguration::Status Vc4CameraData::platformValidate(RPi::RPiCameraConfig
 		/* Handle flips to make sure to match the RAW stream format. */
 		if (flipsAlterBayerOrder_)
 			rawBayer = rawBayer.transform(rpiConfig->combinedTransform_);
+
+		/* Apply the user requested packing. */
+		rawBayer.packing = BayerFormat::fromPixelFormat(rawStream->pixelFormat).packing;
 		PixelFormat rawFormat = rawBayer.toPixelFormat();
 
 		if (rawStream->pixelFormat != rawFormat ||
@@ -425,6 +426,9 @@ CameraConfiguration::Status Vc4CameraData::platformValidate(RPi::RPiCameraConfig
 
 			status = CameraConfiguration::Adjusted;
 		}
+
+		rawStreams[0].format =
+			RPi::PipelineHandlerBase::toV4L2DeviceFormat(unicam_[Unicam::Image].dev(), rawStream);
 	}
 
 	/*
@@ -503,23 +507,14 @@ int Vc4CameraData::platformPipelineConfigure(const std::unique_ptr<YamlObject> &
 	return 0;
 }
 
-int Vc4CameraData::platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
-				     std::optional<BayerFormat::Packing> packing,
-				     const RPi::RPiCameraConfiguration *rpiConfig)
+int Vc4CameraData::platformConfigure(const RPi::RPiCameraConfiguration *rpiConfig)
 {
 	const std::vector<StreamParams> &rawStreams = rpiConfig->rawStreams_;
 	const std::vector<StreamParams> &outStreams = rpiConfig->outStreams_;
 	int ret;
 
-	if (!packing)
-		packing = BayerFormat::Packing::CSI2;
-
 	V4L2VideoDevice *unicam = unicam_[Unicam::Image].dev();
-	V4L2DeviceFormat unicamFormat = RPi::PipelineHandlerBase::toV4L2DeviceFormat(unicam, sensorFormat, *packing);
-
-	ret = unicam->setFormat(&unicamFormat);
-	if (ret)
-		return ret;
+	V4L2DeviceFormat unicamFormat;
 
 	/*
 	 * See which streams are requested, and route the user
@@ -528,14 +523,24 @@ int Vc4CameraData::platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
 	if (!rawStreams.empty()) {
 		rawStreams[0].cfg->setStream(&unicam_[Unicam::Image]);
 		unicam_[Unicam::Image].setFlags(StreamFlag::External);
+		unicamFormat = rawStreams[0].format;
+	} else {
+		unicamFormat =
+			RPi::PipelineHandlerBase::toV4L2DeviceFormat(unicam,
								     rpiConfig->sensorFormat_,
								     BayerFormat::Packing::CSI2);
 	}
 
+	ret = unicam->setFormat(&unicamFormat);
+	if (ret)
+		return ret;
+
 	ret = isp_[Isp::Input].dev()->setFormat(&unicamFormat);
 	if (ret)
 		return ret;
 
 	LOG(RPI, Info) << "Sensor: " << sensor_->id()
-		       << " - Selected sensor format: " << sensorFormat
+		       << " - Selected sensor format: " << rpiConfig->sensorFormat_
 		       << " - Selected unicam format: " << unicamFormat;
 
 	/* Use a sensible small default size if no output streams are configured. */
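
The change follows a "negotiate once, reuse later" pattern: validate() runs tryFormat() per raw stream and keeps the driver-adjusted result in the new StreamParams::format member, so platformConfigure() can consume the cached format instead of rebuilding it from the sensor format and packing. Because nothing else needs to be passed along, the virtual interface shrinks to a single rpiConfig argument. Below is a minimal standalone sketch of that pattern, assuming simplified stand-in types; DeviceFormat, StreamParams, Config and tryFormat() here are illustrative placeholders, not libcamera's real classes.

    #include <iostream>
    #include <string>
    #include <vector>

    struct DeviceFormat {          /* stand-in for V4L2DeviceFormat */
    	std::string fourcc;
    	unsigned int width = 0;
    	unsigned int height = 0;
    };

    struct StreamParams {          /* stand-in for the per-stream parameters */
    	std::string name;
    	DeviceFormat format;   /* cached result of the format negotiation */
    };

    /* Pretend driver negotiation: the "device" may adjust the requested format. */
    static int tryFormat(DeviceFormat *fmt)
    {
    	if (fmt->width > 4056)
    		fmt->width = 4056; /* device clamps oversized requests */
    	return 0;
    }

    struct Config {
    	std::vector<StreamParams> rawStreams;

    	int validate()
    	{
    		/* Negotiate once and keep the adjusted format around. */
    		for (auto &raw : rawStreams) {
    			if (tryFormat(&raw.format))
    				return -1;
    		}
    		return 0;
    	}

    	int configure() const
    	{
    		/* No renegotiation here: just apply what validate() cached. */
    		for (const auto &raw : rawStreams) {
    			std::cout << raw.name << ": " << raw.format.fourcc << " "
    				  << raw.format.width << "x" << raw.format.height << "\n";
    		}
    		return 0;
    	}
    };

    int main()
    {
    	Config cfg;
    	cfg.rawStreams.push_back({ "raw0", { "pRAA", 9000, 3040 } });
    	if (cfg.validate() == 0)
    		cfg.configure();
    	return 0;
    }

Running the sketch prints the clamped width, mirroring how the patch lets validate() report the adjusted stream configuration while configure() simply reuses it.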