From af543d3c9ee4ed336c0db09a965086c418896326 Mon Sep 17 00:00:00 2001
From: Jacopo Mondi
Date: Thu, 24 Nov 2022 10:10:26 +0100
Subject: libcamera: camera_sensor: Validate Transform

The two pipeline handlers that currently support Transform (IPU3 and
RaspberryPi) implement it by applying horizontal and vertical flips on
the image sensor.

Centralize the code that validates a Transform request against the
sensor rotation capabilities in the CameraSensor class. The IPU3
implementation was originally copied from the RaspberryPi one;
centralizing it in CameraSensor removes the duplication and makes it
easier for other platforms to support Transform.

The CameraSensor::validateTransform() implementation comes directly
from the RaspberryPi pipeline handler; no functional changes are
intended.

Signed-off-by: Jacopo Mondi
---
 include/libcamera/internal/camera_sensor.h          |  4 ++
 src/libcamera/camera_sensor.cpp                     | 65 ++++++++++++++++++++
 src/libcamera/pipeline/ipu3/ipu3.cpp                | 45 ++-----------
 src/libcamera/pipeline/raspberrypi/raspberrypi.cpp  | 59 +++--------------
 4 files changed, 82 insertions(+), 91 deletions(-)

diff --git a/include/libcamera/internal/camera_sensor.h b/include/libcamera/internal/camera_sensor.h
index 878f3c28..bea52bad 100644
--- a/include/libcamera/internal/camera_sensor.h
+++ b/include/libcamera/internal/camera_sensor.h
@@ -29,6 +29,8 @@ class BayerFormat;
 class CameraLens;
 class MediaEntity;
 
+enum class Transform;
+
 struct CameraSensorProperties;
 
 class CameraSensor : protected Loggable
@@ -68,6 +70,8 @@ public:
 
 	CameraLens *focusLens() { return focusLens_.get(); }
 
+	Transform validateTransform(Transform *transform) const;
+
 protected:
 	std::string logPrefix() const override;
 
diff --git a/src/libcamera/camera_sensor.cpp b/src/libcamera/camera_sensor.cpp
index 83ac075a..3518d3e3 100644
--- a/src/libcamera/camera_sensor.cpp
+++ b/src/libcamera/camera_sensor.cpp
@@ -962,6 +962,71 @@ void CameraSensor::updateControlInfo()
  * connected to the sensor
  */
 
+/**
+ * \brief Validate a transform request against the sensor capabilities
+ * \param[inout] transform The requested transformation, updated to match
+ * the sensor capabilities
+ *
+ * The requested \a transform is adjusted to compensate for the sensor's
+ * mounting rotation and validated against the sensor capabilities.
+ *
+ * For example, if the requested \a transform is Transform::Identity and the
+ * sensor rotation is 180 degrees, the resulting transform returned by the
+ * function is Transform::Rot180 to automatically correct the image, but
+ * only if the sensor can actually apply horizontal and vertical flips.
+ *
+ * \return A Transform instance that represents which transformation has been
+ * applied to the camera sensor
+ */
+Transform CameraSensor::validateTransform(Transform *transform) const
+{
+	/* Adjust the requested transform to compensate for the sensor rotation. */
+	int32_t rotation = properties().get(properties::Rotation).value_or(0);
+	bool success;
+
+	Transform rotationTransform = transformFromRotation(rotation, &success);
+	if (!success)
+		LOG(CameraSensor, Warning) << "Invalid rotation of " << rotation
+					   << " degrees - ignoring";
+
+	Transform combined = *transform * rotationTransform;
+
+	/*
+	 * We combine the platform and user transform, but must "adjust away"
+	 * any combined result that includes a transposition, as we can't do
+	 * those. In this case, flipping only the transpose bit is helpful to
+	 * applications - they either get the transform they requested, or have
+	 * to do a simple transpose themselves (they don't have to worry about
+	 * the other possible cases).
+	 */
+	if (!!(combined & Transform::Transpose)) {
+		/*
+		 * Flipping the transpose bit in "transform" flips it in the
+		 * combined result too (as it's the last thing that happens),
+		 * which is of course clearing it.
+		 */
+		*transform ^= Transform::Transpose;
+		combined &= ~Transform::Transpose;
+	}
+
+	/*
+	 * We also check if the sensor doesn't do h/vflips at all, in which
+	 * case we clear them, and the application will have to do everything.
+	 */
+	if (!supportFlips_ && !!combined) {
+		/*
+		 * If the sensor can do no transforms, then combined must be
+		 * changed to the identity. The only user transform that gives
+		 * rise to this is the inverse of the rotation. (Recall that
+		 * combined = transform * rotationTransform.)
+		 */
+		*transform = -rotationTransform;
+		combined = Transform::Identity;
+	}
+
+	return combined;
+}
+
 std::string CameraSensor::logPrefix() const
 {
 	return "'" + entity_->name() + "'";
diff --git a/src/libcamera/pipeline/ipu3/ipu3.cpp b/src/libcamera/pipeline/ipu3/ipu3.cpp
index e4d79ea4..a424ac91 100644
--- a/src/libcamera/pipeline/ipu3/ipu3.cpp
+++ b/src/libcamera/pipeline/ipu3/ipu3.cpp
@@ -184,48 +184,15 @@ CameraConfiguration::Status IPU3CameraConfiguration::validate()
 	if (config_.empty())
 		return Invalid;
 
-	Transform combined = transform * data_->rotationTransform_;
-
-	/*
-	 * We combine the platform and user transform, but must "adjust away"
-	 * any combined result that includes a transposition, as we can't do
-	 * those. In this case, flipping only the transpose bit is helpful to
-	 * applications - they either get the transform they requested, or have
-	 * to do a simple transpose themselves (they don't have to worry about
-	 * the other possible cases).
-	 */
-	if (!!(combined & Transform::Transpose)) {
-		/*
-		 * Flipping the transpose bit in "transform" flips it in the
-		 * combined result too (as it's the last thing that happens),
-		 * which is of course clearing it.
-		 */
-		transform ^= Transform::Transpose;
-		combined &= ~Transform::Transpose;
-		status = Adjusted;
-	}
-
 	/*
-	 * We also check if the sensor doesn't do h/vflips at all, in which
-	 * case we clear them, and the application will have to do everything.
+	 * Validate the requested transform against the sensor capabilities and
+	 * rotation, and store the final combined transform that configure() will
+	 * need to apply to the sensor to save us working it out again.
 	 */
-	if (!data_->supportsFlips_ && !!combined) {
-		/*
-		 * If the sensor can do no transforms, then combined must be
-		 * changed to the identity. The only user transform that gives
-		 * rise to this is the inverse of the rotation. (Recall that
-		 * combined = transform * rotationTransform.)
-		 */
-		transform = -data_->rotationTransform_;
-		combined = Transform::Identity;
+	Transform requestedTransform = transform;
+	combinedTransform_ = data_->cio2_.sensor()->validateTransform(&transform);
+	if (transform != requestedTransform)
 		status = Adjusted;
-	}
-
-	/*
-	 * Store the final combined transform that configure() will need to
-	 * apply to the sensor to save us working it out again.
-	 */
-	combinedTransform_ = combined;
 
 	/* Cap the number of entries to the available streams. */
 	if (config_.size() > kMaxStreams) {
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index 8569df17..c086a69a 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -367,59 +367,14 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 	status = validateColorSpaces(ColorSpaceFlag::StreamsShareColorSpace);
 
 	/*
-	 * What if the platform has a non-90 degree rotation? We can't even
-	 * "adjust" the configuration and carry on. Alternatively, raising an
-	 * error means the platform can never run. Let's just print a warning
-	 * and continue regardless; the rotation is effectively set to zero.
+	 * Validate the requested transform against the sensor capabilities and
+	 * rotation, and store the final combined transform that configure() will
+	 * need to apply to the sensor to save us working it out again.
 	 */
-	int32_t rotation = data_->sensor_->properties().get(properties::Rotation).value_or(0);
-	bool success;
-	Transform rotationTransform = transformFromRotation(rotation, &success);
-	if (!success)
-		LOG(RPI, Warning) << "Invalid rotation of " << rotation
-				  << " degrees - ignoring";
-	Transform combined = transform * rotationTransform;
-
-	/*
-	 * We combine the platform and user transform, but must "adjust away"
-	 * any combined result that includes a transform, as we can't do those.
-	 * In this case, flipping only the transpose bit is helpful to
-	 * applications - they either get the transform they requested, or have
-	 * to do a simple transpose themselves (they don't have to worry about
-	 * the other possible cases).
-	 */
-	if (!!(combined & Transform::Transpose)) {
-		/*
-		 * Flipping the transpose bit in "transform" flips it in the
-		 * combined result too (as it's the last thing that happens),
-		 * which is of course clearing it.
-		 */
-		transform ^= Transform::Transpose;
-		combined &= ~Transform::Transpose;
-		status = Adjusted;
-	}
-
-	/*
-	 * We also check if the sensor doesn't do h/vflips at all, in which
-	 * case we clear them, and the application will have to do everything.
-	 */
-	if (!data_->supportsFlips_ && !!combined) {
-		/*
-		 * If the sensor can do no transforms, then combined must be
-		 * changed to the identity. The only user transform that gives
-		 * rise to this the inverse of the rotation. (Recall that
-		 * combined = transform * rotationTransform.)
-		 */
-		transform = -rotationTransform;
-		combined = Transform::Identity;
+	Transform requestedTransform = transform;
+	combinedTransform_ = data_->sensor_->validateTransform(&transform);
+	if (transform != requestedTransform)
 		status = Adjusted;
-	}
-
-	/*
-	 * Store the final combined transform that configure() will need to
-	 * apply to the sensor to save us working it out again.
-	 */
-	combinedTransform_ = combined;
 
 	unsigned int rawCount = 0, outCount = 0, count = 0, maxIndex = 0;
 	std::pair outSize[2];
@@ -454,7 +409,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 		if (data_->flipsAlterBayerOrder_) {
 			BayerFormat bayer = BayerFormat::fromV4L2PixelFormat(fourcc);
 			bayer.order = data_->nativeBayerOrder_;
-			bayer = bayer.transform(combined);
+			bayer = bayer.transform(combinedTransform_);
 			fourcc = bayer.toV4L2PixelFormat();
 		}
 
--
cgit v1.2.1
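
For context, a minimal usage sketch, not part of the patch above: it mirrors
what the IPU3 and RaspberryPi validate() implementations now do, folding the
sensor mounting rotation into a user-requested Transform and reporting whether
the request had to be adjusted. Only CameraSensor::validateTransform() and
Transform are libcamera APIs here; the helper function and its parameter names
are hypothetical.

#include <libcamera/transform.h>

#include "libcamera/internal/camera_sensor.h"

using namespace libcamera;

/*
 * Hypothetical helper a pipeline handler's validate() step could use: adjust
 * the user-requested transform for the sensor's mounting rotation and flip
 * capabilities, and record the combined transform that configure() will
 * later have to apply to the sensor.
 *
 * Returns true if the caller's request was honoured unmodified, false if the
 * configuration should be marked as Adjusted.
 */
static bool applyUserTransform(CameraSensor *sensor, Transform *transform,
			       Transform *combinedTransform)
{
	Transform requested = *transform;

	/* validateTransform() updates *transform and returns the combined one. */
	*combinedTransform = sensor->validateTransform(transform);

	return *transform == requested;
}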