-rw-r--r--   include/libcamera/internal/camera_sensor.h          |  4
-rw-r--r--   src/libcamera/camera_sensor.cpp                      | 72
-rw-r--r--   src/libcamera/pipeline/ipu3/ipu3.cpp                 | 45
-rw-r--r--   src/libcamera/pipeline/raspberrypi/raspberrypi.cpp   | 59
4 files changed, 89 insertions, 91 deletions
diff --git a/include/libcamera/internal/camera_sensor.h b/include/libcamera/internal/camera_sensor.h
index 878f3c28..bea52bad 100644
--- a/include/libcamera/internal/camera_sensor.h
+++ b/include/libcamera/internal/camera_sensor.h
@@ -29,6 +29,8 @@ class BayerFormat;
 class CameraLens;
 class MediaEntity;
 
+enum class Transform;
+
 struct CameraSensorProperties;
 
 class CameraSensor : protected Loggable
@@ -68,6 +70,8 @@ public:
 
 	CameraLens *focusLens() { return focusLens_.get(); }
 
+	Transform validateTransform(Transform *transform) const;
+
 protected:
 	std::string logPrefix() const override;
 
diff --git a/src/libcamera/camera_sensor.cpp b/src/libcamera/camera_sensor.cpp
index 83ac075a..a8668547 100644
--- a/src/libcamera/camera_sensor.cpp
+++ b/src/libcamera/camera_sensor.cpp
@@ -16,6 +16,7 @@
 #include <string.h>
 
 #include <libcamera/property_ids.h>
+#include <libcamera/transform.h>
 
 #include <libcamera/base/utils.h>
 
@@ -962,6 +963,77 @@ void CameraSensor::updateControlInfo()
  * connected to the sensor
  */
 
+/**
+ * \brief Validate a transform request against the sensor capabilities
+ * \param[inout] transform The requested transformation, updated to match
+ * the sensor capabilities
+ *
+ * The input \a transform is the transform that the caller wants, and it is
+ * adjusted according to the capabilities of the sensor to represent the
+ * "nearest" transform that can actually be delivered.
+ *
+ * The returned Transform is the transform applied to the sensor in order to
+ * produce the input \a transform. It is also validated against the sensor's
+ * ability to perform horizontal and vertical flips.
+ *
+ * For example, if the requested \a transform is Transform::Identity and the
+ * sensor rotation is 180 degrees, the output transform will be
+ * Transform::Rot180 to correct the images so that they appear to have
+ * Transform::Identity, but only if the sensor can apply horizontal and
+ * vertical flips.
+ *
+ * \return A Transform instance that represents which transformation has been
+ * applied to the camera sensor
+ */
+Transform CameraSensor::validateTransform(Transform *transform) const
+{
+	/* Adjust the requested transform to compensate the sensor rotation. */
+	int32_t rotation = properties().get(properties::Rotation).value_or(0);
+	bool success;
+
+	Transform rotationTransform = transformFromRotation(rotation, &success);
+	if (!success)
+		LOG(CameraSensor, Warning) << "Invalid rotation of " << rotation
+					   << " degrees - ignoring";
+
+	Transform combined = *transform * rotationTransform;
+
+	/*
+	 * We combine the platform and user transform, but must "adjust away"
+	 * any combined result that includes a transposition, as we can't do
+	 * those. In this case, flipping only the transpose bit is helpful to
+	 * applications - they either get the transform they requested, or have
+	 * to do a simple transpose themselves (they don't have to worry about
+	 * the other possible cases).
+	 */
+	if (!!(combined & Transform::Transpose)) {
+		/*
+		 * Flipping the transpose bit in "transform" flips it in the
+		 * combined result too (as it's the last thing that happens),
+		 * which is of course clearing it.
+		 */
+		*transform ^= Transform::Transpose;
+		combined &= ~Transform::Transpose;
+	}
+
+	/*
+	 * We also check if the sensor doesn't do h/vflips at all, in which
+	 * case we clear them, and the application will have to do everything.
+	 */
+	if (!supportFlips_ && !!combined) {
+		/*
+		 * If the sensor can do no transforms, then combined must be
+		 * changed to the identity. The only user transform that gives
+		 * rise to this is the inverse of the rotation. (Recall that
+		 * combined = transform * rotationTransform.)
+		 */
+		*transform = -rotationTransform;
+		combined = Transform::Identity;
+	}
+
+	return combined;
+}
+
 std::string CameraSensor::logPrefix() const
 {
 	return "'" + entity_->name() + "'";
diff --git a/src/libcamera/pipeline/ipu3/ipu3.cpp b/src/libcamera/pipeline/ipu3/ipu3.cpp
index e4d79ea4..a424ac91 100644
--- a/src/libcamera/pipeline/ipu3/ipu3.cpp
+++ b/src/libcamera/pipeline/ipu3/ipu3.cpp
@@ -184,48 +184,15 @@ CameraConfiguration::Status IPU3CameraConfiguration::validate()
 	if (config_.empty())
 		return Invalid;
 
-	Transform combined = transform * data_->rotationTransform_;
-
-	/*
-	 * We combine the platform and user transform, but must "adjust away"
-	 * any combined result that includes a transposition, as we can't do
-	 * those. In this case, flipping only the transpose bit is helpful to
-	 * applications - they either get the transform they requested, or have
-	 * to do a simple transpose themselves (they don't have to worry about
-	 * the other possible cases).
-	 */
-	if (!!(combined & Transform::Transpose)) {
-		/*
-		 * Flipping the transpose bit in "transform" flips it in the
-		 * combined result too (as it's the last thing that happens),
-		 * which is of course clearing it.
-		 */
-		transform ^= Transform::Transpose;
-		combined &= ~Transform::Transpose;
-		status = Adjusted;
-	}
-
 	/*
-	 * We also check if the sensor doesn't do h/vflips at all, in which
-	 * case we clear them, and the application will have to do everything.
+	 * Validate the requested transform against the sensor capabilities and
+	 * rotation and store the final combined transform that configure() will
+	 * need to apply to the sensor to save us working it out again.
 	 */
-	if (!data_->supportsFlips_ && !!combined) {
-		/*
-		 * If the sensor can do no transforms, then combined must be
-		 * changed to the identity. The only user transform that gives
-		 * rise to this is the inverse of the rotation. (Recall that
-		 * combined = transform * rotationTransform.)
-		 */
-		transform = -data_->rotationTransform_;
-		combined = Transform::Identity;
+	Transform requestedTransform = transform;
+	combinedTransform_ = data_->cio2_.sensor()->validateTransform(&transform);
+	if (transform != requestedTransform)
 		status = Adjusted;
-	}
-
-	/*
-	 * Store the final combined transform that configure() will need to
-	 * apply to the sensor to save us working it out again.
-	 */
-	combinedTransform_ = combined;
 
 	/* Cap the number of entries to the available streams. */
 	if (config_.size() > kMaxStreams) {
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index 8569df17..c086a69a 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -367,59 +367,14 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 	status = validateColorSpaces(ColorSpaceFlag::StreamsShareColorSpace);
 
 	/*
-	 * What if the platform has a non-90 degree rotation? We can't even
-	 * "adjust" the configuration and carry on. Alternatively, raising an
-	 * error means the platform can never run. Let's just print a warning
-	 * and continue regardless; the rotation is effectively set to zero.
+	 * Validate the requested transform against the sensor capabilities and
+	 * rotation and store the final combined transform that configure() will
+	 * need to apply to the sensor to save us working it out again.
 	 */
-	int32_t rotation = data_->sensor_->properties().get(properties::Rotation).value_or(0);
-	bool success;
-	Transform rotationTransform = transformFromRotation(rotation, &success);
-	if (!success)
-		LOG(RPI, Warning) << "Invalid rotation of " << rotation
-				  << " degrees - ignoring";
-	Transform combined = transform * rotationTransform;
-
-	/*
-	 * We combine the platform and user transform, but must "adjust away"
-	 * any combined result that includes a transform, as we can't do those.
-	 * In this case, flipping only the transpose bit is helpful to
-	 * applications - they either get the transform they requested, or have
-	 * to do a simple transpose themselves (they don't have to worry about
-	 * the other possible cases).
-	 */
-	if (!!(combined & Transform::Transpose)) {
-		/*
-		 * Flipping the transpose bit in "transform" flips it in the
-		 * combined result too (as it's the last thing that happens),
-		 * which is of course clearing it.
-		 */
-		transform ^= Transform::Transpose;
-		combined &= ~Transform::Transpose;
-		status = Adjusted;
-	}
-
-	/*
-	 * We also check if the sensor doesn't do h/vflips at all, in which
-	 * case we clear them, and the application will have to do everything.
-	 */
-	if (!data_->supportsFlips_ && !!combined) {
-		/*
-		 * If the sensor can do no transforms, then combined must be
-		 * changed to the identity. The only user transform that gives
-		 * rise to this the inverse of the rotation. (Recall that
-		 * combined = transform * rotationTransform.)
-		 */
-		transform = -rotationTransform;
-		combined = Transform::Identity;
+	Transform requestedTransform = transform;
+	combinedTransform_ = data_->sensor_->validateTransform(&transform);
+	if (transform != requestedTransform)
 		status = Adjusted;
-	}
-
-	/*
-	 * Store the final combined transform that configure() will need to
-	 * apply to the sensor to save us working it out again.
-	 */
-	combinedTransform_ = combined;
 
 	unsigned int rawCount = 0, outCount = 0, count = 0, maxIndex = 0;
 	std::pair<int, Size> outSize[2];
@@ -454,7 +409,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 		if (data_->flipsAlterBayerOrder_) {
 			BayerFormat bayer = BayerFormat::fromV4L2PixelFormat(fourcc);
 			bayer.order = data_->nativeBayerOrder_;
-			bayer = bayer.transform(combined);
+			bayer = bayer.transform(combinedTransform_);
 			fourcc = bayer.toV4L2PixelFormat();
 		}
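
The transform bookkeeping above is easier to see in isolation. The sketch below is not libcamera code: it assumes a hypothetical Transform2D enum limited to the non-transposing transforms (Identity, HFlip, VFlip, Rot180), where composition reduces to an XOR of the flip bits, and it mirrors only the "sensor can / cannot flip" branch of CameraSensor::validateTransform() added in this patch.

/*
 * Simplified stand-in model of the transform handling above; this is not
 * the libcamera API. Transforms are restricted to the non-transposing set
 * {Identity, HFlip, VFlip, Rot180}, where composition is an XOR of the
 * flip bits and every element is its own inverse.
 */
#include <cstdio>

enum Transform2D : unsigned int {
	Identity = 0,
	HFlip = 1,
	VFlip = 2,
	Rot180 = HFlip | VFlip,
};

/* Composition within this abelian subset is a plain XOR of the flip bits. */
static Transform2D compose(Transform2D a, Transform2D b)
{
	return static_cast<Transform2D>(a ^ b);
}

/*
 * Mirror of the validateTransform() fallback for this subset: combine the
 * user request with the sensor rotation and, if the sensor cannot flip at
 * all, hand the whole correction back to the application.
 */
static Transform2D validateTransform(Transform2D *transform,
				     Transform2D rotationTransform,
				     bool supportFlips)
{
	Transform2D combined = compose(*transform, rotationTransform);

	if (!supportFlips && combined != Identity) {
		/* A flip-only transform is its own inverse. */
		*transform = rotationTransform;
		combined = Identity;
	}

	return combined;
}

int main()
{
	/* A 180-degree mounted sensor that can apply h/v flips. */
	Transform2D request = Identity;
	Transform2D combined = validateTransform(&request, Rot180, true);
	std::printf("flips supported:   sensor applies %u, application applies %u\n",
		    static_cast<unsigned int>(combined),
		    static_cast<unsigned int>(request));

	/* The same sensor without flip support: the application must rotate. */
	request = Identity;
	combined = validateTransform(&request, Rot180, false);
	std::printf("flips unsupported: sensor applies %u, application applies %u\n",
		    static_cast<unsigned int>(combined),
		    static_cast<unsigned int>(request));

	return 0;
}

With flips supported the sensor absorbs Rot180 (value 3) and the application sees the identity it asked for; without them the sensor stays at identity and the updated request tells the application to apply Rot180 itself, which is the behaviour described in the documentation block of validateTransform().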