Diffstat (limited to 'src/libcamera/pipeline/uvcvideo/uvcvideo.cpp')
-rw-r--r--  src/libcamera/pipeline/uvcvideo/uvcvideo.cpp  604
1 files changed, 487 insertions, 117 deletions
diff --git a/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp b/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
index ffbddf27..8c2c6baf 100644
--- a/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
+++ b/src/libcamera/pipeline/uvcvideo/uvcvideo.cpp
@@ -2,58 +2,75 @@
/*
* Copyright (C) 2019, Google Inc.
*
- * uvcvideo.cpp - Pipeline handler for uvcvideo devices
+ * Pipeline handler for uvcvideo devices
*/
#include <algorithm>
-#include <iomanip>
-#include <sys/sysmacros.h>
-#include <tuple>
+#include <cmath>
+#include <fstream>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <vector>
+
+#include <libcamera/base/log.h>
+#include <libcamera/base/mutex.h>
+#include <libcamera/base/utils.h>
#include <libcamera/camera.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
+#include <libcamera/property_ids.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>
-#include "device_enumerator.h"
-#include "log.h"
-#include "media_device.h"
-#include "pipeline_handler.h"
-#include "utils.h"
-#include "v4l2_controls.h"
-#include "v4l2_videodevice.h"
+#include "libcamera/internal/camera.h"
+#include "libcamera/internal/device_enumerator.h"
+#include "libcamera/internal/media_device.h"
+#include "libcamera/internal/pipeline_handler.h"
+#include "libcamera/internal/sysfs.h"
+#include "libcamera/internal/v4l2_videodevice.h"
namespace libcamera {
LOG_DEFINE_CATEGORY(UVC)
-class UVCCameraData : public CameraData
+class UVCCameraData : public Camera::Private
{
public:
UVCCameraData(PipelineHandler *pipe)
- : CameraData(pipe), video_(nullptr)
+ : Camera::Private(pipe)
{
}
- ~UVCCameraData()
- {
- delete video_;
- }
+ int init(MediaDevice *media);
+ void addControl(uint32_t cid, const ControlInfo &v4l2info,
+ ControlInfoMap::Map *ctrls);
+ void imageBufferReady(FrameBuffer *buffer);
- int init(MediaEntity *entity);
- void bufferReady(FrameBuffer *buffer);
+ const std::string &id() const { return id_; }
- V4L2VideoDevice *video_;
+ Mutex openLock_;
+ std::unique_ptr<V4L2VideoDevice> video_;
Stream stream_;
+ std::map<PixelFormat, std::vector<SizeRange>> formats_;
+
+private:
+ bool generateId();
+
+ std::string id_;
};
class UVCCameraConfiguration : public CameraConfiguration
{
public:
- UVCCameraConfiguration();
+ UVCCameraConfiguration(UVCCameraData *data);
Status validate() override;
+
+private:
+ UVCCameraData *data_;
};
class PipelineHandlerUVC : public PipelineHandler
@@ -61,32 +78,36 @@ class PipelineHandlerUVC : public PipelineHandler
public:
PipelineHandlerUVC(CameraManager *manager);
- CameraConfiguration *generateConfiguration(Camera *camera,
- const StreamRoles &roles) override;
+ std::unique_ptr<CameraConfiguration> generateConfiguration(Camera *camera,
+ Span<const StreamRole> roles) override;
int configure(Camera *camera, CameraConfiguration *config) override;
int exportFrameBuffers(Camera *camera, Stream *stream,
std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;
- int start(Camera *camera) override;
- void stop(Camera *camera) override;
+ int start(Camera *camera, const ControlList *controls) override;
+ void stopDevice(Camera *camera) override;
int queueRequestDevice(Camera *camera, Request *request) override;
bool match(DeviceEnumerator *enumerator) override;
private:
+ int processControl(ControlList *controls, unsigned int id,
+ const ControlValue &value);
int processControls(UVCCameraData *data, Request *request);
- UVCCameraData *cameraData(const Camera *camera)
+ bool acquireDevice(Camera *camera) override;
+ void releaseDevice(Camera *camera) override;
+
+ UVCCameraData *cameraData(Camera *camera)
{
- return static_cast<UVCCameraData *>(
- PipelineHandler::cameraData(camera));
+ return static_cast<UVCCameraData *>(camera->_d());
}
};
-UVCCameraConfiguration::UVCCameraConfiguration()
- : CameraConfiguration()
+UVCCameraConfiguration::UVCCameraConfiguration(UVCCameraData *data)
+ : CameraConfiguration(), data_(data)
{
}
@@ -97,6 +118,11 @@ CameraConfiguration::Status UVCCameraConfiguration::validate()
if (config_.empty())
return Invalid;
+ if (orientation != Orientation::Rotate0) {
+ orientation = Orientation::Rotate0;
+ status = Adjusted;
+ }
+
/* Cap the number of entries to the available streams. */
if (config_.size() > 1) {
config_.resize(1);
@@ -113,9 +139,8 @@ CameraConfiguration::Status UVCCameraConfiguration::validate()
if (iter == pixelFormats.end()) {
cfg.pixelFormat = pixelFormats.front();
LOG(UVC, Debug)
- << "Adjusting pixel format from "
- << pixelFormat.toString() << " to "
- << cfg.pixelFormat.toString();
+ << "Adjusting pixel format from " << pixelFormat
+ << " to " << cfg.pixelFormat;
status = Adjusted;
}
@@ -130,13 +155,48 @@ CameraConfiguration::Status UVCCameraConfiguration::validate()
if (cfg.size != size) {
LOG(UVC, Debug)
- << "Adjusting size from " << size.toString()
- << " to " << cfg.size.toString();
+ << "Adjusting size from " << size << " to " << cfg.size;
status = Adjusted;
}
cfg.bufferCount = 4;
+ V4L2DeviceFormat format;
+ format.fourcc = data_->video_->toV4L2PixelFormat(cfg.pixelFormat);
+ format.size = cfg.size;
+
+ /*
+ * For power-consumption reasons video_ is closed when the camera is not
+ * acquired. Open it here if necessary.
+ */
+ {
+ bool opened = false;
+
+ MutexLocker locker(data_->openLock_);
+
+ if (!data_->video_->isOpen()) {
+ int ret = data_->video_->open();
+ if (ret)
+ return Invalid;
+
+ opened = true;
+ }
+
+ int ret = data_->video_->tryFormat(&format);
+ if (opened)
+ data_->video_->close();
+ if (ret)
+ return Invalid;
+ }
+
+ cfg.stride = format.planes[0].bpl;
+ cfg.frameSize = format.planes[0].size;
+
+ if (cfg.colorSpace != format.colorSpace) {
+ cfg.colorSpace = format.colorSpace;
+ status = Adjusted;
+ }
+
return status;
}
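
For context, validate() is reached through the public libcamera API either before the camera has been acquired (in which case the video node is opened and closed on the fly, as above) or after acquire() has already opened it through acquireDevice(). The application-side sketch below is illustrative only and not part of this patch; it assumes the standard libcamera public API and a hypothetical 1280x720 viewfinder stream.

#include <memory>

#include <libcamera/libcamera.h>

using namespace libcamera;

int main()
{
	CameraManager cm;
	cm.start();

	if (cm.cameras().empty())
		return 1;

	/* Acquiring the camera triggers acquireDevice(), which opens the video node. */
	std::shared_ptr<Camera> camera = cm.cameras()[0];
	camera->acquire();

	std::unique_ptr<CameraConfiguration> config =
		camera->generateConfiguration({ StreamRole::Viewfinder });

	/* Hypothetical requested size; validate() may adjust it. */
	config->at(0).size = { 1280, 720 };

	if (config->validate() == CameraConfiguration::Invalid)
		return 1;

	camera->configure(config.get());

	camera->release();
	cm.stop();
	return 0;
}

Any adjustments made by validate(), including the stride, frame size and colour space filled in above, are visible to the application through the returned configuration.
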
@@ -145,28 +205,18 @@ PipelineHandlerUVC::PipelineHandlerUVC(CameraManager *manager)
{
}
-CameraConfiguration *PipelineHandlerUVC::generateConfiguration(Camera *camera,
- const StreamRoles &roles)
+std::unique_ptr<CameraConfiguration>
+PipelineHandlerUVC::generateConfiguration(Camera *camera,
+ Span<const StreamRole> roles)
{
UVCCameraData *data = cameraData(camera);
- CameraConfiguration *config = new UVCCameraConfiguration();
+ std::unique_ptr<CameraConfiguration> config =
+ std::make_unique<UVCCameraConfiguration>(data);
if (roles.empty())
return config;
- std::map<V4L2PixelFormat, std::vector<SizeRange>> v4l2Formats =
- data->video_->formats();
- std::map<PixelFormat, std::vector<SizeRange>> deviceFormats;
- std::transform(v4l2Formats.begin(), v4l2Formats.end(),
- std::inserter(deviceFormats, deviceFormats.begin()),
- [&](const decltype(v4l2Formats)::value_type &format) {
- return decltype(deviceFormats)::value_type{
- data->video_->toPixelFormat(format.first),
- format.second
- };
- });
-
- StreamFormats formats(deviceFormats);
+ StreamFormats formats(data->formats_);
StreamConfiguration cfg(formats);
cfg.pixelFormat = formats.pixelformats().front();
@@ -186,7 +236,7 @@ int PipelineHandlerUVC::configure(Camera *camera, CameraConfiguration *config)
StreamConfiguration &cfg = config->at(0);
int ret;
- V4L2DeviceFormat format = {};
+ V4L2DeviceFormat format;
format.fourcc = data->video_->toV4L2PixelFormat(cfg.pixelFormat);
format.size = cfg.size;
@@ -212,7 +262,7 @@ int PipelineHandlerUVC::exportFrameBuffers(Camera *camera, Stream *stream,
return data->video_->exportBuffers(count, buffers);
}
-int PipelineHandlerUVC::start(Camera *camera)
+int PipelineHandlerUVC::start(Camera *camera, [[maybe_unused]] const ControlList *controls)
{
UVCCameraData *data = cameraData(camera);
unsigned int count = data->stream_.configuration().bufferCount;
@@ -230,35 +280,101 @@ int PipelineHandlerUVC::start(Camera *camera)
return 0;
}
-void PipelineHandlerUVC::stop(Camera *camera)
+void PipelineHandlerUVC::stopDevice(Camera *camera)
{
UVCCameraData *data = cameraData(camera);
data->video_->streamOff();
data->video_->releaseBuffers();
}
-int PipelineHandlerUVC::processControls(UVCCameraData *data, Request *request)
+int PipelineHandlerUVC::processControl(ControlList *controls, unsigned int id,
+ const ControlValue &value)
{
- ControlList controls(data->video_->controls());
+ uint32_t cid;
+
+ if (id == controls::Brightness)
+ cid = V4L2_CID_BRIGHTNESS;
+ else if (id == controls::Contrast)
+ cid = V4L2_CID_CONTRAST;
+ else if (id == controls::Saturation)
+ cid = V4L2_CID_SATURATION;
+ else if (id == controls::AeEnable)
+ cid = V4L2_CID_EXPOSURE_AUTO;
+ else if (id == controls::ExposureTime)
+ cid = V4L2_CID_EXPOSURE_ABSOLUTE;
+ else if (id == controls::AnalogueGain)
+ cid = V4L2_CID_GAIN;
+ else
+ return -EINVAL;
+
+ const ControlInfo &v4l2Info = controls->infoMap()->at(cid);
+ int32_t min = v4l2Info.min().get<int32_t>();
+ int32_t def = v4l2Info.def().get<int32_t>();
+ int32_t max = v4l2Info.max().get<int32_t>();
+
+ /*
+ * See UVCCameraData::addControl() for explanations of the different
+ * value mappings.
+ */
+ switch (cid) {
+ case V4L2_CID_BRIGHTNESS: {
+ float scale = std::max(max - def, def - min);
+ float fvalue = value.get<float>() * scale + def;
+ controls->set(cid, static_cast<int32_t>(std::lround(fvalue)));
+ break;
+ }
+
+ case V4L2_CID_SATURATION: {
+ float scale = def - min;
+ float fvalue = value.get<float>() * scale + min;
+ controls->set(cid, static_cast<int32_t>(std::lround(fvalue)));
+ break;
+ }
- for (auto it : request->controls()) {
- unsigned int id = it.first;
- ControlValue &value = it.second;
-
- if (id == controls::Brightness) {
- controls.set(V4L2_CID_BRIGHTNESS, value);
- } else if (id == controls::Contrast) {
- controls.set(V4L2_CID_CONTRAST, value);
- } else if (id == controls::Saturation) {
- controls.set(V4L2_CID_SATURATION, value);
- } else if (id == controls::ManualExposure) {
- controls.set(V4L2_CID_EXPOSURE_AUTO, static_cast<int32_t>(1));
- controls.set(V4L2_CID_EXPOSURE_ABSOLUTE, value);
- } else if (id == controls::ManualGain) {
- controls.set(V4L2_CID_GAIN, value);
+ case V4L2_CID_EXPOSURE_AUTO: {
+ int32_t ivalue = value.get<bool>()
+ ? V4L2_EXPOSURE_APERTURE_PRIORITY
+ : V4L2_EXPOSURE_MANUAL;
+ controls->set(V4L2_CID_EXPOSURE_AUTO, ivalue);
+ break;
+ }
+
+ case V4L2_CID_EXPOSURE_ABSOLUTE:
+ controls->set(cid, value.get<int32_t>() / 100);
+ break;
+
+ case V4L2_CID_CONTRAST:
+ case V4L2_CID_GAIN: {
+ float m = (4.0f - 1.0f) / (max - def);
+ float p = 1.0f - m * def;
+
+ if (m * min + p < 0.5f) {
+ m = (1.0f - 0.5f) / (def - min);
+ p = 1.0f - m * def;
}
+
+ float fvalue = (value.get<float>() - p) / m;
+ controls->set(cid, static_cast<int32_t>(std::lround(fvalue)));
+ break;
+ }
+
+ default: {
+ int32_t ivalue = value.get<int32_t>();
+ controls->set(cid, ivalue);
+ break;
+ }
}
+ return 0;
+}
+
+int PipelineHandlerUVC::processControls(UVCCameraData *data, Request *request)
+{
+ ControlList controls(data->video_->controls());
+
+ for (const auto &[id, value] : request->controls())
+ processControl(&controls, id, value);
+
for (const auto &ctrl : controls)
LOG(UVC, Debug)
<< "Setting control " << utils::hex(ctrl.first)
@@ -306,26 +422,15 @@ bool PipelineHandlerUVC::match(DeviceEnumerator *enumerator)
std::unique_ptr<UVCCameraData> data = std::make_unique<UVCCameraData>(this);
- /* Locate and initialise the camera data with the default video node. */
- const std::vector<MediaEntity *> &entities = media->entities();
- auto entity = std::find_if(entities.begin(), entities.end(),
- [](MediaEntity *entity) {
- return entity->flags() & MEDIA_ENT_FL_DEFAULT;
- });
- if (entity == entities.end()) {
- LOG(UVC, Error) << "Could not find a default video device";
+ if (data->init(media))
return false;
- }
-
- if (data->init(*entity))
- return false;
-
- dev_t devnum = makedev((*entity)->deviceMajor(), (*entity)->deviceMinor());
/* Create and register the camera. */
+ std::string id = data->id();
std::set<Stream *> streams{ &data->stream_ };
- std::shared_ptr<Camera> camera = Camera::create(this, media->model(), streams);
- registerCamera(std::move(camera), std::move(data), devnum);
+ std::shared_ptr<Camera> camera =
+ Camera::create(std::move(data), id, streams);
+ registerCamera(std::move(camera));
/* Enable hot-unplug notifications. */
hotplugMediaDevice(media);
@@ -333,62 +438,327 @@ bool PipelineHandlerUVC::match(DeviceEnumerator *enumerator)
return true;
}
-int UVCCameraData::init(MediaEntity *entity)
+bool PipelineHandlerUVC::acquireDevice(Camera *camera)
+{
+ UVCCameraData *data = cameraData(camera);
+
+ MutexLocker locker(data->openLock_);
+
+ return data->video_->open() == 0;
+}
+
+void PipelineHandlerUVC::releaseDevice(Camera *camera)
+{
+ UVCCameraData *data = cameraData(camera);
+
+ MutexLocker locker(data->openLock_);
+ data->video_->close();
+}
+
+int UVCCameraData::init(MediaDevice *media)
{
int ret;
+ /* Locate and initialise the camera data with the default video node. */
+ const std::vector<MediaEntity *> &entities = media->entities();
+ auto entity = std::find_if(entities.begin(), entities.end(),
+ [](MediaEntity *e) {
+ return e->flags() & MEDIA_ENT_FL_DEFAULT;
+ });
+ if (entity == entities.end()) {
+ LOG(UVC, Error) << "Could not find a default video device";
+ return -ENODEV;
+ }
+
/* Create and open the video device. */
- video_ = new V4L2VideoDevice(entity);
+ video_ = std::make_unique<V4L2VideoDevice>(*entity);
ret = video_->open();
if (ret)
return ret;
- video_->bufferReady.connect(this, &UVCCameraData::bufferReady);
+ video_->bufferReady.connect(this, &UVCCameraData::imageBufferReady);
+
+ /* Generate the camera ID. */
+ if (!generateId()) {
+ LOG(UVC, Error) << "Failed to generate camera ID";
+ return -EINVAL;
+ }
+
+ /*
+ * Populate the map of supported formats, and infer the camera sensor
+ * resolution from the largest size it advertises.
+ */
+ Size resolution;
+ for (const auto &format : video_->formats()) {
+ PixelFormat pixelFormat = format.first.toPixelFormat();
+ if (!pixelFormat.isValid())
+ continue;
+
+ formats_[pixelFormat] = format.second;
+
+ const std::vector<SizeRange> &sizeRanges = format.second;
+ for (const SizeRange &sizeRange : sizeRanges) {
+ if (sizeRange.max > resolution)
+ resolution = sizeRange.max;
+ }
+ }
+
+ if (formats_.empty()) {
+ LOG(UVC, Error)
+ << "Camera " << id_ << " (" << media->model()
+ << ") doesn't expose any supported format";
+ return -EINVAL;
+ }
+
+ /* Populate the camera properties. */
+ properties_.set(properties::Model, utils::toAscii(media->model()));
+
+ /*
+ * Derive the location from the device removable attribute in sysfs.
+ * Non-removable devices are assumed to be front as we lack detailed
+ * location information, and removable devices are considered external.
+ *
+ * The sysfs removable attribute is derived from the ACPI _UPC attribute
+ * if available, or from the USB hub descriptors otherwise. ACPI data
+ * may not be very reliable, and the USB hub descriptors may not be
+ * accurate on DT-based platforms. A heuristic may need to be
+ * implemented later if too many devices end up being miscategorized.
+ *
+ * \todo Find a way to tell front and back devices apart. This could
+ * come from the ACPI _PLD, but that may be even more unreliable than
+ * the _UPC.
+ */
+ properties::LocationEnum location = properties::CameraLocationExternal;
+ std::ifstream file(video_->devicePath() + "/../removable");
+ if (file.is_open()) {
+ std::string value;
+ std::getline(file, value);
+ file.close();
+
+ if (value == "fixed")
+ location = properties::CameraLocationFront;
+ }
+
+ properties_.set(properties::Location, location);
+
+ properties_.set(properties::PixelArraySize, resolution);
+ properties_.set(properties::PixelArrayActiveAreas, { Rectangle(resolution) });
/* Initialise the supported controls. */
- const ControlInfoMap &controls = video_->controls();
ControlInfoMap::Map ctrls;
- for (const auto &ctrl : controls) {
+ for (const auto &ctrl : video_->controls()) {
+ uint32_t cid = ctrl.first->id();
const ControlInfo &info = ctrl.second;
- const ControlId *id;
- switch (ctrl.first->id()) {
- case V4L2_CID_BRIGHTNESS:
- id = &controls::Brightness;
- break;
- case V4L2_CID_CONTRAST:
- id = &controls::Contrast;
- break;
- case V4L2_CID_SATURATION:
- id = &controls::Saturation;
- break;
- case V4L2_CID_EXPOSURE_ABSOLUTE:
- id = &controls::ManualExposure;
- break;
- case V4L2_CID_GAIN:
- id = &controls::ManualGain;
+ addControl(cid, info, &ctrls);
+ }
+
+ controlInfo_ = ControlInfoMap(std::move(ctrls), controls::controls);
+
+ /*
+ * Close the device to allow the camera to go into runtime-suspend; video_
+ * will be re-opened from acquireDevice() and validate().
+ */
+ video_->close();
+
+ return 0;
+}
+
+bool UVCCameraData::generateId()
+{
+ const std::string path = video_->devicePath();
+
+ /* Create a controller ID from the first device described in firmware. */
+ std::string controllerId;
+ std::string searchPath = path;
+ while (true) {
+ std::string::size_type pos = searchPath.rfind('/');
+ if (pos <= 1) {
+ LOG(UVC, Error) << "Can not find controller ID";
+ return false;
+ }
+
+ searchPath = searchPath.substr(0, pos);
+
+ controllerId = sysfs::firmwareNodePath(searchPath);
+ if (!controllerId.empty())
break;
- default:
- continue;
+ }
+
+ /*
+ * Create a USB ID from the device path which has the known format:
+ *
+ * path = bus, "-", ports, ":", config, ".", interface ;
+ * bus = number ;
+ * ports = port, [ ".", ports ] ;
+ * port = number ;
+ * config = number ;
+ * interface = number ;
+ *
+ * Example: 3-2.4:1.0
+ *
+ * The bus is not guaranteed to be stable and needs to be stripped from
+ * the USB ID. The final USB ID is built up of the ports, config and
+ * interface properties.
+ *
+ * Example 2.4:1.0.
+ */
+ std::string usbId = utils::basename(path.c_str());
+ usbId = usbId.substr(usbId.find('-') + 1);
+
+ /* Create a device ID from the USB device's vendor and product ID. */
+ std::string deviceId;
+ for (const char *name : { "idVendor", "idProduct" }) {
+ std::ifstream file(path + "/../" + name);
+
+ if (!file.is_open())
+ return false;
+
+ std::string value;
+ std::getline(file, value);
+ file.close();
+
+ if (!deviceId.empty())
+ deviceId += ":";
+
+ deviceId += value;
+ }
+
+ id_ = controllerId + "-" + usbId + "-" + deviceId;
+ return true;
+}
+
+void UVCCameraData::addControl(uint32_t cid, const ControlInfo &v4l2Info,
+ ControlInfoMap::Map *ctrls)
+{
+ const ControlId *id;
+ ControlInfo info;
+
+ /* Map the control ID. */
+ switch (cid) {
+ case V4L2_CID_BRIGHTNESS:
+ id = &controls::Brightness;
+ break;
+ case V4L2_CID_CONTRAST:
+ id = &controls::Contrast;
+ break;
+ case V4L2_CID_SATURATION:
+ id = &controls::Saturation;
+ break;
+ case V4L2_CID_EXPOSURE_AUTO:
+ id = &controls::AeEnable;
+ break;
+ case V4L2_CID_EXPOSURE_ABSOLUTE:
+ id = &controls::ExposureTime;
+ break;
+ case V4L2_CID_GAIN:
+ id = &controls::AnalogueGain;
+ break;
+ default:
+ return;
+ }
+
+ /* Map the control info. */
+ int32_t min = v4l2Info.min().get<int32_t>();
+ int32_t max = v4l2Info.max().get<int32_t>();
+ int32_t def = v4l2Info.def().get<int32_t>();
+
+ switch (cid) {
+ case V4L2_CID_BRIGHTNESS: {
+ /*
+ * The Brightness control is a float, with 0.0 mapped to the
+ * default value. The control range is [-1.0, 1.0], but the V4L2
+ * default may not be in the middle of the V4L2 range.
+ * Accommodate this by restricting the range of the libcamera
+ * control, but always within the maximum limits.
+ */
+ float scale = std::max(max - def, def - min);
+
+ info = ControlInfo{
+ { static_cast<float>(min - def) / scale },
+ { static_cast<float>(max - def) / scale },
+ { 0.0f }
+ };
+ break;
+ }
+
+ case V4L2_CID_SATURATION:
+ /*
+ * The Saturation control is a float, with 0.0 mapped to the
+ * minimum value (corresponding to a fully desaturated image)
+ * and 1.0 mapped to the default value. Calculate the maximum
+ * value accordingly.
+ */
+ info = ControlInfo{
+ { 0.0f },
+ { static_cast<float>(max - min) / (def - min) },
+ { 1.0f }
+ };
+ break;
+
+ case V4L2_CID_EXPOSURE_AUTO:
+ info = ControlInfo{ false, true, true };
+ break;
+
+ case V4L2_CID_EXPOSURE_ABSOLUTE:
+ /*
+ * ExposureTime is in units of 1 µs, and UVC expects
+ * V4L2_CID_EXPOSURE_ABSOLUTE in units of 100 µs.
+ */
+ info = ControlInfo{
+ { min * 100 },
+ { max * 100 },
+ { def * 100 }
+ };
+ break;
+
+ case V4L2_CID_CONTRAST:
+ case V4L2_CID_GAIN: {
+ /*
+ * The Contrast and AnalogueGain controls are floats, with 1.0
+ * mapped to the default value. UVC doesn't specify units, and
+ * cameras have been seen to expose very different ranges for
+ * the controls. Arbitrarily assume that the minimum and
+ * maximum values are respectively no lower than 0.5 and no
+ * higher than 4.0.
+ */
+ float m = (4.0f - 1.0f) / (max - def);
+ float p = 1.0f - m * def;
+
+ if (m * min + p < 0.5f) {
+ m = (1.0f - 0.5f) / (def - min);
+ p = 1.0f - m * def;
}
- ctrls.emplace(id, info);
+ info = ControlInfo{
+ { m * min + p },
+ { m * max + p },
+ { 1.0f }
+ };
+ break;
}
- controlInfo_ = std::move(ctrls);
+ default:
+ info = v4l2Info;
+ break;
+ }
- return 0;
+ ctrls->emplace(id, info);
}
-void UVCCameraData::bufferReady(FrameBuffer *buffer)
+void UVCCameraData::imageBufferReady(FrameBuffer *buffer)
{
Request *request = buffer->request();
- pipe_->completeBuffer(camera_, request, buffer);
- pipe_->completeRequest(camera_, request);
+ /* \todo Use the UVC metadata to calculate a more precise timestamp */
+ request->metadata().set(controls::SensorTimestamp,
+ buffer->metadata().timestamp);
+
+ pipe()->completeBuffer(request, buffer);
+ pipe()->completeRequest(request);
}
-REGISTER_PIPELINE_HANDLER(PipelineHandlerUVC);
+REGISTER_PIPELINE_HANDLER(PipelineHandlerUVC, "uvcvideo")
} /* namespace libcamera */
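
As a closing illustration of the camera ID scheme introduced by this patch, the snippet below assembles an ID from hypothetical controller, USB-path and vendor/product values; only the string handling mirrors generateId(), and none of the values come from the patch itself.

#include <iostream>
#include <string>

int main()
{
	std::string controllerId = "\\_SB_.PCI0.XHCI.RHUB";	/* hypothetical firmware node */
	std::string usbId = std::string("3-2.4:1.0");		/* bus "-" ports ":" config "." interface */
	usbId = usbId.substr(usbId.find('-') + 1);		/* strip the unstable bus number -> "2.4:1.0" */
	std::string deviceId = "046d:0825";			/* hypothetical idVendor:idProduct */

	std::cout << controllerId + "-" + usbId + "-" + deviceId << "\n";
	/* Prints: \_SB_.PCI0.XHCI.RHUB-2.4:1.0-046d:0825 */
	return 0;
}
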