/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2019, Google Inc.
*
* Proxy to V4L2 compatibility camera
*/
#include "v4l2_camera_proxy.h"
#include <algorithm>
#include <errno.h>
#include <numeric>
#include <set>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include <libcamera/base/log.h>
#include <libcamera/base/object.h>
#include <libcamera/base/utils.h>
#include <libcamera/camera.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/formats.h>
#include "libcamera/internal/v4l2_pixelformat.h"
#include "v4l2_camera.h"
#include "v4l2_camera_file.h"
#include "v4l2_compat_manager.h"
#define KERNEL_VERSION(a, b, c) (((a) << 16) + ((b) << 8) + (c))
using namespace libcamera;
using namespace std::literals::chrono_literals;
LOG_DECLARE_CATEGORY(V4L2Compat)
V4L2CameraProxy::V4L2CameraProxy(unsigned int index,
std::shared_ptr<Camera> camera)
: refcount_(0), index_(index), bufferCount_(0), currentBuf_(0),
vcam_(std::make_unique<V4L2Camera>(camera)), owner_(nullptr)
{
querycap(camera);
}
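
/*
 * Open the proxy on behalf of a V4L2CameraFile. The underlying libcamera
 * camera is opened on the first call only and remains open until the last
 * file is closed.
 */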
int V4L2CameraProxy::open(V4L2CameraFile *file)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
MutexLocker locker(proxyMutex_);
if (refcount_++) {
files_.insert(file);
return 0;
}
/*
* We open the camera here, once, and keep it open until the last
* V4L2CameraFile is closed. The proxy is initially not owned by any
* file. The first file that calls reqbufs with count > 0 or s_fmt
* will become the owner, and no other file will be allowed to call
* buffer-related ioctls (except querybuf), set the format, or start or
* stop the stream until ownership is released with a call to reqbufs
* with count = 0.
*/
int ret = vcam_->open(&streamConfig_);
if (ret < 0) {
refcount_--;
return ret;
}
setFmtFromConfig(streamConfig_);
files_.insert(file);
return 0;
}

void V4L2CameraProxy::close(V4L2CameraFile *file)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
MutexLocker locker(proxyMutex_);
files_.erase(file);
release(file);
if (--refcount_ > 0)
return;
vcam_->close();
}
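
/*
 * Map a previously requested buffer into the caller's address space. The
 * mapping is performed on the file descriptor exported for the buffer, and
 * the offset identifies which buffer to map.
 */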
void *V4L2CameraProxy::mmap(V4L2CameraFile *file, void *addr, size_t length,
int prot, int flags, off64_t offset)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
MutexLocker locker(proxyMutex_);
/*
* Mimic the videobuf2 behaviour, which requires PROT_READ and
* MAP_SHARED.
*/
if (!(prot & PROT_READ)) {
errno = EINVAL;
return MAP_FAILED;
}
if (!(flags & MAP_SHARED)) {
errno = EINVAL;
return MAP_FAILED;
}
unsigned int index = offset / sizeimage_;
if (static_cast<off_t>(index * sizeimage_) != offset ||
length != sizeimage_) {
errno = EINVAL;
return MAP_FAILED;
}
int fd = vcam_->getBufferFd(index);
if (fd < 0) {
errno = EINVAL;
return MAP_FAILED;
}
void *map = V4L2CompatManager::instance()->fops().mmap(addr, length, prot,
flags, fd, 0);
if (map == MAP_FAILED)
return map;
buffers_[index].flags |= V4L2_BUF_FLAG_MAPPED;
mmaps_[map] = index;
return map;
}

int V4L2CameraProxy::munmap(V4L2CameraFile *file, void *addr, size_t length)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
MutexLocker locker(proxyMutex_);
auto iter = mmaps_.find(addr);
if (iter == mmaps_.end() || length != sizeimage_) {
errno = EINVAL;
return -1;
}
if (V4L2CompatManager::instance()->fops().munmap(addr, length))
LOG(V4L2Compat, Error) << "Failed to unmap " << addr
<< " with length " << length;
buffers_[iter->second].flags &= ~V4L2_BUF_FLAG_MAPPED;
mmaps_.erase(iter);
return 0;
}
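
/* Only video capture buffers in MMAP memory are supported. */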
bool V4L2CameraProxy::validateBufferType(uint32_t type)
{
return type == V4L2_BUF_TYPE_VIDEO_CAPTURE;
}

bool V4L2CameraProxy::validateMemoryType(uint32_t memory)
{
return memory == V4L2_MEMORY_MMAP;
}
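
/*
 * Cache the V4L2 pixel format and frame interval derived from the given
 * stream configuration, for later retrieval through the V4L2 ioctls.
 */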
void V4L2CameraProxy::setFmtFromConfig(const StreamConfiguration &streamConfig)
{
const Size &size = streamConfig.size;
v4l2PixFormat_.width = size.width;
v4l2PixFormat_.height = size.height;
v4l2PixFormat_.pixelformat = V4L2PixelFormat::fromPixelFormat(streamConfig.pixelFormat)[0];
v4l2PixFormat_.field = V4L2_FIELD_NONE;
v4l2PixFormat_.bytesperline = streamConfig.stride;
v4l2PixFormat_.sizeimage = streamConfig.frameSize;
v4l2PixFormat_.colorspace = V4L2_COLORSPACE_SRGB;
v4l2PixFormat_.priv = V4L2_PIX_FMT_PRIV_MAGIC;
v4l2PixFormat_.ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
v4l2PixFormat_.quantization = V4L2_QUANTIZATION_DEFAULT;
v4l2PixFormat_.xfer_func = V4L2_XFER_FUNC_DEFAULT;
sizeimage_ = streamConfig.frameSize;
const ControlInfoMap &controls = vcam_->controlInfo();
const auto &it = controls.find(&controls::FrameDurationLimits);
if (it != controls.end()) {
const int64_t duration = it->second.def().get<int64_t>();
v4l2TimePerFrame_.numerator = duration;
v4l2TimePerFrame_.denominator = 1000000;
} else {
/*
* Default to 30fps if the camera doesn't expose the
* FrameDurationLimits control.
*
* \todo Remove this once all pipeline handlers implement the
* control
*/
LOG(V4L2Compat, Warning)
<< "Camera does not support FrameDurationLimits";
v4l2TimePerFrame_.numerator = 333333;
v4l2TimePerFrame_.denominator = 1000000;
}
}
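
/*
 * Fill the v4l2_capability structure reported by VIDIOC_QUERYCAP with the
 * libcamera driver information and the camera identifier.
 */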
void V4L2CameraProxy::querycap(std::shared_ptr<Camera> camera)
{
std::string driver = "libcamera";
std::string bus_info = driver + ":" + std::to_string(index_);
utils::strlcpy(reinterpret_cast<char *>(capabilities_.driver), driver.c_str(),
sizeof(capabilities_.driver));
utils::strlcpy(reinterpret_cast<char *>(capabilities_.card), camera->id().c_str(),
sizeof(capabilities_.card));
utils::strlcpy(reinterpret_cast<char *>(capabilities_.bus_info), bus_info.c_str(),
sizeof(capabilities_.bus_info));
/* \todo Put this in a header/config somewhere. */
capabilities_.version = KERNEL_VERSION(5, 2, 0);
capabilities_.device_caps = V4L2_CAP_VIDEO_CAPTURE
| V4L2_CAP_STREAMING
| V4L2_CAP_EXT_PIX_FORMAT;
capabilities_.capabilities = capabilities_.device_caps
| V4L2_CAP_DEVICE_CAPS;
memset(capabilities_.reserved, 0, sizeof(capabilities_.reserved));
}
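
/*
 * Copy the metadata of buffers completed by the V4L2Camera into the cached
 * v4l2_buffer entries: bytes used, timestamp, sequence number and the DONE
 * or ERROR flag.
 */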
void V4L2CameraProxy::updateBuffers()
{
std::vector<V4L2Camera::Buffer> completedBuffers = vcam_->completedBuffers();
for (const V4L2Camera::Buffer &buffer : completedBuffers) {
const FrameMetadata &fmd = buffer.data_;
struct v4l2_buffer &buf = buffers_[buffer.index_];
switch (fmd.status) {
case FrameMetadata::FrameSuccess:
buf.bytesused = std::accumulate(fmd.planes().begin(),
fmd.planes().end(), 0,
[](unsigned int total, const auto &plane) {
return total + plane.bytesused;
});
buf.field = V4L2_FIELD_NONE;
buf.timestamp.tv_sec = fmd.timestamp / 1000000000;
buf.timestamp.tv_usec = (fmd.timestamp / 1000) % 1000000;
buf.sequence = fmd.sequence;
buf.flags |= V4L2_BUF_FLAG_DONE;
break;
case FrameMetadata::FrameError:
buf.flags |= V4L2_BUF_FLAG_ERROR;
break;
default:
break;
}
}
}

int V4L2CameraProxy::vidioc_querycap(V4L2CameraFile *file, struct v4l2_capability *arg)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
*arg = capabilities_;
return 0;
}
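
/* Enumerate the discrete frame sizes supported for the requested pixel format. */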
int V4L2CameraProxy::vidioc_enum_framesizes(V4L2CameraFile *file, struct v4l2_frmsizeenum *arg)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
V4L2PixelFormat v4l2Format = V4L2PixelFormat(arg->pixel_format);
PixelFormat format = v4l2Format.toPixelFormat();
/*
* \todo This might need to be expanded as few pipeline handlers
* report StreamFormats.
*/
const std::vector<Size> &frameSizes = streamConfig_.formats().sizes(format);
if (arg->index >= frameSizes.size())
return -EINVAL;
arg->type = V4L2_FRMSIZE_TYPE_DISCRETE;
arg->discrete.width = frameSizes[arg->index].width;
arg->discrete.height = frameSizes[arg->index].height;
memset(arg->reserved, 0, sizeof(arg->reserved));
return 0;
}

int V4L2CameraProxy::vidioc_enum_fmt(V4L2CameraFile *file, struct v4l2_fmtdesc *arg)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
if (!validateBufferType(arg->type) ||
arg->index >= streamConfig_.formats().pixelformats().size())
return -EINVAL;
PixelFormat format = streamConfig_.formats().pixelformats()[arg->index];
V4L2PixelFormat v4l2Format = V4L2PixelFormat::fromPixelFormat(format)[0];
arg->flags = format == formats::MJPEG ? V4L2_FMT_FLAG_COMPRESSED : 0;
utils::strlcpy(reinterpret_cast<char *>(arg->description),
v4l2Format.description(), sizeof(arg->description));
arg->pixelformat = v4l2Format;
memset(arg->reserved, 0, sizeof(arg->reserved));
return 0;
}

int V4L2CameraProxy::vidioc_g_fmt(V4L2CameraFile *file, struct v4l2_format *arg)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
if (!validateBufferType(arg->type))
return -EINVAL;
memset(&arg->fmt, 0, sizeof(arg->fmt));
arg->fmt.pix = v4l2PixFormat_;
return 0;
}
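
/*
 * Validate the requested format against the camera and adjust *arg to the
 * closest supported configuration.
 */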
int V4L2CameraProxy::tryFormat(struct v4l2_format *arg)
{
V4L2PixelFormat v4l2Format = V4L2PixelFormat(arg->fmt.pix.pixelformat);
PixelFormat format = v4l2Format.toPixelFormat();
Size size(arg->fmt.pix.width, arg->fmt.pix.height);
StreamConfiguration config;
int ret = vcam_->validateConfiguration(format, size, &config);
if (ret < 0) {
LOG(V4L2Compat, Error)
<< "Failed to negotiate a valid format: "
<< format;
return -EINVAL;
}
arg->fmt.pix.width = config.size.width;
arg->fmt.pix.height = config.size.height;
arg->fmt.pix.pixelformat = V4L2PixelFormat::fromPixelFormat(config.pixelFormat)[0];
arg->fmt.pix.field = V4L2_FIELD_NONE;
arg->fmt.pix.bytesperline = config.stride;
arg->fmt.pix.sizeimage = config.frameSize;
arg->fmt.pix.colorspace = V4L2_COLORSPACE_SRGB;
arg->fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
arg->fmt.pix.ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
arg->fmt.pix.quantization = V4L2_QUANTIZATION_DEFAULT;
arg->fmt.pix.xfer_func = V4L2_XFER_FUNC_DEFAULT;
return 0;
}
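
/*
 * Setting the format requires the highest priority among open files and
 * acquires buffer ownership of the proxy for the calling file.
 */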
int V4L2CameraProxy::vidioc_s_fmt(V4L2CameraFile *file, struct v4l2_format *arg)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
if (!validateBufferType(arg->type))
return -EINVAL;
if (file->priority() < maxPriority())
return -EBUSY;
int ret = acquire(file);
if (ret < 0)
return ret;
ret = tryFormat(arg);
if (ret < 0)
return ret;
Size size(arg->fmt.pix.width, arg->fmt.pix.height);
V4L2PixelFormat v4l2Format = V4L2PixelFormat(arg->fmt.pix.pixelformat);
ret = vcam_->configure(&streamConfig_, size, v4l2Format.toPixelFormat(),
bufferCount_);
if (ret < 0)
return -EINVAL;
setFmtFromConfig(streamConfig_);
return 0;
}

int V4L2CameraProxy::vidioc_try_fmt(V4L2CameraFile *file, struct v4l2_format *arg)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
if (!validateBufferType(arg->type))
return -EINVAL;
int ret = tryFormat(arg);
if (ret < 0)
return ret;
return 0;
}
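
/* Return the highest priority requested by any open file. */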
enum v4l2_priority V4L2CameraProxy::maxPriority()
{
auto max = std::max_element(files_.begin(), files_.end(),
[](const V4L2CameraFile *a, const V4L2CameraFile *b) {
return a->priority() < b->priority();
});
return max != files_.end() ? (*max)->priority() : V4L2_PRIORITY_UNSET;
}

int V4L2CameraProxy::vidioc_g_priority(V4L2CameraFile *file, enum v4l2_priority *arg)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
*arg = maxPriority();
return 0;
}

int V4L2CameraProxy::vidioc_s_priority(V4L2CameraFile *file, enum v4l2_priority *arg)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
if (*arg > V4L2_PRIORITY_RECORD)
return -EINVAL;
if (file->priority() < maxPriority())
return -EBUSY;
file->setPriority(*arg);
return 0;
}
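
/* A single camera input is reported, named after the camera ID. */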
int V4L2CameraProxy::vidioc_enuminput(V4L2CameraFile *file, struct v4l2_input *arg)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";
if (arg->index != 0)
return -EINVAL;
memset(arg, 0, sizeof(*arg));
utils::strlcpy(reinterpret_cast<char *>(arg->name),
reinterpret_cast<char *>(capabilities_.card),
sizeof(arg->name));
arg->type = V4L2_INPUT_TYPE_CAMERA;
return 0;
}

int V4L2CameraProxy::vidioc_g_input(V4L2CameraFile *file, int *arg)
{
LOG(V4L2Compat, Debug)
<< "[" << file->description() << "] " << __func__ << "()";