summaryrefslogtreecommitdiff
path: root/src/cam/capture_script.cpp
blob: 9f22d5f77d3b2c9c787c39693e44d023db4399d3 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright (C) 2022, Ideas on Board Oy
 *
 * capture_script.cpp - Capture session configuration script
 */

#include "capture_script.h"

#include <iostream>
#include <stdio.h>
#include <stdlib.h>

using namespace libcamera;

CaptureScript::CaptureScript(std::shared_ptr<Camera> camera,
			     const std::string &fileName)
	: camera_(camera), valid_(false)
{
	/* Open the script; an unreadable file leaves the script invalid. */
	FILE *script = fopen(fileName.c_str(), "r");
	if (!script) {
		int err = errno;
		std::cerr << "Failed to open capture script " << fileName
			  << ": " << strerror(err) << std::endl;
		return;
	}

	/*
	 * Build a name-to-id lookup table for the camera controls, used to
	 * resolve the control names found while parsing the script file.
	 */
	for (const auto &ctrl : camera_->controls())
		controls_[ctrl.first->name()] = ctrl.first;

	int parseError = parseScript(script);
	fclose(script);

	/* The script is only usable if parsing fully succeeded. */
	valid_ = !parseError;
}

/* Retrieve the control list associated with a frame number. */
/* Retrieve the control list associated with a frame number. */
const ControlList &CaptureScript::frameControls(unsigned int frame)
{
	/* Returned when no controls are listed for the requested frame. */
	static const ControlList emptyControls{};

	auto match = frameControls_.find(frame);
	return match != frameControls_.end() ? match->second : emptyControls;
}

/*
 * Fetch the next event from the YAML parser, optionally validating its type
 * against \a expectedType. Returns nullptr on parse error or type mismatch.
 */
CaptureScript::EventPtr CaptureScript::nextEvent(yaml_event_type_t expectedType)
{
	EventPtr event(new yaml_event_t);

	if (!yaml_parser_parse(&parser_, event.get()))
		return nullptr;

	/* YAML_NO_EVENT disables type checking. */
	if (expectedType == YAML_NO_EVENT || checkEvent(event, expectedType))
		return event;

	return nullptr;
}

/*
 * Validate that \a event has type \a expectedType, reporting a diagnostic
 * with the script location on mismatch.
 */
bool CaptureScript::checkEvent(const EventPtr &event, yaml_event_type_t expectedType) const
{
	if (event->type == expectedType)
		return true;

	std::cerr << "Capture script error on line " << event->start_mark.line
		  << " column " << event->start_mark.column << ": "
		  << "Expected " << eventTypeName(expectedType)
		  << " event, got " << eventTypeName(event->type)
		  << std::endl;

	return false;
}

std::string CaptureScript::eventScalarValue(const EventPtr &event)
{
	return std::string(reinterpret_cast<char *>(event->data.scalar.value),
			   event->data.scalar.length);
}

/* Translate a libyaml event type to a human-readable name for diagnostics. */
std::string CaptureScript::eventTypeName(yaml_event_type_t type)
{
	switch (type) {
	case YAML_STREAM_START_EVENT:
		return "stream-start";
	case YAML_STREAM_END_EVENT:
		return "stream-end";
	case YAML_DOCUMENT_START_EVENT:
		return "document-start";
	case YAML_DOCUMENT_END_EVENT:
		return "document-end";
	case YAML_ALIAS_EVENT:
		return "alias";
	case YAML_SCALAR_EVENT:
		return "scalar";
	case YAML_SEQUENCE_START_EVENT:
		return "sequence-start";
	case YAML_SEQUENCE_END_EVENT:
		return "sequence-end";
	case YAML_MAPPING_START_EVENT:
		return "mapping-start";
	case YAML_MAPPING_END_EVENT:
		return "mapping-end";
	default:
		return "[type " + std::to_string(type) + "]";
	}
}

/*
 * Parse the capture script and populate the per-frame control lists.
 *
 * \param[in] script The open file handle of the script to parse
 * \return 0 on success or a negative error code otherwise
 */
int CaptureScript::parseScript(FILE *script)
{
	/*
	 * yaml_parser_initialize() returns 0 on failure. Returning that value
	 * directly would report success to the caller (and mark the script
	 * valid), so return an explicit error code instead.
	 */
	int ret = yaml_parser_initialize(&parser_);
	if (!ret) {
		std::cerr << "Failed to initialize yaml parser" << std::endl;
		return -EINVAL;
	}

	/* Delete the parser upon function exit (RAII). */
	struct ParserDeleter {
		ParserDeleter(yaml_parser_t *parser) : parser_(parser) { }
		~ParserDeleter() { yaml_parser_delete(parser_); }
		yaml_parser_t *parser_;
	} deleter(&parser_);

	yaml_parser_set_input_file(&parser_, script);

	/* The script shall start with a stream, a document and a mapping. */
	EventPtr event = nextEvent(YAML_STREAM_START_EVENT);
	if (!event)
		return -EINVAL;

	event = nextEvent(YAML_DOCUMENT_START_EVENT);
	if (!event)
		return -EINVAL;

	event = nextEvent(YAML_MAPPING_START_EVENT);
	if (!event)
		return -EINVAL;

	/* Iterate over the top-level sections until the mapping ends. */
	while (1) {
		event = nextEvent();
		if (!event)
			return -EINVAL;

		if (event->type == YAML_MAPPING_END_EVENT)
			return 0;

		if (!checkEvent(event, YAML_SCALAR_EVENT))
			return -EINVAL;

		std::string section = eventScalarValue(event);

		if (section == "frames") {
			/*
			 * Propagate parse failures: the return value was
			 * previously ignored, silently accepting malformed
			 * frames sections.
			 */
			ret = parseFrames();
			if (ret)
				return ret;
		} else {
			std::cerr << "Unsupported section '" << section << "'"
				  << std::endl;
			return -EINVAL;
		}
	}
}

/*
 * Parse the "frames" section, a sequence of per-frame control mappings.
 * \return 0 on success or a negative error code otherwise
 */
int CaptureScript::parseFrames()
{
	EventPtr event = nextEvent(YAML_SEQUENCE_START_EVENT);
	if (!event)
		return -EINVAL;

	for (;;) {
		event = nextEvent();
		if (!event)
			return -EINVAL;

		/* The sequence end terminates the frames section. */
		if (event->type == YAML_SEQUENCE_END_EVENT)
			break;

		int frameError = parseFrame(std::move(event));
		if (frameError)
			return frameError;
	}

	return 0;
}

/*
 * Parse a single frame entry: a mapping keyed by the frame number, whose
 * value is a nested mapping of control name/value pairs.
 * \return 0 on success or a negative error code otherwise
 */
int CaptureScript::parseFrame(EventPtr event)
{
	if (!checkEvent(event, YAML_MAPPING_START_EVENT))
		return -EINVAL;

	/* The entry is keyed by the frame number. */
	const std::string key = parseScalar();
	if (key.empty())
		return -EINVAL;

	unsigned int frameId = atoi(key.c_str());

	/* Parse the nested mapping of controls for this frame. */
	event = nextEvent(YAML_MAPPING_START_EVENT);
	if (!event)
		return -EINVAL;

	ControlList controls{};

	for (;;) {
		event = nextEvent();
		if (!event)
			return -EINVAL;

		if (event->type == YAML_MAPPING_END_EVENT)
			break;

		int ctrlError = parseControl(std::move(event), controls);
		if (ctrlError)
			return ctrlError;
	}

	frameControls_[frameId] = std::move(controls);

	/* Consume the end of the outer frame entry mapping. */
	return nextEvent(YAML_MAPPING_END_EVENT) ? 0 : -EINVAL;
}

/*
 * Parse one control name/value pair and store it in \a controls.
 *
 * \a event is the scalar event holding the control name; the value is read
 * from the following scalar event.
 *
 * \return 0 on success or a negative error code otherwise
 */
int CaptureScript::parseControl(EventPtr event, ControlList &controls)
{
	/* We expect a value after a key. */
	std::string name = eventScalarValue(event);
	if (name.empty())
		return -EINVAL;

	/*
	 * Controls not supported by the camera cause a parse failure. (A
	 * previous comment claimed unsupported controls were ignored, but the
	 * code returns -EINVAL.)
	 */
	auto it = controls_.find(name);
	if (it == controls_.end()) {
		std::cerr << "Unsupported control '" << name << "'" << std::endl;
		return -EINVAL;
	}

	std::string value = parseScalar();
	if (value.empty())
		return -EINVAL;

	/* Convert the textual value to a typed ControlValue and store it. */
	const ControlId *controlId = it->second;
	ControlValue val = unpackControl(controlId, value);
	controls.set(controlId->id(), val);

	return 0;
}

/*
 * Fetch the next event as a scalar and return its value, or an empty string
 * on parse error or type mismatch.
 */
std::string CaptureScript::parseScalar()
{
	EventPtr event = nextEvent(YAML_SCALAR_EVENT);
	return event ? eventScalarValue(event) : std::string();
}

void CaptureScript::unpackFailure(const ControlId *id, const std::string &repr)
{
	static const std::map<unsigned int, const char *> typeNames = {
		{ ControlTypeNone, "none" },
		{ ControlTypeBool, "bool" },
		{ ControlTypeByte, "byte" },
		{ ControlTypeInteger32, "int32" },
		{ ControlTypeInteger64, "int64" },
		{ ControlTypeFloat, "float" },
		{ ControlTypeString, "string" },
		{ ControlTypeRectangle, "Rectangle" },
		{ ControlTypeSize, "Size" },
	};

	const char *typeName;
	auto it = typeNames.find(id->type());
	if (it != typeNames.end())
		typeName = it->second;
	else
		typeName = "unknown";

	std::cerr << "Unsupported control '" << repr << "' for "
		  << typeName << " control " << id->name() << std::endl;
}

/*
 * Convert the textual representation \a repr to a ControlValue typed after
 * the control \a id.
 *
 * Invalid representations are reported through unpackFailure() and yield an
 * empty (ControlTypeNone) ControlValue. Numeric conversions previously used
 * a NULL end pointer and no validation, so garbage input silently unpacked
 * as 0; they are now validated like the boolean case.
 *
 * \return The unpacked ControlValue, empty on conversion failure
 */
ControlValue CaptureScript::unpackControl(const ControlId *id,
					  const std::string &repr)
{
	ControlValue value{};

	/*
	 * Validate a strto*() conversion: the whole string must have been
	 * consumed and at least one character converted.
	 */
	auto numericValid = [&repr](const char *end) {
		return end != repr.c_str() && *end == '\0';
	};

	switch (id->type()) {
	case ControlTypeNone:
		break;
	case ControlTypeBool: {
		bool val;

		if (repr == "true") {
			val = true;
		} else if (repr == "false") {
			val = false;
		} else {
			unpackFailure(id, repr);
			return value;
		}

		value.set<bool>(val);
		break;
	}
	case ControlTypeByte: {
		char *end = nullptr;
		long val = strtol(repr.c_str(), &end, 10);
		if (!numericValid(end)) {
			unpackFailure(id, repr);
			return value;
		}
		value.set<uint8_t>(static_cast<uint8_t>(val));
		break;
	}
	case ControlTypeInteger32: {
		char *end = nullptr;
		long val = strtol(repr.c_str(), &end, 10);
		if (!numericValid(end)) {
			unpackFailure(id, repr);
			return value;
		}
		value.set<int32_t>(static_cast<int32_t>(val));
		break;
	}
	case ControlTypeInteger64: {
		char *end = nullptr;
		long long val = strtoll(repr.c_str(), &end, 10);
		if (!numericValid(end)) {
			unpackFailure(id, repr);
			return value;
		}
		value.set<int64_t>(val);
		break;
	}
	case ControlTypeFloat: {
		char *end = nullptr;
		float val = strtof(repr.c_str(), &end);
		if (!numericValid(end)) {
			unpackFailure(id, repr);
			return value;
		}
		value.set<float>(val);
		break;
	}
	case ControlTypeString: {
		value.set<std::string>(repr);
		break;
	}
	case ControlTypeRectangle:
		/* \todo Parse rectangles. */
		break;
	case ControlTypeSize:
		/* \todo Parse Sizes. */
		break;
	}

	return value;
}
V4L2) /** * \struct V4L2Capability * \brief struct v4l2_capability object wrapper and helpers * * The V4L2Capability structure manages the information returned by the * VIDIOC_QUERYCAP ioctl. */ /** * \fn V4L2Capability::driver() * \brief Retrieve the driver module name * \return The string containing the name of the driver module */ /** * \fn V4L2Capability::card() * \brief Retrieve the video device card name * \return The string containing the video device name */ /** * \fn V4L2Capability::bus_info() * \brief Retrieve the location of the video device in the system * \return The string containing the video device location */ /** * \fn V4L2Capability::device_caps() * \brief Retrieve the capabilities of the video device * \return The video device specific capabilities if V4L2_CAP_DEVICE_CAPS is * set or driver capabilities otherwise */ /** * \fn V4L2Capability::isMultiplanar() * \brief Identify if the video device implements the V4L2 multiplanar APIs * \return True if the video device supports multiplanar APIs */ /** * \fn V4L2Capability::isCapture() * \brief Identify if the video device captures data * \return True if the video device can capture data */ /** * \fn V4L2Capability::isOutput() * \brief Identify if the video device outputs data * \return True if the video device can output data */ /** * \fn V4L2Capability::isVideo() * \brief Identify if the video device captures or outputs images * \return True if the video device can capture or output images */ /** * \fn V4L2Capability::isM2M() * \brief Identify if the device is a Memory-to-Memory device * \return True if the device can capture and output images using the M2M API */ /** * \fn V4L2Capability::isMeta() * \brief Identify if the video device captures or outputs image meta-data * \return True if the video device can capture or output image meta-data */ /** * \fn V4L2Capability::isVideoCapture() * \brief Identify if the video device captures images * \return True if the video device can capture images */ 
/** * \fn V4L2Capability::isVideoOutput() * \brief Identify if the video device outputs images * \return True if the video device can output images */ /** * \fn V4L2Capability::isMetaCapture() * \brief Identify if the video device captures image meta-data * \return True if the video device can capture image meta-data */ /** * \fn V4L2Capability::isMetaOutput() * \brief Identify if the video device outputs image meta-data * \return True if the video device can output image meta-data */ /** * \fn V4L2Capability::hasStreaming() * \brief Determine if the video device can perform Streaming I/O * \return True if the video device provides Streaming I/O IOCTLs */ /** * \class V4L2DeviceFormat * \brief The V4L2 video device image format and sizes * * This class describes the image format and resolution to be programmed on a * V4L2 video device. The image format is defined by a fourcc code (as specified * by the V4L2 API with the V4L2_PIX_FMT_* macros), a resolution (width and * height) and one to three planes with configurable line stride and a total * per-plane size in bytes. * * Image formats, as defined by the V4L2 APIs, are categorised as packed, * semi-planar and planar, and describe the layout of the image pixel components * stored in memory. * * Packed image formats store pixel components one after the other, in a * contiguous memory area. Examples of packed image formats are YUYV * permutations, RGB with different pixel sub-sampling ratios such as RGB565 or * RGB666 or Raw-Bayer formats such as SRGGB8 or SGRBG12. * * Semi-planar and planar image formats store the pixel components in separate * and possibly non-contiguous memory areas, named planes, whose sizes depend on * the pixel components sub-sampling ratios, which are defined by the format. 
* Semi-planar formats use two planes to store pixel components and notable * examples of such formats are the NV12 and NV16 formats, while planar formats * use three planes to store pixel components and notable examples are YUV422 * and YUV420. * * Image formats supported by the V4L2 API are defined and described in Section * number 2 of the "Part I - Video for Linux API" chapter of the "Linux Media * Infrastructure userspace API", part of the Linux kernel documentation. * * In the context of this document, packed image formats are referred to as * "packed formats" and semi-planar and planar image formats are referred to as * "planar formats". * * V4L2 also defines two different sets of APIs to work with devices that store * planes in contiguous or separate memory areas. They are named "Single-plane * APIs" and "Multi-plane APIs" respectively and are documented in Section 2.1 * and Section 2.2 of the above mentioned "Part I - Video for Linux API" * documentation. * * The single-plane API allows, among other parameters, the configuration of the * image resolution, the pixel format and the stride length. In that case the * stride applies to all planes (possibly sub-sampled). The multi-plane API * allows configuring the resolution, the pixel format and a per-plane stride * length and total size. * * Packed image formats, which occupy a single memory area, are easily described * through the single-plane API. When used on a video device that implements the * multi-plane API, only the size and stride information contained in the first * plane are taken into account. * * Planar image formats, which occupy distinct memory areas, are easily * described through the multi-plane APIs. When used on a video device that * implements the single-plane API, all planes are stored one after the other * in a contiguous memory area, and it is not possible to configure per-plane * stride length and size, but only a global stride length which is applied to * all planes. 
* * The V4L2DeviceFormat class describes both packed and planar image formats, * regardless of the API type (single or multi plane) implemented by the video * device the format has to be applied to. The total size and bytes per line * of images represented with packed formats are configured using the first * entry of the V4L2DeviceFormat::planes array, while the per-plane size and * per-plane stride length of images represented with planar image formats are * configured using the opportune number of entries of the * V4L2DeviceFormat::planes array, as prescribed by the image format * definition (semi-planar formats use 2 entries, while planar formats use the * whole 3 entries). The number of valid entries of the * V4L2DeviceFormat::planes array is defined by the * V4L2DeviceFormat::planesCount value. */ /** * \var V4L2DeviceFormat::size * \brief The image size in pixels */ /** * \var V4L2DeviceFormat::fourcc * \brief The fourcc code describing the pixel encoding scheme * * The fourcc code, as defined by the V4L2 API with the V4L2_PIX_FMT_* macros, * that identifies the image format pixel encoding scheme. */ /** * \var V4L2DeviceFormat::planes * \brief The per-plane memory size information * * Images are stored in memory in one or more data planes. Each data plane has a * specific line stride and memory size, which could differ from the image * visible sizes to accommodate padding at the end of lines and end of planes. * Only the first \ref planesCount entries are considered valid. 
*/ /** * \var V4L2DeviceFormat::planesCount * \brief The number of valid data planes */ /** * \brief Assemble and return a string describing the format * \return A string describing the V4L2DeviceFormat */ const std::string V4L2DeviceFormat::toString() const { std::stringstream ss; ss.fill(0); ss << size.toString() << "-0x" << std::hex << std::setw(8) << fourcc; return ss.str(); } /** * \class V4L2VideoDevice * \brief V4L2VideoDevice object and API * * The V4L2VideoDevice class models an instance of a V4L2 video device. * It is constructed with the path to a V4L2 video device node. The device node * is only opened upon a call to open() which must be checked for success. * * The video device capabilities are validated when the device is opened and the * device is rejected if it is not a suitable V4L2 capture or output video * device, or if the video device does not support streaming I/O. * * No API call other than open(), isOpen() and close() shall be called on an * unopened device instance. * * The V4L2VideoDevice class tracks queued buffers and handles buffer events. It * automatically dequeues completed buffers and emits the \ref bufferReady * signal. * * Upon destruction any device left open will be closed, and any resources * released. */ /** * \brief Construct a V4L2VideoDevice * \param[in] deviceNode The file-system path to the video device node */ V4L2VideoDevice::V4L2VideoDevice(const std::string &deviceNode) : V4L2Device(deviceNode), bufferPool_(nullptr), fdEvent_(nullptr) { /* * We default to an MMAP based CAPTURE video device, however this will * be updated based upon the device capabilities. */ bufferType_ = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; memoryType_ = V4L2_MEMORY_MMAP; } /** * \brief Construct a V4L2VideoDevice from a MediaEntity * \param[in] entity The MediaEntity to build the video device from * * Construct a V4L2VideoDevice from a MediaEntity's device node path. 
*/ V4L2VideoDevice::V4L2VideoDevice(const MediaEntity *entity) : V4L2VideoDevice(entity->deviceNode()) { } V4L2VideoDevice::~V4L2VideoDevice() { close(); } /** * \brief Open the V4L2 video device node and query its capabilities * * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::open() { int ret; ret = V4L2Device::open(O_RDWR | O_NONBLOCK); if (ret < 0) return ret; ret = ioctl(VIDIOC_QUERYCAP, &caps_); if (ret < 0) { LOG(V4L2, Error) << "Failed to query device capabilities: " << strerror(-ret); return ret; } if (!caps_.hasStreaming()) { LOG(V4L2, Error) << "Device does not support streaming I/O"; return -EINVAL; } /* * Set buffer type and wait for read notifications on CAPTURE video * devices (POLLIN), and write notifications for OUTPUT video devices * (POLLOUT). */ if (caps_.isVideoCapture()) { fdEvent_ = new EventNotifier(fd(), EventNotifier::Read); bufferType_ = caps_.isMultiplanar() ? V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE : V4L2_BUF_TYPE_VIDEO_CAPTURE; } else if (caps_.isVideoOutput()) { fdEvent_ = new EventNotifier(fd(), EventNotifier::Write); bufferType_ = caps_.isMultiplanar() ? 
V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE : V4L2_BUF_TYPE_VIDEO_OUTPUT; } else if (caps_.isMetaCapture()) { fdEvent_ = new EventNotifier(fd(), EventNotifier::Read); bufferType_ = V4L2_BUF_TYPE_META_CAPTURE; } else if (caps_.isMetaOutput()) { fdEvent_ = new EventNotifier(fd(), EventNotifier::Write); bufferType_ = V4L2_BUF_TYPE_META_OUTPUT; } else { LOG(V4L2, Error) << "Device is not a supported type"; return -EINVAL; } fdEvent_->activated.connect(this, &V4L2VideoDevice::bufferAvailable); fdEvent_->setEnabled(false); LOG(V4L2, Debug) << "Opened device " << caps_.bus_info() << ": " << caps_.driver() << ": " << caps_.card(); return 0; } /** * \brief Open a V4L2 video device from an opened file handle and query its * capabilities * \param[in] handle The file descriptor to set * \param[in] type The device type to operate on * * This methods opens a video device from the existing file descriptor \a * handle. Like open(), this method queries the capabilities of the device, but * handles it according to the given device \a type instead of determining its * type from the capabilities. This can be used to force a given device type for * memory-to-memory devices. * * The file descriptor \a handle is duplicated, and the caller is responsible * for closing the \a handle when it has no further use for it. The close() * method will close the duplicated file descriptor, leaving \a handle * untouched. 
* * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::open(int handle, enum v4l2_buf_type type) { int ret; int newFd; newFd = dup(handle); if (newFd < 0) { ret = -errno; LOG(V4L2, Error) << "Failed to duplicate file handle: " << strerror(-ret); return ret; } ret = V4L2Device::setFd(newFd); if (ret < 0) { LOG(V4L2, Error) << "Failed to set file handle: " << strerror(-ret); ::close(newFd); return ret; } ret = ioctl(VIDIOC_QUERYCAP, &caps_); if (ret < 0) { LOG(V4L2, Error) << "Failed to query device capabilities: " << strerror(-ret); return ret; } if (!caps_.hasStreaming()) { LOG(V4L2, Error) << "Device does not support streaming I/O"; return -EINVAL; } /* * Set buffer type and wait for read notifications on CAPTURE video * devices (POLLIN), and write notifications for OUTPUT video devices * (POLLOUT). */ switch (type) { case V4L2_BUF_TYPE_VIDEO_OUTPUT: fdEvent_ = new EventNotifier(fd(), EventNotifier::Write); bufferType_ = caps_.isMultiplanar() ? V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE : V4L2_BUF_TYPE_VIDEO_OUTPUT; break; case V4L2_BUF_TYPE_VIDEO_CAPTURE: fdEvent_ = new EventNotifier(fd(), EventNotifier::Read); bufferType_ = caps_.isMultiplanar() ? 
V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE : V4L2_BUF_TYPE_VIDEO_CAPTURE; break; default: LOG(V4L2, Error) << "Unsupported buffer type"; return -EINVAL; } fdEvent_->activated.connect(this, &V4L2VideoDevice::bufferAvailable); fdEvent_->setEnabled(false); LOG(V4L2, Debug) << "Opened device " << caps_.bus_info() << ": " << caps_.driver() << ": " << caps_.card(); return 0; } /** * \brief Close the video device, releasing any resources acquired by open() */ void V4L2VideoDevice::close() { if (!isOpen()) return; releaseBuffers(); delete fdEvent_; V4L2Device::close(); } /** * \fn V4L2VideoDevice::driverName() * \brief Retrieve the name of the V4L2 device driver * \return The string containing the driver name */ /** * \fn V4L2VideoDevice::deviceName() * \brief Retrieve the name of the V4L2 video device * \return The string containing the device name */ /** * \fn V4L2VideoDevice::busName() * \brief Retrieve the location of the device in the system * \return The string containing the device location */ std::string V4L2VideoDevice::logPrefix() const { return deviceNode() + (V4L2_TYPE_IS_OUTPUT(bufferType_) ? "[out]" : "[cap]"); } /** * \brief Retrieve the image format set on the V4L2 video device * \param[out] format The image format applied on the video device * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::getFormat(V4L2DeviceFormat *format) { if (caps_.isMeta()) return getFormatMeta(format); else if (caps_.isMultiplanar()) return getFormatMultiplane(format); else return getFormatSingleplane(format); } /** * \brief Configure an image format on the V4L2 video device * \param[inout] format The image format to apply to the video device * * Apply the supplied \a format to the video device, and return the actually * applied format parameters, as \ref V4L2VideoDevice::getFormat would do. 
* * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::setFormat(V4L2DeviceFormat *format) { if (caps_.isMeta()) return setFormatMeta(format); else if (caps_.isMultiplanar()) return setFormatMultiplane(format); else return setFormatSingleplane(format); } int V4L2VideoDevice::getFormatMeta(V4L2DeviceFormat *format) { struct v4l2_format v4l2Format = {}; struct v4l2_meta_format *pix = &v4l2Format.fmt.meta; int ret; v4l2Format.type = bufferType_; ret = ioctl(VIDIOC_G_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to get format: " << strerror(-ret); return ret; } format->size.width = 0; format->size.height = 0; format->fourcc = pix->dataformat; format->planesCount = 1; format->planes[0].bpl = pix->buffersize; format->planes[0].size = pix->buffersize; return 0; } int V4L2VideoDevice::setFormatMeta(V4L2DeviceFormat *format) { struct v4l2_format v4l2Format = {}; struct v4l2_meta_format *pix = &v4l2Format.fmt.meta; int ret; v4l2Format.type = bufferType_; pix->dataformat = format->fourcc; pix->buffersize = format->planes[0].size; ret = ioctl(VIDIOC_S_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to set format: " << strerror(-ret); return ret; } /* * Return to caller the format actually applied on the video device, * which might differ from the requested one. 
*/ format->size.width = 0; format->size.height = 0; format->fourcc = format->fourcc; format->planesCount = 1; format->planes[0].bpl = pix->buffersize; format->planes[0].size = pix->buffersize; return 0; } int V4L2VideoDevice::getFormatMultiplane(V4L2DeviceFormat *format) { struct v4l2_format v4l2Format = {}; struct v4l2_pix_format_mplane *pix = &v4l2Format.fmt.pix_mp; int ret; v4l2Format.type = bufferType_; ret = ioctl(VIDIOC_G_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to get format: " << strerror(-ret); return ret; } format->size.width = pix->width; format->size.height = pix->height; format->fourcc = pix->pixelformat; format->planesCount = pix->num_planes; for (unsigned int i = 0; i < format->planesCount; ++i) { format->planes[i].bpl = pix->plane_fmt[i].bytesperline; format->planes[i].size = pix->plane_fmt[i].sizeimage; } return 0; } int V4L2VideoDevice::setFormatMultiplane(V4L2DeviceFormat *format) { struct v4l2_format v4l2Format = {}; struct v4l2_pix_format_mplane *pix = &v4l2Format.fmt.pix_mp; int ret; v4l2Format.type = bufferType_; pix->width = format->size.width; pix->height = format->size.height; pix->pixelformat = format->fourcc; pix->num_planes = format->planesCount; pix->field = V4L2_FIELD_NONE; for (unsigned int i = 0; i < pix->num_planes; ++i) { pix->plane_fmt[i].bytesperline = format->planes[i].bpl; pix->plane_fmt[i].sizeimage = format->planes[i].size; } ret = ioctl(VIDIOC_S_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to set format: " << strerror(-ret); return ret; } /* * Return to caller the format actually applied on the video device, * which might differ from the requested one. 
*/ format->size.width = pix->width; format->size.height = pix->height; format->fourcc = pix->pixelformat; format->planesCount = pix->num_planes; for (unsigned int i = 0; i < format->planesCount; ++i) { format->planes[i].bpl = pix->plane_fmt[i].bytesperline; format->planes[i].size = pix->plane_fmt[i].sizeimage; } return 0; } int V4L2VideoDevice::getFormatSingleplane(V4L2DeviceFormat *format) { struct v4l2_format v4l2Format = {}; struct v4l2_pix_format *pix = &v4l2Format.fmt.pix; int ret; v4l2Format.type = bufferType_; ret = ioctl(VIDIOC_G_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to get format: " << strerror(-ret); return ret; } format->size.width = pix->width; format->size.height = pix->height; format->fourcc = pix->pixelformat; format->planesCount = 1; format->planes[0].bpl = pix->bytesperline; format->planes[0].size = pix->sizeimage; return 0; } int V4L2VideoDevice::setFormatSingleplane(V4L2DeviceFormat *format) { struct v4l2_format v4l2Format = {}; struct v4l2_pix_format *pix = &v4l2Format.fmt.pix; int ret; v4l2Format.type = bufferType_; pix->width = format->size.width; pix->height = format->size.height; pix->pixelformat = format->fourcc; pix->bytesperline = format->planes[0].bpl; pix->field = V4L2_FIELD_NONE; ret = ioctl(VIDIOC_S_FMT, &v4l2Format); if (ret) { LOG(V4L2, Error) << "Unable to set format: " << strerror(-ret); return ret; } /* * Return to caller the format actually applied on the device, * which might differ from the requested one. */ format->size.width = pix->width; format->size.height = pix->height; format->fourcc = pix->pixelformat; format->planesCount = 1; format->planes[0].bpl = pix->bytesperline; format->planes[0].size = pix->sizeimage; return 0; } /** * \brief Enumerate all pixel formats and frame sizes * * Enumerate all pixel formats and frame sizes supported by the video device. 
* * \return A list of the supported video device formats */ ImageFormats V4L2VideoDevice::formats() { ImageFormats formats; for (unsigned int pixelformat : enumPixelformats()) { std::vector<SizeRange> sizes = enumSizes(pixelformat); if (sizes.empty()) return {}; if (formats.addFormat(pixelformat, sizes)) { LOG(V4L2, Error) << "Could not add sizes for pixel format " << pixelformat; return {}; } } return formats; } std::vector<unsigned int> V4L2VideoDevice::enumPixelformats() { std::vector<unsigned int> formats; int ret; for (unsigned int index = 0; ; index++) { struct v4l2_fmtdesc pixelformatEnum = {}; pixelformatEnum.index = index; pixelformatEnum.type = bufferType_; ret = ioctl(VIDIOC_ENUM_FMT, &pixelformatEnum); if (ret) break; formats.push_back(pixelformatEnum.pixelformat); } if (ret && ret != -EINVAL) { LOG(V4L2, Error) << "Unable to enumerate pixel formats: " << strerror(-ret); return {}; } return formats; } std::vector<SizeRange> V4L2VideoDevice::enumSizes(unsigned int pixelFormat) { std::vector<SizeRange> sizes; int ret; for (unsigned int index = 0;; index++) { struct v4l2_frmsizeenum frameSize = {}; frameSize.index = index; frameSize.pixel_format = pixelFormat; ret = ioctl(VIDIOC_ENUM_FRAMESIZES, &frameSize); if (ret) break; if (index != 0 && frameSize.type != V4L2_FRMSIZE_TYPE_DISCRETE) { LOG(V4L2, Error) << "Non-zero index for non discrete type"; return {}; } switch (frameSize.type) { case V4L2_FRMSIZE_TYPE_DISCRETE: sizes.emplace_back(frameSize.discrete.width, frameSize.discrete.height); break; case V4L2_FRMSIZE_TYPE_CONTINUOUS: sizes.emplace_back(frameSize.stepwise.min_width, frameSize.stepwise.min_height, frameSize.stepwise.max_width, frameSize.stepwise.max_height); break; case V4L2_FRMSIZE_TYPE_STEPWISE: sizes.emplace_back(frameSize.stepwise.min_width, frameSize.stepwise.min_height, frameSize.stepwise.max_width, frameSize.stepwise.max_height, frameSize.stepwise.step_width, frameSize.stepwise.step_height); break; default: LOG(V4L2, Error) << "Unknown 
VIDIOC_ENUM_FRAMESIZES type " << frameSize.type; return {}; } } if (ret && ret != -EINVAL) { LOG(V4L2, Error) << "Unable to enumerate frame sizes: " << strerror(-ret); return {}; } return sizes; } int V4L2VideoDevice::requestBuffers(unsigned int count) { struct v4l2_requestbuffers rb = {}; int ret; rb.count = count; rb.type = bufferType_; rb.memory = memoryType_; ret = ioctl(VIDIOC_REQBUFS, &rb); if (ret < 0) { LOG(V4L2, Error) << "Unable to request " << count << " buffers: " << strerror(-ret); return ret; } LOG(V4L2, Debug) << rb.count << " buffers requested."; return rb.count; } /** * \brief Request buffers to be allocated from the video device and stored in * the buffer pool provided. * \param[out] pool BufferPool to populate with buffers * \return 0 on success or a negative error code otherwise */ int V4L2VideoDevice::exportBuffers(BufferPool *pool) { unsigned int allocatedBuffers; unsigned int i; int ret; memoryType_ = V4L2_MEMORY_MMAP; ret = requestBuffers(pool->count()); if (ret < 0) return ret; allocatedBuffers = ret; if (allocatedBuffers < pool->count()) { LOG(V4L2, Error) << "Not enough buffers provided by V4L2VideoDevice"; requestBuffers(0); return -ENOMEM; } /* Map the buffers. 
*/ for (i = 0; i < pool->count(); ++i) { struct v4l2_plane planes[VIDEO_MAX_PLANES] = {}; struct v4l2_buffer buf = {}; BufferMemory &buffer = pool->buffers()[i]; buf.index = i; buf.type = bufferType_; buf.memory = memoryType_; buf.length = VIDEO_MAX_PLANES; buf.m.planes = planes; ret = ioctl(VIDIOC_QUERYBUF, &buf); if (ret < 0) { LOG(V4L2, Error) << "Unable to query buffer " << i << ": " << strerror(-ret); break; } if (V4L2_TYPE_IS_MULTIPLANAR(buf.type)) { for (unsigned int p = 0; p < buf.length; ++p) { ret = createPlane(&buffer, i, p, buf.m.planes[p].length); if (ret) break; } } else { ret = createPlane(&buffer, i, 0, buf.length); } if (ret) { LOG(V4L2, Error) << "Failed to create plane"; break; } } if (ret) { requestBuffers(0); pool->destroyBuffers(); return ret; } bufferPool_ = pool; return 0; } int V4L2VideoDevice::createPlane(BufferMemory *buffer, unsigned int index, unsigned int planeIndex, unsigned int length) { struct v4l2_exportbuffer expbuf = {}; int ret; LOG(V4L2, Debug) << "Buffer " << index << " plane " << planeIndex << ": length=" << length; expbuf.type = bufferType_; expbuf.index = index; expbuf.plane = planeIndex; expbuf.flags = O_RDWR;