Diffstat (limited to 'src/apps/cam')
-rw-r--r--  src/apps/cam/camera_session.cpp       469
-rw-r--r--  src/apps/cam/camera_session.h          79
-rw-r--r--  src/apps/cam/capture-script.yaml       71
-rw-r--r--  src/apps/cam/capture_script.cpp       662
-rw-r--r--  src/apps/cam/capture_script.h          73
-rw-r--r--  src/apps/cam/drm.cpp                  717
-rw-r--r--  src/apps/cam/drm.h                    334
-rw-r--r--  src/apps/cam/file_sink.cpp            158
-rw-r--r--  src/apps/cam/file_sink.h               45
-rw-r--r--  src/apps/cam/frame_sink.cpp            67
-rw-r--r--  src/apps/cam/frame_sink.h              32
-rw-r--r--  src/apps/cam/kms_sink.cpp             536
-rw-r--r--  src/apps/cam/kms_sink.h                83
-rw-r--r--  src/apps/cam/main.cpp                 370
-rw-r--r--  src/apps/cam/main.h                    27
-rw-r--r--  src/apps/cam/meson.build               62
-rw-r--r--  src/apps/cam/sdl_sink.cpp             215
-rw-r--r--  src/apps/cam/sdl_sink.h                48
-rw-r--r--  src/apps/cam/sdl_texture.cpp           36
-rw-r--r--  src/apps/cam/sdl_texture.h             30
-rw-r--r--  src/apps/cam/sdl_texture_mjpg.cpp      83
-rw-r--r--  src/apps/cam/sdl_texture_mjpg.h        23
-rw-r--r--  src/apps/cam/sdl_texture_yuv.cpp       33
-rw-r--r--  src/apps/cam/sdl_texture_yuv.h         26
24 files changed, 4279 insertions, 0 deletions
diff --git a/src/apps/cam/camera_session.cpp b/src/apps/cam/camera_session.cpp
new file mode 100644
index 00000000..f13355ba
--- /dev/null
+++ b/src/apps/cam/camera_session.cpp
@@ -0,0 +1,469 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2019, Google Inc.
+ *
+ * Camera capture session
+ */
+
+#include <iomanip>
+#include <iostream>
+#include <limits.h>
+#include <sstream>
+
+#include <libcamera/control_ids.h>
+#include <libcamera/property_ids.h>
+
+#include "../common/event_loop.h"
+#include "../common/stream_options.h"
+
+#include "camera_session.h"
+#include "capture_script.h"
+#include "file_sink.h"
+#ifdef HAVE_KMS
+#include "kms_sink.h"
+#endif
+#include "main.h"
+#ifdef HAVE_SDL
+#include "sdl_sink.h"
+#endif
+
+using namespace libcamera;
+
+CameraSession::CameraSession(CameraManager *cm,
+ const std::string &cameraId,
+ unsigned int cameraIndex,
+ const OptionsParser::Options &options)
+ : options_(options), cameraIndex_(cameraIndex), last_(0),
+ queueCount_(0), captureCount_(0), captureLimit_(0),
+ printMetadata_(false)
+{
+ char *endptr;
+ unsigned long index = strtoul(cameraId.c_str(), &endptr, 10);
+ if (*endptr == '\0' && index > 0 && index <= cm->cameras().size())
+ camera_ = cm->cameras()[index - 1];
+ else
+ camera_ = cm->get(cameraId);
+
+ if (!camera_) {
+ std::cerr << "Camera " << cameraId << " not found" << std::endl;
+ return;
+ }
+
+ if (camera_->acquire()) {
+ std::cerr << "Failed to acquire camera " << cameraId
+ << std::endl;
+ return;
+ }
+
+ std::vector<StreamRole> roles = StreamKeyValueParser::roles(options_[OptStream]);
+
+ std::unique_ptr<CameraConfiguration> config =
+ camera_->generateConfiguration(roles);
+ if (!config || config->size() != roles.size()) {
+ std::cerr << "Failed to get default stream configuration"
+ << std::endl;
+ return;
+ }
+
+ if (options_.isSet(OptOrientation)) {
+ std::string orientOpt = options_[OptOrientation].toString();
+ static const std::map<std::string, libcamera::Orientation> orientations{
+ { "rot0", libcamera::Orientation::Rotate0 },
+ { "rot180", libcamera::Orientation::Rotate180 },
+ { "mirror", libcamera::Orientation::Rotate0Mirror },
+ { "flip", libcamera::Orientation::Rotate180Mirror },
+ };
+
+ auto orientation = orientations.find(orientOpt);
+ if (orientation == orientations.end()) {
+ std::cerr << "Invalid orientation " << orientOpt << std::endl;
+ return;
+ }
+
+ config->orientation = orientation->second;
+ }
+
+ /* Apply configuration if explicitly requested. */
+ if (StreamKeyValueParser::updateConfiguration(config.get(),
+ options_[OptStream])) {
+ std::cerr << "Failed to update configuration" << std::endl;
+ return;
+ }
+
+ bool strictFormats = options_.isSet(OptStrictFormats);
+
+#ifdef HAVE_KMS
+ if (options_.isSet(OptDisplay)) {
+ if (options_.isSet(OptFile)) {
+ std::cerr << "--display and --file options are mutually exclusive"
+ << std::endl;
+ return;
+ }
+
+ if (roles.size() != 1) {
+ std::cerr << "Display doesn't support multiple streams"
+ << std::endl;
+ return;
+ }
+
+ if (roles[0] != StreamRole::Viewfinder) {
+ std::cerr << "Display requires a viewfinder stream"
+ << std::endl;
+ return;
+ }
+ }
+#endif
+
+ if (options_.isSet(OptCaptureScript)) {
+ std::string scriptName = options_[OptCaptureScript].toString();
+ script_ = std::make_unique<CaptureScript>(camera_, scriptName);
+ if (!script_->valid()) {
+ std::cerr << "Invalid capture script '" << scriptName
+ << "'" << std::endl;
+ return;
+ }
+ }
+
+ switch (config->validate()) {
+ case CameraConfiguration::Valid:
+ break;
+
+ case CameraConfiguration::Adjusted:
+ if (strictFormats) {
+ std::cout << "Adjusting camera configuration disallowed by --strict-formats argument"
+ << std::endl;
+ return;
+ }
+ std::cout << "Camera configuration adjusted" << std::endl;
+ break;
+
+ case CameraConfiguration::Invalid:
+ std::cout << "Camera configuration invalid" << std::endl;
+ return;
+ }
+
+ config_ = std::move(config);
+}
+
+CameraSession::~CameraSession()
+{
+ if (camera_)
+ camera_->release();
+}
+
+void CameraSession::listControls() const
+{
+ for (const auto &[id, info] : camera_->controls()) {
+ std::cout << "Control: " << id->name() << ": "
+ << info.toString() << std::endl;
+ }
+}
+
+void CameraSession::listProperties() const
+{
+ for (const auto &[key, value] : camera_->properties()) {
+ const ControlId *id = properties::properties.at(key);
+
+ std::cout << "Property: " << id->name() << " = "
+ << value.toString() << std::endl;
+ }
+}
+
+void CameraSession::infoConfiguration() const
+{
+ unsigned int index = 0;
+ for (const StreamConfiguration &cfg : *config_) {
+ std::cout << index << ": " << cfg.toString() << std::endl;
+
+ const StreamFormats &formats = cfg.formats();
+ for (PixelFormat pixelformat : formats.pixelformats()) {
+ std::cout << " * Pixelformat: "
+ << pixelformat << " "
+ << formats.range(pixelformat).toString()
+ << std::endl;
+
+ for (const Size &size : formats.sizes(pixelformat))
+ std::cout << " - " << size << std::endl;
+ }
+
+ index++;
+ }
+}
+
+int CameraSession::start()
+{
+ int ret;
+
+ queueCount_ = 0;
+ captureCount_ = 0;
+ captureLimit_ = options_[OptCapture].toInteger();
+ printMetadata_ = options_.isSet(OptMetadata);
+
+ ret = camera_->configure(config_.get());
+ if (ret < 0) {
+ std::cout << "Failed to configure camera" << std::endl;
+ return ret;
+ }
+
+ streamNames_.clear();
+ for (unsigned int index = 0; index < config_->size(); ++index) {
+ StreamConfiguration &cfg = config_->at(index);
+ streamNames_[cfg.stream()] = "cam" + std::to_string(cameraIndex_)
+ + "-stream" + std::to_string(index);
+ }
+
+ camera_->requestCompleted.connect(this, &CameraSession::requestComplete);
+
+#ifdef HAVE_KMS
+ if (options_.isSet(OptDisplay))
+ sink_ = std::make_unique<KMSSink>(options_[OptDisplay].toString());
+#endif
+
+#ifdef HAVE_SDL
+ if (options_.isSet(OptSDL))
+ sink_ = std::make_unique<SDLSink>();
+#endif
+
+ if (options_.isSet(OptFile)) {
+ if (!options_[OptFile].toString().empty())
+ sink_ = std::make_unique<FileSink>(camera_.get(), streamNames_,
+ options_[OptFile]);
+ else
+ sink_ = std::make_unique<FileSink>(camera_.get(), streamNames_);
+ }
+
+ if (sink_) {
+ ret = sink_->configure(*config_);
+ if (ret < 0) {
+ std::cout << "Failed to configure frame sink"
+ << std::endl;
+ return ret;
+ }
+
+ sink_->requestProcessed.connect(this, &CameraSession::sinkRelease);
+ }
+
+ allocator_ = std::make_unique<FrameBufferAllocator>(camera_);
+
+ return startCapture();
+}
+
+void CameraSession::stop()
+{
+ int ret = camera_->stop();
+ if (ret)
+ std::cout << "Failed to stop capture" << std::endl;
+
+ if (sink_) {
+ ret = sink_->stop();
+ if (ret)
+ std::cout << "Failed to stop frame sink" << std::endl;
+ }
+
+ sink_.reset();
+
+ requests_.clear();
+
+ allocator_.reset();
+}
+
+int CameraSession::startCapture()
+{
+ int ret;
+
+ /* Identify the stream with the fewest allocated buffers. */
+ unsigned int nbuffers = UINT_MAX;
+ for (StreamConfiguration &cfg : *config_) {
+ ret = allocator_->allocate(cfg.stream());
+ if (ret < 0) {
+ std::cerr << "Can't allocate buffers" << std::endl;
+ return -ENOMEM;
+ }
+
+ unsigned int allocated = allocator_->buffers(cfg.stream()).size();
+ nbuffers = std::min(nbuffers, allocated);
+ }
+
+ /*
+ * TODO: Make the cam tool smarter to support still capture, for
+ * example by pushing a button. For now, run all streams all the time.
+ */
+
+ for (unsigned int i = 0; i < nbuffers; i++) {
+ std::unique_ptr<Request> request = camera_->createRequest();
+ if (!request) {
+ std::cerr << "Can't create request" << std::endl;
+ return -ENOMEM;
+ }
+
+ for (StreamConfiguration &cfg : *config_) {
+ Stream *stream = cfg.stream();
+ const std::vector<std::unique_ptr<FrameBuffer>> &buffers =
+ allocator_->buffers(stream);
+ const std::unique_ptr<FrameBuffer> &buffer = buffers[i];
+
+ ret = request->addBuffer(stream, buffer.get());
+ if (ret < 0) {
+ std::cerr << "Can't set buffer for request"
+ << std::endl;
+ return ret;
+ }
+
+ if (sink_)
+ sink_->mapBuffer(buffer.get());
+ }
+
+ requests_.push_back(std::move(request));
+ }
+
+ if (sink_) {
+ ret = sink_->start();
+ if (ret) {
+ std::cout << "Failed to start frame sink" << std::endl;
+ return ret;
+ }
+ }
+
+ ret = camera_->start();
+ if (ret) {
+ std::cout << "Failed to start capture" << std::endl;
+ if (sink_)
+ sink_->stop();
+ return ret;
+ }
+
+ for (std::unique_ptr<Request> &request : requests_) {
+ ret = queueRequest(request.get());
+ if (ret < 0) {
+ std::cerr << "Can't queue request" << std::endl;
+ camera_->stop();
+ if (sink_)
+ sink_->stop();
+ return ret;
+ }
+ }
+
+ if (captureLimit_)
+ std::cout << "cam" << cameraIndex_
+ << ": Capture " << captureLimit_ << " frames"
+ << std::endl;
+ else
+ std::cout << "cam" << cameraIndex_
+ << ": Capture until user interrupts by SIGINT"
+ << std::endl;
+
+ return 0;
+}
+
+int CameraSession::queueRequest(Request *request)
+{
+ if (captureLimit_ && queueCount_ >= captureLimit_)
+ return 0;
+
+ if (script_)
+ request->controls() = script_->frameControls(queueCount_);
+
+ queueCount_++;
+
+ return camera_->queueRequest(request);
+}
+
+void CameraSession::requestComplete(Request *request)
+{
+ if (request->status() == Request::RequestCancelled)
+ return;
+
+ /*
+ * Defer processing of the completed request to the event loop, to avoid
+ * blocking the camera manager thread.
+ */
+ EventLoop::instance()->callLater([this, request]() { processRequest(request); });
+}
+
+void CameraSession::processRequest(Request *request)
+{
+ /*
+ * If we've reached the capture limit, we're done. This doesn't
+ * duplicate the check below that emits the captureDone signal, as this
+ * function will be called for each request still in flight after the
+ * capture limit is reached and we don't want to emit the signal every
+ * single time.
+ */
+ if (captureLimit_ && captureCount_ >= captureLimit_)
+ return;
+
+ const Request::BufferMap &buffers = request->buffers();
+
+ /*
+ * Compute the frame rate. The timestamp is arbitrarily retrieved from
+ * the first buffer, as all buffers should have matching timestamps.
+ */
+ uint64_t ts = buffers.begin()->second->metadata().timestamp;
+ double fps = ts - last_;
+ fps = last_ != 0 && fps ? 1000000000.0 / fps : 0.0;
+ last_ = ts;
+
+ bool requeue = true;
+
+ std::stringstream info;
+ info << ts / 1000000000 << "."
+ << std::setw(6) << std::setfill('0') << ts / 1000 % 1000000
+ << " (" << std::fixed << std::setprecision(2) << fps << " fps)";
+
+ for (const auto &[stream, buffer] : buffers) {
+ const FrameMetadata &metadata = buffer->metadata();
+
+ info << " " << streamNames_[stream]
+ << " seq: " << std::setw(6) << std::setfill('0') << metadata.sequence
+ << " bytesused: ";
+
+ unsigned int nplane = 0;
+ for (const FrameMetadata::Plane &plane : metadata.planes()) {
+ info << plane.bytesused;
+ if (++nplane < metadata.planes().size())
+ info << "/";
+ }
+ }
+
+ if (sink_) {
+ if (!sink_->processRequest(request))
+ requeue = false;
+ }
+
+ std::cout << info.str() << std::endl;
+
+ if (printMetadata_) {
+ const ControlList &requestMetadata = request->metadata();
+ for (const auto &[key, value] : requestMetadata) {
+ const ControlId *id = controls::controls.at(key);
+ std::cout << "\t" << id->name() << " = "
+ << value.toString() << std::endl;
+ }
+ }
+
+ /*
+ * Notify the user that capture is complete if the limit has just been
+ * reached.
+ */
+ captureCount_++;
+ if (captureLimit_ && captureCount_ >= captureLimit_) {
+ captureDone.emit();
+ return;
+ }
+
+ /*
+ * If the frame sink holds on to the request, we'll requeue it later in the
+ * complete handler.
+ */
+ if (!requeue)
+ return;
+
+ request->reuse(Request::ReuseBuffers);
+ queueRequest(request);
+}
+
+void CameraSession::sinkRelease(Request *request)
+{
+ request->reuse(Request::ReuseBuffers);
+ queueRequest(request);
+}
diff --git a/src/apps/cam/camera_session.h b/src/apps/cam/camera_session.h
new file mode 100644
index 00000000..4442fd9b
--- /dev/null
+++ b/src/apps/cam/camera_session.h
@@ -0,0 +1,79 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2019, Google Inc.
+ *
+ * Camera capture session
+ */
+
+#pragma once
+
+#include <memory>
+#include <stdint.h>
+#include <string>
+#include <vector>
+
+#include <libcamera/base/signal.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/camera_manager.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/framebuffer_allocator.h>
+#include <libcamera/request.h>
+#include <libcamera/stream.h>
+
+#include "../common/options.h"
+
+class CaptureScript;
+class FrameSink;
+
+class CameraSession
+{
+public:
+ CameraSession(libcamera::CameraManager *cm,
+ const std::string &cameraId, unsigned int cameraIndex,
+ const OptionsParser::Options &options);
+ ~CameraSession();
+
+ bool isValid() const { return config_ != nullptr; }
+ const OptionsParser::Options &options() { return options_; }
+
+ libcamera::Camera *camera() { return camera_.get(); }
+ libcamera::CameraConfiguration *config() { return config_.get(); }
+
+ void listControls() const;
+ void listProperties() const;
+ void infoConfiguration() const;
+
+ int start();
+ void stop();
+
+ libcamera::Signal<> captureDone;
+
+private:
+ int startCapture();
+
+ int queueRequest(libcamera::Request *request);
+ void requestComplete(libcamera::Request *request);
+ void processRequest(libcamera::Request *request);
+ void sinkRelease(libcamera::Request *request);
+
+ const OptionsParser::Options &options_;
+ std::shared_ptr<libcamera::Camera> camera_;
+ std::unique_ptr<libcamera::CameraConfiguration> config_;
+
+ std::unique_ptr<CaptureScript> script_;
+
+ std::map<const libcamera::Stream *, std::string> streamNames_;
+ std::unique_ptr<FrameSink> sink_;
+ unsigned int cameraIndex_;
+
+ uint64_t last_;
+
+ unsigned int queueCount_;
+ unsigned int captureCount_;
+ unsigned int captureLimit_;
+ bool printMetadata_;
+
+ std::unique_ptr<libcamera::FrameBufferAllocator> allocator_;
+ std::vector<std::unique_ptr<libcamera::Request>> requests_;
+};
diff --git a/src/apps/cam/capture-script.yaml b/src/apps/cam/capture-script.yaml
new file mode 100644
index 00000000..7118865e
--- /dev/null
+++ b/src/apps/cam/capture-script.yaml
@@ -0,0 +1,71 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Capture script example
+#
+# A capture script associates a list of controls and their values with
+# frame numbers.
+#
+# The script defines a list of frames, each with associated controls, and
+# an optional list of properties that control the script behaviour.
+
+# properties:
+# # Repeat the controls every 'idx' frames.
+# - loop: idx
+#
+# # List of frame number with associated a list of controls to be applied
+# frames:
+# - frame-number:
+# Control1: value1
+# Control2: value2
+
+# \todo Formally define the capture script structure with a schema
+
+# Notes:
+# - Controls have to be specified by name, as defined in the
+# libcamera::controls:: enumeration
+# - Controls not supported by the camera in use cause the script to be rejected
+# - Frame numbers shall be monotonically increasing; gaps are allowed
+# - If a loop limit is specified, frame numbers in the 'frames' list shall be
+# less than the loop limit
+
+# Example: Turn brightness up and down every 460 frames
+
+properties:
+ - loop: 460
+
+frames:
+ - 0:
+ Brightness: 0.0
+
+ - 40:
+ Brightness: 0.2
+
+ - 80:
+ Brightness: 0.4
+
+ - 120:
+ Brightness: 0.8
+
+ - 160:
+ Brightness: 0.4
+
+ - 200:
+ Brightness: 0.2
+
+ - 240:
+ Brightness: 0.0
+
+ - 280:
+ Brightness: -0.2
+
+ - 300:
+ Brightness: -0.4
+
+ - 340:
+ Brightness: -0.8
+
+ - 380:
+ Brightness: -0.4
+
+ - 420:
+ Brightness: -0.2
diff --git a/src/apps/cam/capture_script.cpp b/src/apps/cam/capture_script.cpp
new file mode 100644
index 00000000..fc1dfa75
--- /dev/null
+++ b/src/apps/cam/capture_script.cpp
@@ -0,0 +1,662 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Ideas on Board Oy
+ *
+ * Capture session configuration script
+ */
+
+#include "capture_script.h"
+
+#include <iostream>
+#include <stdio.h>
+#include <stdlib.h>
+
+using namespace libcamera;
+
+CaptureScript::CaptureScript(std::shared_ptr<Camera> camera,
+ const std::string &fileName)
+ : camera_(camera), loop_(0), valid_(false)
+{
+ FILE *fh = fopen(fileName.c_str(), "r");
+ if (!fh) {
+ int ret = -errno;
+ std::cerr << "Failed to open capture script " << fileName
+ << ": " << strerror(-ret) << std::endl;
+ return;
+ }
+
+ /*
+ * Map the camera's controls to their names so that they can be
+ * easily identified when parsing the script file.
+ */
+ for (const auto &[control, info] : camera_->controls())
+ controls_[control->name()] = control;
+
+ int ret = parseScript(fh);
+ fclose(fh);
+ if (ret)
+ return;
+
+ valid_ = true;
+}
+
+/* Retrieve the control list associated with a frame number. */
+const ControlList &CaptureScript::frameControls(unsigned int frame)
+{
+ static ControlList controls{};
+ unsigned int idx = frame;
+
+ /* If we loop, repeat the controls every 'loop_' frames. */
+ if (loop_)
+ idx = frame % loop_;
+
+ auto it = frameControls_.find(idx);
+ if (it == frameControls_.end())
+ return controls;
+
+ return it->second;
+}
+
+CaptureScript::EventPtr CaptureScript::nextEvent(yaml_event_type_t expectedType)
+{
+ EventPtr event(new yaml_event_t);
+
+ if (!yaml_parser_parse(&parser_, event.get()))
+ return nullptr;
+
+ if (expectedType != YAML_NO_EVENT && !checkEvent(event, expectedType))
+ return nullptr;
+
+ return event;
+}
+
+bool CaptureScript::checkEvent(const EventPtr &event, yaml_event_type_t expectedType) const
+{
+ if (event->type != expectedType) {
+ std::cerr << "Capture script error on line " << event->start_mark.line
+ << " column " << event->start_mark.column << ": "
+ << "Expected " << eventTypeName(expectedType)
+ << " event, got " << eventTypeName(event->type)
+ << std::endl;
+ return false;
+ }
+
+ return true;
+}
+
+std::string CaptureScript::eventScalarValue(const EventPtr &event)
+{
+ return std::string(reinterpret_cast<char *>(event->data.scalar.value),
+ event->data.scalar.length);
+}
+
+std::string CaptureScript::eventTypeName(yaml_event_type_t type)
+{
+ static const std::map<yaml_event_type_t, std::string> typeNames = {
+ { YAML_STREAM_START_EVENT, "stream-start" },
+ { YAML_STREAM_END_EVENT, "stream-end" },
+ { YAML_DOCUMENT_START_EVENT, "document-start" },
+ { YAML_DOCUMENT_END_EVENT, "document-end" },
+ { YAML_ALIAS_EVENT, "alias" },
+ { YAML_SCALAR_EVENT, "scalar" },
+ { YAML_SEQUENCE_START_EVENT, "sequence-start" },
+ { YAML_SEQUENCE_END_EVENT, "sequence-end" },
+ { YAML_MAPPING_START_EVENT, "mapping-start" },
+ { YAML_MAPPING_END_EVENT, "mapping-end" },
+ };
+
+ auto it = typeNames.find(type);
+ if (it == typeNames.end())
+ return "[type " + std::to_string(type) + "]";
+
+ return it->second;
+}
+
+int CaptureScript::parseScript(FILE *script)
+{
+ int ret = yaml_parser_initialize(&parser_);
+ if (!ret) {
+ std::cerr << "Failed to initialize yaml parser" << std::endl;
+ return ret;
+ }
+
+ /* Delete the parser upon function exit. */
+ struct ParserDeleter {
+ ParserDeleter(yaml_parser_t *parser) : parser_(parser) { }
+ ~ParserDeleter() { yaml_parser_delete(parser_); }
+ yaml_parser_t *parser_;
+ } deleter(&parser_);
+
+ yaml_parser_set_input_file(&parser_, script);
+
+ EventPtr event = nextEvent(YAML_STREAM_START_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ event = nextEvent(YAML_DOCUMENT_START_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ event = nextEvent(YAML_MAPPING_START_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ while (1) {
+ event = nextEvent();
+ if (!event)
+ return -EINVAL;
+
+ if (event->type == YAML_MAPPING_END_EVENT)
+ return 0;
+
+ if (!checkEvent(event, YAML_SCALAR_EVENT))
+ return -EINVAL;
+
+ std::string section = eventScalarValue(event);
+
+ if (section == "properties") {
+ ret = parseProperties();
+ if (ret)
+ return ret;
+ } else if (section == "frames") {
+ ret = parseFrames();
+ if (ret)
+ return ret;
+ } else {
+ std::cerr << "Unsupported section '" << section << "'"
+ << std::endl;
+ return -EINVAL;
+ }
+ }
+}
+
+int CaptureScript::parseProperty()
+{
+ EventPtr event = nextEvent(YAML_MAPPING_START_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ std::string prop = parseScalar();
+ if (prop.empty())
+ return -EINVAL;
+
+ if (prop == "loop") {
+ event = nextEvent();
+ if (!event)
+ return -EINVAL;
+
+ std::string value = eventScalarValue(event);
+ if (value.empty())
+ return -EINVAL;
+
+ loop_ = atoi(value.c_str());
+ if (!loop_) {
+ std::cerr << "Invalid loop limit '" << loop_ << "'"
+ << std::endl;
+ return -EINVAL;
+ }
+ } else {
+ std::cerr << "Unsupported property '" << prop << "'" << std::endl;
+ return -EINVAL;
+ }
+
+ event = nextEvent(YAML_MAPPING_END_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ return 0;
+}
+
+int CaptureScript::parseProperties()
+{
+ EventPtr event = nextEvent(YAML_SEQUENCE_START_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ while (1) {
+ if (event->type == YAML_SEQUENCE_END_EVENT)
+ return 0;
+
+ int ret = parseProperty();
+ if (ret)
+ return ret;
+
+ event = nextEvent();
+ if (!event)
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+int CaptureScript::parseFrames()
+{
+ EventPtr event = nextEvent(YAML_SEQUENCE_START_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ while (1) {
+ event = nextEvent();
+ if (!event)
+ return -EINVAL;
+
+ if (event->type == YAML_SEQUENCE_END_EVENT)
+ return 0;
+
+ int ret = parseFrame(std::move(event));
+ if (ret)
+ return ret;
+ }
+}
+
+int CaptureScript::parseFrame(EventPtr event)
+{
+ if (!checkEvent(event, YAML_MAPPING_START_EVENT))
+ return -EINVAL;
+
+ std::string key = parseScalar();
+ if (key.empty())
+ return -EINVAL;
+
+ unsigned int frameId = atoi(key.c_str());
+ if (loop_ && frameId >= loop_) {
+ std::cerr
+ << "Frame id (" << frameId << ") shall be smaller than"
+ << "loop limit (" << loop_ << ")" << std::endl;
+ return -EINVAL;
+ }
+
+ event = nextEvent(YAML_MAPPING_START_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ ControlList controls{};
+
+ while (1) {
+ event = nextEvent();
+ if (!event)
+ return -EINVAL;
+
+ if (event->type == YAML_MAPPING_END_EVENT)
+ break;
+
+ int ret = parseControl(std::move(event), controls);
+ if (ret)
+ return ret;
+ }
+
+ frameControls_[frameId] = std::move(controls);
+
+ event = nextEvent(YAML_MAPPING_END_EVENT);
+ if (!event)
+ return -EINVAL;
+
+ return 0;
+}
+
+int CaptureScript::parseControl(EventPtr event, ControlList &controls)
+{
+ /* We expect a value after a key. */
+ std::string name = eventScalarValue(event);
+ if (name.empty())
+ return -EINVAL;
+
+ /* Reject the control if the camera does not support it. */
+ auto it = controls_.find(name);
+ if (it == controls_.end()) {
+ std::cerr << "Unsupported control '" << name << "'" << std::endl;
+ return -EINVAL;
+ }
+
+ const ControlId *controlId = it->second;
+
+ ControlValue val = unpackControl(controlId);
+ if (val.isNone()) {
+ std::cerr << "Error unpacking control '" << name << "'"
+ << std::endl;
+ return -EINVAL;
+ }
+
+ controls.set(controlId->id(), val);
+
+ return 0;
+}
+
+std::string CaptureScript::parseScalar()
+{
+ EventPtr event = nextEvent(YAML_SCALAR_EVENT);
+ if (!event)
+ return "";
+
+ return eventScalarValue(event);
+}
+
+ControlValue CaptureScript::parseRectangles()
+{
+ std::vector<libcamera::Rectangle> rectangles;
+
+ std::vector<std::vector<std::string>> arrays = parseArrays();
+ if (arrays.empty())
+ return {};
+
+ for (const std::vector<std::string> &values : arrays) {
+ if (values.size() != 4) {
+ std::cerr << "Error parsing Rectangle: expected "
+ << "array with 4 parameters" << std::endl;
+ return {};
+ }
+
+ Rectangle rect = unpackRectangle(values);
+ rectangles.push_back(rect);
+ }
+
+ ControlValue controlValue;
+ if (rectangles.size() == 1)
+ controlValue.set(rectangles.at(0));
+ else
+ controlValue.set(Span<const Rectangle>(rectangles));
+
+ return controlValue;
+}
+
+std::vector<std::vector<std::string>> CaptureScript::parseArrays()
+{
+ EventPtr event = nextEvent(YAML_SEQUENCE_START_EVENT);
+ if (!event)
+ return {};
+
+ event = nextEvent();
+ if (!event)
+ return {};
+
+ std::vector<std::vector<std::string>> valueArrays;
+
+ /* Parse single array. */
+ if (event->type == YAML_SCALAR_EVENT) {
+ std::string firstValue = eventScalarValue(event);
+ if (firstValue.empty())
+ return {};
+
+ std::vector<std::string> remaining = parseSingleArray();
+
+ std::vector<std::string> values = { firstValue };
+ values.insert(std::end(values),
+ std::begin(remaining), std::end(remaining));
+ valueArrays.push_back(values);
+
+ return valueArrays;
+ }
+
+ /* Parse array of arrays. */
+ while (1) {
+ switch (event->type) {
+ case YAML_SEQUENCE_START_EVENT: {
+ std::vector<std::string> values = parseSingleArray();
+ valueArrays.push_back(values);
+ break;
+ }
+ case YAML_SEQUENCE_END_EVENT:
+ return valueArrays;
+ default:
+ return {};
+ }
+
+ event = nextEvent();
+ if (!event)
+ return {};
+ }
+}
+
+std::vector<std::string> CaptureScript::parseSingleArray()
+{
+ std::vector<std::string> values;
+
+ while (1) {
+ EventPtr event = nextEvent();
+ if (!event)
+ return {};
+
+ switch (event->type) {
+ case YAML_SCALAR_EVENT: {
+ std::string value = eventScalarValue(event);
+ if (value.empty())
+ return {};
+ values.push_back(value);
+ break;
+ }
+ case YAML_SEQUENCE_END_EVENT:
+ return values;
+ default:
+ return {};
+ }
+ }
+}
+
+void CaptureScript::unpackFailure(const ControlId *id, const std::string &repr)
+{
+ static const std::map<unsigned int, const char *> typeNames = {
+ { ControlTypeNone, "none" },
+ { ControlTypeBool, "bool" },
+ { ControlTypeByte, "byte" },
+ { ControlTypeInteger32, "int32" },
+ { ControlTypeInteger64, "int64" },
+ { ControlTypeFloat, "float" },
+ { ControlTypeString, "string" },
+ { ControlTypeRectangle, "Rectangle" },
+ { ControlTypeSize, "Size" },
+ };
+
+ const char *typeName;
+ auto it = typeNames.find(id->type());
+ if (it != typeNames.end())
+ typeName = it->second;
+ else
+ typeName = "unknown";
+
+ std::cerr << "Unsupported control '" << repr << "' for "
+ << typeName << " control " << id->name() << std::endl;
+}
+
+ControlValue CaptureScript::parseScalarControl(const ControlId *id,
+ const std::string repr)
+{
+ ControlValue value{};
+
+ switch (id->type()) {
+ case ControlTypeNone:
+ break;
+ case ControlTypeBool: {
+ bool val;
+
+ if (repr == "true") {
+ val = true;
+ } else if (repr == "false") {
+ val = false;
+ } else {
+ unpackFailure(id, repr);
+ return value;
+ }
+
+ value.set<bool>(val);
+ break;
+ }
+ case ControlTypeByte: {
+ uint8_t val = strtol(repr.c_str(), NULL, 10);
+ value.set<uint8_t>(val);
+ break;
+ }
+ case ControlTypeInteger32: {
+ int32_t val = strtol(repr.c_str(), NULL, 10);
+ value.set<int32_t>(val);
+ break;
+ }
+ case ControlTypeInteger64: {
+ int64_t val = strtoll(repr.c_str(), NULL, 10);
+ value.set<int64_t>(val);
+ break;
+ }
+ case ControlTypeFloat: {
+ float val = strtof(repr.c_str(), NULL);
+ value.set<float>(val);
+ break;
+ }
+ case ControlTypeString: {
+ value.set<std::string>(repr);
+ break;
+ }
+ default:
+ std::cerr << "Unsupported control type" << std::endl;
+ break;
+ }
+
+ return value;
+}
+
+ControlValue CaptureScript::parseArrayControl(const ControlId *id,
+ const std::vector<std::string> &repr)
+{
+ ControlValue value{};
+
+ switch (id->type()) {
+ case ControlTypeNone:
+ break;
+ case ControlTypeBool: {
+ /*
+ * This is unpleasant, but we cannot use a std::vector<> here, as its
+ * boolean specialization does not allow access to the raw data:
+ * boolean values are stored in a bitmask for efficiency.
+ *
+ * As we need a contiguous memory region to wrap in a Span<>,
+ * use an array instead but be strict about not overflowing it
+ * by limiting the number of controls we can store.
+ *
+ * Be loud but do not fail, as the issue would present at
+ * runtime and it's not fatal.
+ */
+ static constexpr unsigned int kMaxNumBooleanControls = 1024;
+ std::array<bool, kMaxNumBooleanControls> values;
+ unsigned int idx = 0;
+
+ for (const std::string &s : repr) {
+ bool val;
+
+ if (s == "true") {
+ val = true;
+ } else if (s == "false") {
+ val = false;
+ } else {
+ unpackFailure(id, s);
+ return value;
+ }
+
+ if (idx == kMaxNumBooleanControls) {
+ std::cerr << "Cannot parse more than "
+ << kMaxNumBooleanControls
+ << " boolean controls" << std::endl;
+ break;
+ }
+
+ values[idx++] = val;
+ }
+
+ value = Span<bool>(values.data(), idx);
+ break;
+ }
+ case ControlTypeByte: {
+ std::vector<uint8_t> values;
+ for (const std::string &s : repr) {
+ uint8_t val = strtoll(s.c_str(), NULL, 10);
+ values.push_back(val);
+ }
+
+ value = Span<const uint8_t>(values.data(), values.size());
+ break;
+ }
+ case ControlTypeInteger32: {
+ std::vector<int32_t> values;
+ for (const std::string &s : repr) {
+ int32_t val = strtoll(s.c_str(), NULL, 10);
+ values.push_back(val);
+ }
+
+ value = Span<const int32_t>(values.data(), values.size());
+ break;
+ }
+ case ControlTypeInteger64: {
+ std::vector<int64_t> values;
+ for (const std::string &s : repr) {
+ int64_t val = strtoll(s.c_str(), NULL, 10);
+ values.push_back(val);
+ }
+
+ value = Span<const int64_t>(values.data(), values.size());
+ break;
+ }
+ case ControlTypeFloat: {
+ std::vector<float> values;
+ for (const std::string &s : repr)
+ values.push_back(strtof(s.c_str(), NULL));
+
+ value = Span<const float>(values.data(), values.size());
+ break;
+ }
+ case ControlTypeString: {
+ value = Span<const std::string>(repr.data(), repr.size());
+ break;
+ }
+ default:
+ std::cerr << "Unsupported control type" << std::endl;
+ break;
+ }
+
+ return value;
+}
+
+ControlValue CaptureScript::unpackControl(const ControlId *id)
+{
+ /* Parse complex types. */
+ switch (id->type()) {
+ case ControlTypeRectangle:
+ return parseRectangles();
+ case ControlTypeSize:
+ /* \todo Parse Sizes. */
+ return {};
+ default:
+ break;
+ }
+
+ /* Check if the control has a single scalar value or is an array. */
+ EventPtr event = nextEvent();
+ if (!event)
+ return {};
+
+ switch (event->type) {
+ case YAML_SCALAR_EVENT: {
+ const std::string repr = eventScalarValue(event);
+ if (repr.empty())
+ return {};
+
+ return parseScalarControl(id, repr);
+ }
+ case YAML_SEQUENCE_START_EVENT: {
+ std::vector<std::string> array = parseSingleArray();
+ if (array.empty())
+ return {};
+
+ return parseArrayControl(id, array);
+ }
+ default:
+ std::cerr << "Unexpected event type: " << event->type << std::endl;
+ return {};
+ }
+}
+
+libcamera::Rectangle CaptureScript::unpackRectangle(const std::vector<std::string> &strVec)
+{
+ int x = strtol(strVec[0].c_str(), NULL, 10);
+ int y = strtol(strVec[1].c_str(), NULL, 10);
+ unsigned int width = strtoul(strVec[2].c_str(), NULL, 10);
+ unsigned int height = strtoul(strVec[3].c_str(), NULL, 10);
+
+ return Rectangle(x, y, width, height);
+}
diff --git a/src/apps/cam/capture_script.h b/src/apps/cam/capture_script.h
new file mode 100644
index 00000000..294b9203
--- /dev/null
+++ b/src/apps/cam/capture_script.h
@@ -0,0 +1,73 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Ideas on Board Oy
+ *
+ * Capture session configuration script
+ */
+
+#pragma once
+
+#include <map>
+#include <memory>
+#include <string>
+
+#include <libcamera/camera.h>
+#include <libcamera/controls.h>
+
+#include <yaml.h>
+
+class CaptureScript
+{
+public:
+ CaptureScript(std::shared_ptr<libcamera::Camera> camera,
+ const std::string &fileName);
+
+ bool valid() const { return valid_; }
+
+ const libcamera::ControlList &frameControls(unsigned int frame);
+
+private:
+ struct EventDeleter {
+ void operator()(yaml_event_t *event) const
+ {
+ yaml_event_delete(event);
+ delete event;
+ }
+ };
+ using EventPtr = std::unique_ptr<yaml_event_t, EventDeleter>;
+
+ std::map<std::string, const libcamera::ControlId *> controls_;
+ std::map<unsigned int, libcamera::ControlList> frameControls_;
+ std::shared_ptr<libcamera::Camera> camera_;
+ yaml_parser_t parser_;
+ unsigned int loop_;
+ bool valid_;
+
+ EventPtr nextEvent(yaml_event_type_t expectedType = YAML_NO_EVENT);
+ bool checkEvent(const EventPtr &event, yaml_event_type_t expectedType) const;
+ static std::string eventScalarValue(const EventPtr &event);
+ static std::string eventTypeName(yaml_event_type_t type);
+
+ int parseScript(FILE *script);
+
+ int parseProperties();
+ int parseProperty();
+ int parseFrames();
+ int parseFrame(EventPtr event);
+ int parseControl(EventPtr event, libcamera::ControlList &controls);
+
+ libcamera::ControlValue parseScalarControl(const libcamera::ControlId *id,
+ const std::string repr);
+ libcamera::ControlValue parseArrayControl(const libcamera::ControlId *id,
+ const std::vector<std::string> &repr);
+
+ std::string parseScalar();
+ libcamera::ControlValue parseRectangles();
+ std::vector<std::vector<std::string>> parseArrays();
+ std::vector<std::string> parseSingleArray();
+
+ void unpackFailure(const libcamera::ControlId *id,
+ const std::string &repr);
+ libcamera::ControlValue unpackControl(const libcamera::ControlId *id);
+ libcamera::Rectangle unpackRectangle(const std::vector<std::string> &strVec);
+};
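
A note on usage: the CaptureScript class declared above is driven from CameraSession::queueRequest() in camera_session.cpp, which copies the per-frame controls into each request before queuing it. The sketch below shows the same flow in isolation; the helper names and the hard-coded script path are illustrative only and not part of the patch, and the camera is assumed to be already acquired and configured.

#include <memory>

#include <libcamera/camera.h>
#include <libcamera/request.h>

#include "capture_script.h"

/* Load a script and refuse to use it if parsing failed. */
static std::unique_ptr<CaptureScript>
loadScript(std::shared_ptr<libcamera::Camera> camera)
{
        auto script = std::make_unique<CaptureScript>(camera, "capture-script.yaml");
        if (!script->valid())
                return nullptr;

        return script;
}

/* Queue a request with the controls the script defines for this frame. */
static int queueScriptedRequest(libcamera::Camera *camera, CaptureScript &script,
                                libcamera::Request *request, unsigned int frame)
{
        /* frameControls() returns an empty list for frames the script doesn't cover. */
        request->controls() = script.frameControls(frame);

        return camera->queueRequest(request);
}
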
diff --git a/src/apps/cam/drm.cpp b/src/apps/cam/drm.cpp
new file mode 100644
index 00000000..47bbb6b0
--- /dev/null
+++ b/src/apps/cam/drm.cpp
@@ -0,0 +1,717 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2021, Ideas on Board Oy
+ *
+ * DRM/KMS Helpers
+ */
+
+#include "drm.h"
+
+#include <algorithm>
+#include <dirent.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <iostream>
+#include <set>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
+#include <libcamera/framebuffer.h>
+#include <libcamera/geometry.h>
+#include <libcamera/pixel_format.h>
+
+#include <libdrm/drm_mode.h>
+
+#include "../common/event_loop.h"
+
+namespace DRM {
+
+Object::Object(Device *dev, uint32_t id, Type type)
+ : id_(id), dev_(dev), type_(type)
+{
+ /* Retrieve properties from the objects that support them. */
+ if (type != TypeConnector && type != TypeCrtc &&
+ type != TypeEncoder && type != TypePlane)
+ return;
+
+ /*
+ * We can't distinguish between failures due to the object having no
+ * properties and failures due to other conditions. Assume we use the API
+ * correctly and consider that the object has no properties.
+ */
+ drmModeObjectProperties *properties = drmModeObjectGetProperties(dev->fd(), id, type);
+ if (!properties)
+ return;
+
+ properties_.reserve(properties->count_props);
+ for (uint32_t i = 0; i < properties->count_props; ++i)
+ properties_.emplace_back(properties->props[i],
+ properties->prop_values[i]);
+
+ drmModeFreeObjectProperties(properties);
+}
+
+Object::~Object()
+{
+}
+
+const Property *Object::property(const std::string &name) const
+{
+ for (const PropertyValue &pv : properties_) {
+ const Property *property = static_cast<const Property *>(dev_->object(pv.id()));
+ if (property && property->name() == name)
+ return property;
+ }
+
+ return nullptr;
+}
+
+const PropertyValue *Object::propertyValue(const std::string &name) const
+{
+ for (const PropertyValue &pv : properties_) {
+ const Property *property = static_cast<const Property *>(dev_->object(pv.id()));
+ if (property && property->name() == name)
+ return &pv;
+ }
+
+ return nullptr;
+}
+
+Property::Property(Device *dev, drmModePropertyRes *property)
+ : Object(dev, property->prop_id, TypeProperty),
+ name_(property->name), flags_(property->flags),
+ values_(property->values, property->values + property->count_values),
+ blobs_(property->blob_ids, property->blob_ids + property->count_blobs)
+{
+ if (drm_property_type_is(property, DRM_MODE_PROP_RANGE))
+ type_ = TypeRange;
+ else if (drm_property_type_is(property, DRM_MODE_PROP_ENUM))
+ type_ = TypeEnum;
+ else if (drm_property_type_is(property, DRM_MODE_PROP_BLOB))
+ type_ = TypeBlob;
+ else if (drm_property_type_is(property, DRM_MODE_PROP_BITMASK))
+ type_ = TypeBitmask;
+ else if (drm_property_type_is(property, DRM_MODE_PROP_OBJECT))
+ type_ = TypeObject;
+ else if (drm_property_type_is(property, DRM_MODE_PROP_SIGNED_RANGE))
+ type_ = TypeSignedRange;
+ else
+ type_ = TypeUnknown;
+
+ for (int i = 0; i < property->count_enums; ++i)
+ enums_[property->enums[i].value] = property->enums[i].name;
+}
+
+Blob::Blob(Device *dev, const libcamera::Span<const uint8_t> &data)
+ : Object(dev, 0, Object::TypeBlob)
+{
+ drmModeCreatePropertyBlob(dev->fd(), data.data(), data.size(), &id_);
+}
+
+Blob::~Blob()
+{
+ if (isValid())
+ drmModeDestroyPropertyBlob(device()->fd(), id());
+}
+
+Mode::Mode(const drmModeModeInfo &mode)
+ : drmModeModeInfo(mode)
+{
+}
+
+std::unique_ptr<Blob> Mode::toBlob(Device *dev) const
+{
+ libcamera::Span<const uint8_t> data{ reinterpret_cast<const uint8_t *>(this),
+ sizeof(*this) };
+ return std::make_unique<Blob>(dev, data);
+}
+
+Crtc::Crtc(Device *dev, const drmModeCrtc *crtc, unsigned int index)
+ : Object(dev, crtc->crtc_id, Object::TypeCrtc), index_(index)
+{
+}
+
+Encoder::Encoder(Device *dev, const drmModeEncoder *encoder)
+ : Object(dev, encoder->encoder_id, Object::TypeEncoder),
+ type_(encoder->encoder_type)
+{
+ const std::list<Crtc> &crtcs = dev->crtcs();
+ possibleCrtcs_.reserve(crtcs.size());
+
+ for (const Crtc &crtc : crtcs) {
+ if (encoder->possible_crtcs & (1 << crtc.index()))
+ possibleCrtcs_.push_back(&crtc);
+ }
+
+ possibleCrtcs_.shrink_to_fit();
+}
+
+namespace {
+
+const std::map<uint32_t, const char *> connectorTypeNames{
+ { DRM_MODE_CONNECTOR_Unknown, "Unknown" },
+ { DRM_MODE_CONNECTOR_VGA, "VGA" },
+ { DRM_MODE_CONNECTOR_DVII, "DVI-I" },
+ { DRM_MODE_CONNECTOR_DVID, "DVI-D" },
+ { DRM_MODE_CONNECTOR_DVIA, "DVI-A" },
+ { DRM_MODE_CONNECTOR_Composite, "Composite" },
+ { DRM_MODE_CONNECTOR_SVIDEO, "S-Video" },
+ { DRM_MODE_CONNECTOR_LVDS, "LVDS" },
+ { DRM_MODE_CONNECTOR_Component, "Component" },
+ { DRM_MODE_CONNECTOR_9PinDIN, "9-Pin-DIN" },
+ { DRM_MODE_CONNECTOR_DisplayPort, "DP" },
+ { DRM_MODE_CONNECTOR_HDMIA, "HDMI-A" },
+ { DRM_MODE_CONNECTOR_HDMIB, "HDMI-B" },
+ { DRM_MODE_CONNECTOR_TV, "TV" },
+ { DRM_MODE_CONNECTOR_eDP, "eDP" },
+ { DRM_MODE_CONNECTOR_VIRTUAL, "Virtual" },
+ { DRM_MODE_CONNECTOR_DSI, "DSI" },
+ { DRM_MODE_CONNECTOR_DPI, "DPI" },
+};
+
+} /* namespace */
+
+Connector::Connector(Device *dev, const drmModeConnector *connector)
+ : Object(dev, connector->connector_id, Object::TypeConnector),
+ type_(connector->connector_type)
+{
+ auto typeName = connectorTypeNames.find(connector->connector_type);
+ if (typeName == connectorTypeNames.end()) {
+ std::cerr
+ << "Invalid connector type "
+ << connector->connector_type << std::endl;
+ typeName = connectorTypeNames.find(DRM_MODE_CONNECTOR_Unknown);
+ }
+
+ name_ = std::string(typeName->second) + "-"
+ + std::to_string(connector->connector_type_id);
+
+ switch (connector->connection) {
+ case DRM_MODE_CONNECTED:
+ status_ = Status::Connected;
+ break;
+
+ case DRM_MODE_DISCONNECTED:
+ status_ = Status::Disconnected;
+ break;
+
+ case DRM_MODE_UNKNOWNCONNECTION:
+ default:
+ status_ = Status::Unknown;
+ break;
+ }
+
+ const std::list<Encoder> &encoders = dev->encoders();
+
+ encoders_.reserve(connector->count_encoders);
+
+ for (int i = 0; i < connector->count_encoders; ++i) {
+ uint32_t encoderId = connector->encoders[i];
+ auto encoder = std::find_if(encoders.begin(), encoders.end(),
+ [=](const Encoder &e) {
+ return e.id() == encoderId;
+ });
+ if (encoder == encoders.end()) {
+ std::cerr
+ << "Encoder " << encoderId << " not found"
+ << std::endl;
+ continue;
+ }
+
+ encoders_.push_back(&*encoder);
+ }
+
+ encoders_.shrink_to_fit();
+
+ modes_ = { connector->modes, connector->modes + connector->count_modes };
+}
+
+Plane::Plane(Device *dev, const drmModePlane *plane)
+ : Object(dev, plane->plane_id, Object::TypePlane),
+ possibleCrtcsMask_(plane->possible_crtcs)
+{
+ formats_ = { plane->formats, plane->formats + plane->count_formats };
+
+ const std::list<Crtc> &crtcs = dev->crtcs();
+ possibleCrtcs_.reserve(crtcs.size());
+
+ for (const Crtc &crtc : crtcs) {
+ if (plane->possible_crtcs & (1 << crtc.index()))
+ possibleCrtcs_.push_back(&crtc);
+ }
+
+ possibleCrtcs_.shrink_to_fit();
+}
+
+bool Plane::supportsFormat(const libcamera::PixelFormat &format) const
+{
+ return std::find(formats_.begin(), formats_.end(), format.fourcc())
+ != formats_.end();
+}
+
+int Plane::setup()
+{
+ const PropertyValue *pv = propertyValue("type");
+ if (!pv)
+ return -EINVAL;
+
+ switch (pv->value()) {
+ case DRM_PLANE_TYPE_OVERLAY:
+ type_ = TypeOverlay;
+ break;
+
+ case DRM_PLANE_TYPE_PRIMARY:
+ type_ = TypePrimary;
+ break;
+
+ case DRM_PLANE_TYPE_CURSOR:
+ type_ = TypeCursor;
+ break;
+
+ default:
+ return -EINVAL;
+ }
+
+ return 0;
+}
+
+FrameBuffer::FrameBuffer(Device *dev)
+ : Object(dev, 0, Object::TypeFb)
+{
+}
+
+FrameBuffer::~FrameBuffer()
+{
+ for (const auto &plane : planes_) {
+ struct drm_gem_close gem_close = {
+ .handle = plane.second.handle,
+ .pad = 0,
+ };
+ int ret;
+
+ do {
+ ret = ioctl(device()->fd(), DRM_IOCTL_GEM_CLOSE, &gem_close);
+ } while (ret == -1 && (errno == EINTR || errno == EAGAIN));
+
+ if (ret == -1) {
+ ret = -errno;
+ std::cerr
+ << "Failed to close GEM object: "
+ << strerror(-ret) << std::endl;
+ }
+ }
+
+ drmModeRmFB(device()->fd(), id());
+}
+
+AtomicRequest::AtomicRequest(Device *dev)
+ : dev_(dev), valid_(true)
+{
+ request_ = drmModeAtomicAlloc();
+ if (!request_)
+ valid_ = false;
+}
+
+AtomicRequest::~AtomicRequest()
+{
+ if (request_)
+ drmModeAtomicFree(request_);
+}
+
+int AtomicRequest::addProperty(const Object *object, const std::string &property,
+ uint64_t value)
+{
+ if (!valid_)
+ return -EINVAL;
+
+ const Property *prop = object->property(property);
+ if (!prop) {
+ valid_ = false;
+ return -EINVAL;
+ }
+
+ return addProperty(object->id(), prop->id(), value);
+}
+
+int AtomicRequest::addProperty(const Object *object, const std::string &property,
+ std::unique_ptr<Blob> blob)
+{
+ if (!valid_)
+ return -EINVAL;
+
+ const Property *prop = object->property(property);
+ if (!prop) {
+ valid_ = false;
+ return -EINVAL;
+ }
+
+ int ret = addProperty(object->id(), prop->id(), blob->id());
+ if (ret < 0)
+ return ret;
+
+ blobs_.emplace_back(std::move(blob));
+
+ return 0;
+}
+
+int AtomicRequest::addProperty(uint32_t object, uint32_t property, uint64_t value)
+{
+ int ret = drmModeAtomicAddProperty(request_, object, property, value);
+ if (ret < 0) {
+ valid_ = false;
+ return ret;
+ }
+
+ return 0;
+}
+
+int AtomicRequest::commit(unsigned int flags)
+{
+ if (!valid_)
+ return -EINVAL;
+
+ uint32_t drmFlags = 0;
+ if (flags & FlagAllowModeset)
+ drmFlags |= DRM_MODE_ATOMIC_ALLOW_MODESET;
+ if (flags & FlagAsync)
+ drmFlags |= DRM_MODE_PAGE_FLIP_EVENT | DRM_MODE_ATOMIC_NONBLOCK;
+ if (flags & FlagTestOnly)
+ drmFlags |= DRM_MODE_ATOMIC_TEST_ONLY;
+
+ return drmModeAtomicCommit(dev_->fd(), request_, drmFlags, this);
+}
+
+Device::Device()
+ : fd_(-1)
+{
+}
+
+Device::~Device()
+{
+ if (fd_ != -1)
+ drmClose(fd_);
+}
+
+int Device::init()
+{
+ int ret = openCard();
+ if (ret < 0) {
+ std::cerr << "Failed to open any DRM/KMS device: "
+ << strerror(-ret) << std::endl;
+ return ret;
+ }
+
+ /*
+ * Enable the atomic APIs. This also automatically enables the
+ * universal planes API.
+ */
+ ret = drmSetClientCap(fd_, DRM_CLIENT_CAP_ATOMIC, 1);
+ if (ret < 0) {
+ ret = -errno;
+ std::cerr
+ << "Failed to enable atomic capability: "
+ << strerror(-ret) << std::endl;
+ return ret;
+ }
+
+ /* List all the resources. */
+ ret = getResources();
+ if (ret < 0)
+ return ret;
+
+ EventLoop::instance()->addFdEvent(fd_, EventLoop::Read,
+ std::bind(&Device::drmEvent, this));
+
+ return 0;
+}
+
+int Device::openCard()
+{
+ const std::string dirName = "/dev/dri/";
+ bool found = false;
+ int ret;
+
+ /*
+ * Open the first DRM/KMS device beginning with /dev/dri/card. The
+ * libdrm drmOpen*() functions require either a module name or a bus ID,
+ * which we don't have, so bypass them. The automatic module loading and
+ * device node creation from drmOpen() is of no practical use as any
+ * modern system will handle that through udev or an equivalent
+ * component.
+ */
+ DIR *folder = opendir(dirName.c_str());
+ if (!folder) {
+ ret = -errno;
+ std::cerr << "Failed to open " << dirName
+ << " directory: " << strerror(-ret) << std::endl;
+ return ret;
+ }
+
+ for (struct dirent *res; (res = readdir(folder));) {
+ uint64_t cap;
+
+ if (strncmp(res->d_name, "card", 4))
+ continue;
+
+ const std::string devName = dirName + res->d_name;
+ fd_ = open(devName.c_str(), O_RDWR | O_CLOEXEC);
+ if (fd_ < 0) {
+ ret = -errno;
+ std::cerr << "Failed to open DRM/KMS device " << devName << ": "
+ << strerror(-ret) << std::endl;
+ continue;
+ }
+
+ /*
+ * Skip devices that don't support the modeset API, to avoid
+ * selecting a DRM device corresponding to a GPU. There is no
+ * modeset capability, but the kernel returns an error for most
+ * caps if mode setting isn't supported by the driver. The
+ * DRM_CAP_DUMB_BUFFER capability is one of those, others would
+ * do as well. The capability value itself isn't relevant.
+ */
+ ret = drmGetCap(fd_, DRM_CAP_DUMB_BUFFER, &cap);
+ if (ret < 0) {
+ drmClose(fd_);
+ fd_ = -1;
+ continue;
+ }
+
+ found = true;
+ break;
+ }
+
+ closedir(folder);
+
+ return found ? 0 : -ENOENT;
+}
+
+int Device::getResources()
+{
+ int ret;
+
+ std::unique_ptr<drmModeRes, decltype(&drmModeFreeResources)> resources{
+ drmModeGetResources(fd_),
+ &drmModeFreeResources
+ };
+ if (!resources) {
+ ret = -errno;
+ std::cerr
+ << "Failed to get DRM/KMS resources: "
+ << strerror(-ret) << std::endl;
+ return ret;
+ }
+
+ for (int i = 0; i < resources->count_crtcs; ++i) {
+ drmModeCrtc *crtc = drmModeGetCrtc(fd_, resources->crtcs[i]);
+ if (!crtc) {
+ ret = -errno;
+ std::cerr
+ << "Failed to get CRTC: " << strerror(-ret)
+ << std::endl;
+ return ret;
+ }
+
+ crtcs_.emplace_back(this, crtc, i);
+ drmModeFreeCrtc(crtc);
+
+ Crtc &obj = crtcs_.back();
+ objects_[obj.id()] = &obj;
+ }
+
+ for (int i = 0; i < resources->count_encoders; ++i) {
+ drmModeEncoder *encoder =
+ drmModeGetEncoder(fd_, resources->encoders[i]);
+ if (!encoder) {
+ ret = -errno;
+ std::cerr
+ << "Failed to get encoder: " << strerror(-ret)
+ << std::endl;
+ return ret;
+ }
+
+ encoders_.emplace_back(this, encoder);
+ drmModeFreeEncoder(encoder);
+
+ Encoder &obj = encoders_.back();
+ objects_[obj.id()] = &obj;
+ }
+
+ for (int i = 0; i < resources->count_connectors; ++i) {
+ drmModeConnector *connector =
+ drmModeGetConnector(fd_, resources->connectors[i]);
+ if (!connector) {
+ ret = -errno;
+ std::cerr
+ << "Failed to get connector: " << strerror(-ret)
+ << std::endl;
+ return ret;
+ }
+
+ connectors_.emplace_back(this, connector);
+ drmModeFreeConnector(connector);
+
+ Connector &obj = connectors_.back();
+ objects_[obj.id()] = &obj;
+ }
+
+ std::unique_ptr<drmModePlaneRes, decltype(&drmModeFreePlaneResources)> planes{
+ drmModeGetPlaneResources(fd_),
+ &drmModeFreePlaneResources
+ };
+ if (!planes) {
+ ret = -errno;
+ std::cerr
+ << "Failed to get DRM/KMS planes: "
+ << strerror(-ret) << std::endl;
+ return ret;
+ }
+
+ for (uint32_t i = 0; i < planes->count_planes; ++i) {
+ drmModePlane *plane =
+ drmModeGetPlane(fd_, planes->planes[i]);
+ if (!plane) {
+ ret = -errno;
+ std::cerr
+ << "Failed to get plane: " << strerror(-ret)
+ << std::endl;
+ return ret;
+ }
+
+ planes_.emplace_back(this, plane);
+ drmModeFreePlane(plane);
+
+ Plane &obj = planes_.back();
+ objects_[obj.id()] = &obj;
+ }
+
+ /* Set the possible planes for each CRTC. */
+ for (Crtc &crtc : crtcs_) {
+ for (const Plane &plane : planes_) {
+ if (plane.possibleCrtcsMask_ & (1 << crtc.index()))
+ crtc.planes_.push_back(&plane);
+ }
+ }
+
+ /* Collect all property IDs and create Property instances. */
+ std::set<uint32_t> properties;
+ for (const auto &object : objects_) {
+ for (const PropertyValue &value : object.second->properties())
+ properties.insert(value.id());
+ }
+
+ for (uint32_t id : properties) {
+ drmModePropertyRes *property = drmModeGetProperty(fd_, id);
+ if (!property) {
+ ret = -errno;
+ std::cerr
+ << "Failed to get property: " << strerror(-ret)
+ << std::endl;
+ continue;
+ }
+
+ properties_.emplace_back(this, property);
+ drmModeFreeProperty(property);
+
+ Property &obj = properties_.back();
+ objects_[obj.id()] = &obj;
+ }
+
+ /* Finally, perform all delayed setup of mode objects. */
+ for (auto &object : objects_) {
+ ret = object.second->setup();
+ if (ret < 0) {
+ std::cerr
+ << "Failed to setup object " << object.second->id()
+ << ": " << strerror(-ret) << std::endl;
+ return ret;
+ }
+ }
+
+ return 0;
+}
+
+const Object *Device::object(uint32_t id)
+{
+ const auto iter = objects_.find(id);
+ if (iter == objects_.end())
+ return nullptr;
+
+ return iter->second;
+}
+
+std::unique_ptr<FrameBuffer> Device::createFrameBuffer(
+ const libcamera::FrameBuffer &buffer,
+ const libcamera::PixelFormat &format,
+ const libcamera::Size &size,
+ const std::array<uint32_t, 4> &strides)
+{
+ std::unique_ptr<FrameBuffer> fb{ new FrameBuffer(this) };
+
+ uint32_t handles[4] = {};
+ uint32_t offsets[4] = {};
+ int ret;
+
+ const std::vector<libcamera::FrameBuffer::Plane> &planes = buffer.planes();
+
+ unsigned int i = 0;
+ for (const libcamera::FrameBuffer::Plane &plane : planes) {
+ int fd = plane.fd.get();
+ uint32_t handle;
+
+ auto iter = fb->planes_.find(fd);
+ if (iter == fb->planes_.end()) {
+ ret = drmPrimeFDToHandle(fd_, plane.fd.get(), &handle);
+ if (ret < 0) {
+ ret = -errno;
+ std::cerr
+ << "Unable to import framebuffer dmabuf: "
+ << strerror(-ret) << std::endl;
+ return nullptr;
+ }
+
+ fb->planes_[fd] = { handle };
+ } else {
+ handle = iter->second.handle;
+ }
+
+ handles[i] = handle;
+ offsets[i] = plane.offset;
+ ++i;
+ }
+
+ ret = drmModeAddFB2(fd_, size.width, size.height, format.fourcc(), handles,
+ strides.data(), offsets, &fb->id_, 0);
+ if (ret < 0) {
+ ret = -errno;
+ std::cerr
+ << "Failed to add framebuffer: "
+ << strerror(-ret) << std::endl;
+ return nullptr;
+ }
+
+ return fb;
+}
+
+void Device::drmEvent()
+{
+ drmEventContext ctx{};
+ ctx.version = DRM_EVENT_CONTEXT_VERSION;
+ ctx.page_flip_handler = &Device::pageFlipComplete;
+
+ drmHandleEvent(fd_, &ctx);
+}
+
+void Device::pageFlipComplete([[maybe_unused]] int fd,
+ [[maybe_unused]] unsigned int sequence,
+ [[maybe_unused]] unsigned int tv_sec,
+ [[maybe_unused]] unsigned int tv_usec,
+ void *user_data)
+{
+ AtomicRequest *request = static_cast<AtomicRequest *>(user_data);
+ request->device()->requestComplete.emit(request);
+}
+
+} /* namespace DRM */
diff --git a/src/apps/cam/drm.h b/src/apps/cam/drm.h
new file mode 100644
index 00000000..1ba83b6e
--- /dev/null
+++ b/src/apps/cam/drm.h
@@ -0,0 +1,334 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2021, Ideas on Board Oy
+ *
+ * DRM/KMS Helpers
+ */
+
+#pragma once
+
+#include <array>
+#include <list>
+#include <map>
+#include <memory>
+#include <stdint.h>
+#include <string>
+#include <vector>
+
+#include <libcamera/base/signal.h>
+#include <libcamera/base/span.h>
+
+#include <libdrm/drm.h>
+#include <xf86drm.h>
+#include <xf86drmMode.h>
+
+namespace libcamera {
+class FrameBuffer;
+class PixelFormat;
+class Size;
+} /* namespace libcamera */
+
+namespace DRM {
+
+class Device;
+class Plane;
+class Property;
+class PropertyValue;
+
+class Object
+{
+public:
+ enum Type {
+ TypeCrtc = DRM_MODE_OBJECT_CRTC,
+ TypeConnector = DRM_MODE_OBJECT_CONNECTOR,
+ TypeEncoder = DRM_MODE_OBJECT_ENCODER,
+ TypeMode = DRM_MODE_OBJECT_MODE,
+ TypeProperty = DRM_MODE_OBJECT_PROPERTY,
+ TypeFb = DRM_MODE_OBJECT_FB,
+ TypeBlob = DRM_MODE_OBJECT_BLOB,
+ TypePlane = DRM_MODE_OBJECT_PLANE,
+ TypeAny = DRM_MODE_OBJECT_ANY,
+ };
+
+ Object(Device *dev, uint32_t id, Type type);
+ virtual ~Object();
+
+ Device *device() const { return dev_; }
+ uint32_t id() const { return id_; }
+ Type type() const { return type_; }
+
+ const Property *property(const std::string &name) const;
+ const PropertyValue *propertyValue(const std::string &name) const;
+ const std::vector<PropertyValue> &properties() const { return properties_; }
+
+protected:
+ virtual int setup()
+ {
+ return 0;
+ }
+
+ uint32_t id_;
+
+private:
+ friend Device;
+
+ Device *dev_;
+ Type type_;
+ std::vector<PropertyValue> properties_;
+};
+
+class Property : public Object
+{
+public:
+ enum Type {
+ TypeUnknown = 0,
+ TypeRange,
+ TypeEnum,
+ TypeBlob,
+ TypeBitmask,
+ TypeObject,
+ TypeSignedRange,
+ };
+
+ Property(Device *dev, drmModePropertyRes *property);
+
+ Type type() const { return type_; }
+ const std::string &name() const { return name_; }
+
+ bool isImmutable() const { return flags_ & DRM_MODE_PROP_IMMUTABLE; }
+
+ const std::vector<uint64_t> &values() const { return values_; }
+ const std::map<uint32_t, std::string> &enums() const { return enums_; }
+ const std::vector<uint32_t> &blobs() const { return blobs_; }
+
+private:
+ Type type_;
+ std::string name_;
+ uint32_t flags_;
+ std::vector<uint64_t> values_;
+ std::map<uint32_t, std::string> enums_;
+ std::vector<uint32_t> blobs_;
+};
+
+class PropertyValue
+{
+public:
+ PropertyValue(uint32_t id, uint64_t value)
+ : id_(id), value_(value)
+ {
+ }
+
+ uint32_t id() const { return id_; }
+ uint64_t value() const { return value_; }
+
+private:
+ uint32_t id_;
+ uint64_t value_;
+};
+
+class Blob : public Object
+{
+public:
+ Blob(Device *dev, const libcamera::Span<const uint8_t> &data);
+ ~Blob();
+
+ bool isValid() const { return id() != 0; }
+};
+
+class Mode : public drmModeModeInfo
+{
+public:
+ Mode(const drmModeModeInfo &mode);
+
+ std::unique_ptr<Blob> toBlob(Device *dev) const;
+};
+
+class Crtc : public Object
+{
+public:
+ Crtc(Device *dev, const drmModeCrtc *crtc, unsigned int index);
+
+ unsigned int index() const { return index_; }
+ const std::vector<const Plane *> &planes() const { return planes_; }
+
+private:
+ friend Device;
+
+ unsigned int index_;
+ std::vector<const Plane *> planes_;
+};
+
+class Encoder : public Object
+{
+public:
+ Encoder(Device *dev, const drmModeEncoder *encoder);
+
+ uint32_t type() const { return type_; }
+
+ const std::vector<const Crtc *> &possibleCrtcs() const { return possibleCrtcs_; }
+
+private:
+ uint32_t type_;
+ std::vector<const Crtc *> possibleCrtcs_;
+};
+
+class Connector : public Object
+{
+public:
+ enum Status {
+ Connected,
+ Disconnected,
+ Unknown,
+ };
+
+ Connector(Device *dev, const drmModeConnector *connector);
+
+ uint32_t type() const { return type_; }
+ const std::string &name() const { return name_; }
+
+ Status status() const { return status_; }
+
+ const std::vector<const Encoder *> &encoders() const { return encoders_; }
+ const std::vector<Mode> &modes() const { return modes_; }
+
+private:
+ uint32_t type_;
+ std::string name_;
+ Status status_;
+ std::vector<const Encoder *> encoders_;
+ std::vector<Mode> modes_;
+};
+
+class Plane : public Object
+{
+public:
+ enum Type {
+ TypeOverlay,
+ TypePrimary,
+ TypeCursor,
+ };
+
+ Plane(Device *dev, const drmModePlane *plane);
+
+ Type type() const { return type_; }
+ const std::vector<uint32_t> &formats() const { return formats_; }
+ const std::vector<const Crtc *> &possibleCrtcs() const { return possibleCrtcs_; }
+
+ bool supportsFormat(const libcamera::PixelFormat &format) const;
+
+protected:
+ int setup() override;
+
+private:
+ friend class Device;
+
+ Type type_;
+ std::vector<uint32_t> formats_;
+ std::vector<const Crtc *> possibleCrtcs_;
+ uint32_t possibleCrtcsMask_;
+};
+
+class FrameBuffer : public Object
+{
+public:
+ struct Plane {
+ uint32_t handle;
+ };
+
+ ~FrameBuffer();
+
+private:
+ friend class Device;
+
+ FrameBuffer(Device *dev);
+
+ std::map<int, Plane> planes_;
+};
+
+class AtomicRequest
+{
+public:
+ enum Flags {
+ FlagAllowModeset = (1 << 0),
+ FlagAsync = (1 << 1),
+ FlagTestOnly = (1 << 2),
+ };
+
+ AtomicRequest(Device *dev);
+ ~AtomicRequest();
+
+ Device *device() const { return dev_; }
+ bool isValid() const { return valid_; }
+
+ int addProperty(const Object *object, const std::string &property,
+ uint64_t value);
+ int addProperty(const Object *object, const std::string &property,
+ std::unique_ptr<Blob> blob);
+ int commit(unsigned int flags = 0);
+
+private:
+ AtomicRequest(const AtomicRequest &) = delete;
+ AtomicRequest(const AtomicRequest &&) = delete;
+ AtomicRequest &operator=(const AtomicRequest &) = delete;
+ AtomicRequest &operator=(const AtomicRequest &&) = delete;
+
+ int addProperty(uint32_t object, uint32_t property, uint64_t value);
+
+ Device *dev_;
+ bool valid_;
+ drmModeAtomicReq *request_;
+ std::list<std::unique_ptr<Blob>> blobs_;
+};
+
+class Device
+{
+public:
+ Device();
+ ~Device();
+
+ int init();
+
+ int fd() const { return fd_; }
+
+ const std::list<Crtc> &crtcs() const { return crtcs_; }
+ const std::list<Encoder> &encoders() const { return encoders_; }
+ const std::list<Connector> &connectors() const { return connectors_; }
+ const std::list<Plane> &planes() const { return planes_; }
+ const std::list<Property> &properties() const { return properties_; }
+
+ const Object *object(uint32_t id);
+
+ std::unique_ptr<FrameBuffer> createFrameBuffer(
+ const libcamera::FrameBuffer &buffer,
+ const libcamera::PixelFormat &format,
+ const libcamera::Size &size,
+ const std::array<uint32_t, 4> &strides);
+
+ libcamera::Signal<AtomicRequest *> requestComplete;
+
+private:
+ Device(const Device &) = delete;
+ Device(const Device &&) = delete;
+ Device &operator=(const Device &) = delete;
+ Device &operator=(const Device &&) = delete;
+
+ int openCard();
+ int getResources();
+
+ void drmEvent();
+ static void pageFlipComplete(int fd, unsigned int sequence,
+ unsigned int tv_sec, unsigned int tv_usec,
+ void *user_data);
+
+ int fd_;
+
+ std::list<Crtc> crtcs_;
+ std::list<Encoder> encoders_;
+ std::list<Connector> connectors_;
+ std::list<Plane> planes_;
+ std::list<Property> properties_;
+
+ std::map<uint32_t, Object *> objects_;
+};
+
+} /* namespace DRM */
diff --git a/src/apps/cam/file_sink.cpp b/src/apps/cam/file_sink.cpp
new file mode 100644
index 00000000..3e000d2f
--- /dev/null
+++ b/src/apps/cam/file_sink.cpp
@@ -0,0 +1,158 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2019, Google Inc.
+ *
+ * File Sink
+ */
+
+#include <assert.h>
+#include <fcntl.h>
+#include <iomanip>
+#include <iostream>
+#include <sstream>
+#include <string.h>
+#include <unistd.h>
+
+#include <libcamera/camera.h>
+
+#include "../common/dng_writer.h"
+#include "../common/image.h"
+#include "../common/ppm_writer.h"
+
+#include "file_sink.h"
+
+using namespace libcamera;
+
+FileSink::FileSink([[maybe_unused]] const libcamera::Camera *camera,
+ const std::map<const libcamera::Stream *, std::string> &streamNames,
+ const std::string &pattern)
+ :
+#ifdef HAVE_TIFF
+ camera_(camera),
+#endif
+ streamNames_(streamNames), pattern_(pattern)
+{
+}
+
+FileSink::~FileSink()
+{
+}
+
+int FileSink::configure(const libcamera::CameraConfiguration &config)
+{
+ int ret = FrameSink::configure(config);
+ if (ret < 0)
+ return ret;
+
+ return 0;
+}
+
+void FileSink::mapBuffer(FrameBuffer *buffer)
+{
+ std::unique_ptr<Image> image =
+ Image::fromFrameBuffer(buffer, Image::MapMode::ReadOnly);
+ assert(image != nullptr);
+
+ mappedBuffers_[buffer] = std::move(image);
+}
+
+bool FileSink::processRequest(Request *request)
+{
+ for (auto [stream, buffer] : request->buffers())
+ writeBuffer(stream, buffer, request->metadata());
+
+ return true;
+}
+
+void FileSink::writeBuffer(const Stream *stream, FrameBuffer *buffer,
+ [[maybe_unused]] const ControlList &metadata)
+{
+ std::string filename;
+ size_t pos;
+ int fd, ret = 0;
+
+ if (!pattern_.empty())
+ filename = pattern_;
+
+#ifdef HAVE_TIFF
+ bool dng = filename.find(".dng", filename.size() - 4) != std::string::npos;
+#endif /* HAVE_TIFF */
+ bool ppm = filename.find(".ppm", filename.size() - 4) != std::string::npos;
+
+ if (filename.empty() || filename.back() == '/')
+ filename += "frame-#.bin";
+
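+	/*
+	 * Replace the first '#' in the file name with the stream name and a
+	 * zero-padded frame sequence number, so that a pattern such as
+	 * "frame-#.bin" expands to "frame-<stream name>-000042.bin".
+	 */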
+ pos = filename.find_first_of('#');
+ if (pos != std::string::npos) {
+ std::stringstream ss;
+ ss << streamNames_[stream] << "-" << std::setw(6)
+ << std::setfill('0') << buffer->metadata().sequence;
+ filename.replace(pos, 1, ss.str());
+ }
+
+ Image *image = mappedBuffers_[buffer].get();
+
+#ifdef HAVE_TIFF
+ if (dng) {
+ ret = DNGWriter::write(filename.c_str(), camera_,
+ stream->configuration(), metadata,
+ buffer, image->data(0).data());
+ if (ret < 0)
+ std::cerr << "failed to write DNG file `" << filename
+ << "'" << std::endl;
+
+ return;
+ }
+#endif /* HAVE_TIFF */
+ if (ppm) {
+ ret = PPMWriter::write(filename.c_str(), stream->configuration(),
+ image->data(0));
+ if (ret < 0)
+ std::cerr << "failed to write PPM file `" << filename
+ << "'" << std::endl;
+
+ return;
+ }
+
+ fd = open(filename.c_str(), O_CREAT | O_WRONLY |
+ (pos == std::string::npos ? O_APPEND : O_TRUNC),
+ S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH);
+ if (fd == -1) {
+ ret = -errno;
+ std::cerr << "failed to open file " << filename << ": "
+ << strerror(-ret) << std::endl;
+ return;
+ }
+
+ for (unsigned int i = 0; i < buffer->planes().size(); ++i) {
+		/*
+		 * This was formerly a local "const FrameMetadata::Plane &",
+		 * but it triggered a false positive dangling-reference
+		 * warning on gcc 13.
+		 */
+ const unsigned int bytesused = buffer->metadata().planes()[i].bytesused;
+
+ Span<uint8_t> data = image->data(i);
+ const unsigned int length = std::min<unsigned int>(bytesused, data.size());
+
+ if (bytesused > data.size())
+ std::cerr << "payload size " << bytesused
+ << " larger than plane size " << data.size()
+ << std::endl;
+
+ ret = ::write(fd, data.data(), length);
+ if (ret < 0) {
+ ret = -errno;
+ std::cerr << "write error: " << strerror(-ret)
+ << std::endl;
+ break;
+ } else if (ret != (int)length) {
+ std::cerr << "write error: only " << ret
+ << " bytes written instead of "
+ << length << std::endl;
+ break;
+ }
+ }
+
+ close(fd);
+}
diff --git a/src/apps/cam/file_sink.h b/src/apps/cam/file_sink.h
new file mode 100644
index 00000000..9d560783
--- /dev/null
+++ b/src/apps/cam/file_sink.h
@@ -0,0 +1,45 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2019, Google Inc.
+ *
+ * File Sink
+ */
+
+#pragma once
+
+#include <map>
+#include <memory>
+#include <string>
+
+#include <libcamera/stream.h>
+
+#include "frame_sink.h"
+
+class Image;
+
+class FileSink : public FrameSink
+{
+public:
+ FileSink(const libcamera::Camera *camera,
+ const std::map<const libcamera::Stream *, std::string> &streamNames,
+ const std::string &pattern = "");
+ ~FileSink();
+
+ int configure(const libcamera::CameraConfiguration &config) override;
+
+ void mapBuffer(libcamera::FrameBuffer *buffer) override;
+
+ bool processRequest(libcamera::Request *request) override;
+
+private:
+ void writeBuffer(const libcamera::Stream *stream,
+ libcamera::FrameBuffer *buffer,
+ const libcamera::ControlList &metadata);
+
+#ifdef HAVE_TIFF
+ const libcamera::Camera *camera_;
+#endif
+ std::map<const libcamera::Stream *, std::string> streamNames_;
+ std::string pattern_;
+ std::map<libcamera::FrameBuffer *, std::unique_ptr<Image>> mappedBuffers_;
+};
diff --git a/src/apps/cam/frame_sink.cpp b/src/apps/cam/frame_sink.cpp
new file mode 100644
index 00000000..68d6f2c1
--- /dev/null
+++ b/src/apps/cam/frame_sink.cpp
@@ -0,0 +1,67 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2021, Ideas on Board Oy
+ *
+ * Base Frame Sink Class
+ */
+
+#include "frame_sink.h"
+
+/**
+ * \class FrameSink
+ * \brief Abstract class to model a consumer of frames
+ *
+ * The FrameSink class models the consumer that processes frames after a request
+ * completes. It receives requests through processRequest(), and processes them
+ * synchronously or asynchronously. This allows frame sinks to hold onto frames
+ * for an extended period of time, for instance to display them until a new
+ * frame arrives.
+ *
+ * A frame sink processes whole requests, and is solely responsible for deciding
+ * how to handle different frame buffers in case multiple streams are captured.
+ */
+
+FrameSink::~FrameSink()
+{
+}
+
+int FrameSink::configure([[maybe_unused]] const libcamera::CameraConfiguration &config)
+{
+ return 0;
+}
+
+void FrameSink::mapBuffer([[maybe_unused]] libcamera::FrameBuffer *buffer)
+{
+}
+
+int FrameSink::start()
+{
+ return 0;
+}
+
+int FrameSink::stop()
+{
+ return 0;
+}
+
+/**
+ * \fn FrameSink::processRequest()
+ * \param[in] request The request
+ *
+ * This function is called to instruct the sink to process a request. The sink
+ * may process the request synchronously or queue it for asynchronous
+ * processing.
+ *
+ * When the request is processed synchronously, this function shall return true.
+ * The \a request shall not be accessed by the FrameSink after the function
+ * returns.
+ *
+ * When the request is processed asynchronously, the FrameSink temporarily takes
+ * ownership of the \a request. The function shall return false, and the
+ * FrameSink shall emit the requestProcessed signal when the request processing
+ * completes. If the stop() function is called before the request processing
+ * completes, the sink shall release the request synchronously.
+ *
+ * \return True if the request has been processed synchronously, false if
+ * processing has been queued
+ */
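+
+/*
+ * A minimal sketch of a synchronous sink (the DiscardSink name is purely
+ * illustrative and not part of the cam application):
+ *
+ * \code
+ * class DiscardSink : public FrameSink
+ * {
+ * public:
+ *	bool processRequest([[maybe_unused]] libcamera::Request *request) override
+ *	{
+ *		return true;	// Processed synchronously, nothing retained.
+ *	}
+ * };
+ * \endcode
+ *
+ * An asynchronous sink would instead return false and emit requestProcessed
+ * once it has finished with the request.
+ */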
diff --git a/src/apps/cam/frame_sink.h b/src/apps/cam/frame_sink.h
new file mode 100644
index 00000000..11105c6c
--- /dev/null
+++ b/src/apps/cam/frame_sink.h
@@ -0,0 +1,32 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2021, Ideas on Board Oy
+ *
+ * Base Frame Sink Class
+ */
+
+#pragma once
+
+#include <libcamera/base/signal.h>
+
+namespace libcamera {
+class CameraConfiguration;
+class FrameBuffer;
+class Request;
+} /* namespace libcamera */
+
+class FrameSink
+{
+public:
+ virtual ~FrameSink();
+
+ virtual int configure(const libcamera::CameraConfiguration &config);
+
+ virtual void mapBuffer(libcamera::FrameBuffer *buffer);
+
+ virtual int start();
+ virtual int stop();
+
+ virtual bool processRequest(libcamera::Request *request) = 0;
+ libcamera::Signal<libcamera::Request *> requestProcessed;
+};
diff --git a/src/apps/cam/kms_sink.cpp b/src/apps/cam/kms_sink.cpp
new file mode 100644
index 00000000..672c985a
--- /dev/null
+++ b/src/apps/cam/kms_sink.cpp
@@ -0,0 +1,536 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2021, Ideas on Board Oy
+ *
+ * KMS Sink
+ */
+
+#include "kms_sink.h"
+
+#include <array>
+#include <algorithm>
+#include <assert.h>
+#include <iostream>
+#include <limits.h>
+#include <memory>
+#include <stdint.h>
+#include <string.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/formats.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/stream.h>
+
+#include "drm.h"
+
+KMSSink::KMSSink(const std::string &connectorName)
+ : connector_(nullptr), crtc_(nullptr), plane_(nullptr), mode_(nullptr)
+{
+ int ret = dev_.init();
+ if (ret < 0)
+ return;
+
+ /*
+ * Find the requested connector. If no specific connector is requested,
+ * pick the first connected connector or, if no connector is connected,
+ * the first connector with unknown status.
+ */
+ for (const DRM::Connector &conn : dev_.connectors()) {
+ if (!connectorName.empty()) {
+ if (conn.name() != connectorName)
+ continue;
+
+ connector_ = &conn;
+ break;
+ }
+
+ if (conn.status() == DRM::Connector::Connected) {
+ connector_ = &conn;
+ break;
+ }
+
+ if (!connector_ && conn.status() == DRM::Connector::Unknown)
+ connector_ = &conn;
+ }
+
+ if (!connector_) {
+ if (!connectorName.empty())
+ std::cerr
+ << "Connector " << connectorName << " not found"
+ << std::endl;
+ else
+ std::cerr << "No connected connector found" << std::endl;
+ return;
+ }
+
+ dev_.requestComplete.connect(this, &KMSSink::requestComplete);
+}
+
+void KMSSink::mapBuffer(libcamera::FrameBuffer *buffer)
+{
+ std::array<uint32_t, 4> strides = {};
+
+ /* \todo Should libcamera report per-plane strides ? */
+ unsigned int uvStrideMultiplier;
+
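+	/*
+	 * The chroma stride is derived from the luma stride: fully planar
+	 * 4:2:0 and 4:2:2 formats use half-width chroma planes (multiplier 1),
+	 * most semi-planar formats use an interleaved U/V plane with the same
+	 * stride as the luma plane (multiplier 2), and 4:4:4 semi-planar
+	 * formats need twice the luma stride (multiplier 4).
+	 */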
+ switch (format_) {
+ case libcamera::formats::NV24:
+ case libcamera::formats::NV42:
+ uvStrideMultiplier = 4;
+ break;
+ case libcamera::formats::YUV420:
+ case libcamera::formats::YVU420:
+ case libcamera::formats::YUV422:
+ uvStrideMultiplier = 1;
+ break;
+ default:
+ uvStrideMultiplier = 2;
+ break;
+ }
+
+ strides[0] = stride_;
+ for (unsigned int i = 1; i < buffer->planes().size(); ++i)
+ strides[i] = stride_ * uvStrideMultiplier / 2;
+
+ std::unique_ptr<DRM::FrameBuffer> drmBuffer =
+ dev_.createFrameBuffer(*buffer, format_, size_, strides);
+ if (!drmBuffer)
+ return;
+
+ buffers_.emplace(std::piecewise_construct,
+ std::forward_as_tuple(buffer),
+ std::forward_as_tuple(std::move(drmBuffer)));
+}
+
+int KMSSink::configure(const libcamera::CameraConfiguration &config)
+{
+ if (!connector_)
+ return -EINVAL;
+
+ crtc_ = nullptr;
+ plane_ = nullptr;
+ mode_ = nullptr;
+
+ const libcamera::StreamConfiguration &cfg = config.at(0);
+
+ /* Find the best mode for the stream size. */
+ const std::vector<DRM::Mode> &modes = connector_->modes();
+
+ unsigned int cfgArea = cfg.size.width * cfg.size.height;
+ unsigned int bestDistance = UINT_MAX;
+
+ for (const DRM::Mode &mode : modes) {
+ unsigned int modeArea = mode.hdisplay * mode.vdisplay;
+ unsigned int distance = modeArea > cfgArea ? modeArea - cfgArea
+ : cfgArea - modeArea;
+
+ if (distance < bestDistance) {
+ mode_ = &mode;
+ bestDistance = distance;
+
+ /*
+ * If the sizes match exactly, there will be no better
+ * match.
+ */
+ if (distance == 0)
+ break;
+ }
+ }
+
+ if (!mode_) {
+		std::cerr << "No modes available on the connector" << std::endl;
+ return -EINVAL;
+ }
+
+ int ret = configurePipeline(cfg.pixelFormat);
+ if (ret < 0)
+ return ret;
+
+ size_ = cfg.size;
+ stride_ = cfg.stride;
+
+ /* Configure color space. */
+ colorEncoding_ = std::nullopt;
+ colorRange_ = std::nullopt;
+
+ if (cfg.colorSpace->ycbcrEncoding == libcamera::ColorSpace::YcbcrEncoding::None)
+ return 0;
+
+ /*
+ * The encoding and range enums are defined in the kernel but not
+ * exposed in public headers.
+ */
+ enum drm_color_encoding {
+ DRM_COLOR_YCBCR_BT601,
+ DRM_COLOR_YCBCR_BT709,
+ DRM_COLOR_YCBCR_BT2020,
+ };
+
+ enum drm_color_range {
+ DRM_COLOR_YCBCR_LIMITED_RANGE,
+ DRM_COLOR_YCBCR_FULL_RANGE,
+ };
+
+ const DRM::Property *colorEncoding = plane_->property("COLOR_ENCODING");
+ const DRM::Property *colorRange = plane_->property("COLOR_RANGE");
+
+ if (colorEncoding) {
+ drm_color_encoding encoding;
+
+ switch (cfg.colorSpace->ycbcrEncoding) {
+ case libcamera::ColorSpace::YcbcrEncoding::Rec601:
+ default:
+ encoding = DRM_COLOR_YCBCR_BT601;
+ break;
+ case libcamera::ColorSpace::YcbcrEncoding::Rec709:
+ encoding = DRM_COLOR_YCBCR_BT709;
+ break;
+ case libcamera::ColorSpace::YcbcrEncoding::Rec2020:
+ encoding = DRM_COLOR_YCBCR_BT2020;
+ break;
+ }
+
+ for (const auto &[id, name] : colorEncoding->enums()) {
+ if (id == encoding) {
+ colorEncoding_ = encoding;
+ break;
+ }
+ }
+ }
+
+ if (colorRange) {
+ drm_color_range range;
+
+ switch (cfg.colorSpace->range) {
+ case libcamera::ColorSpace::Range::Limited:
+ default:
+ range = DRM_COLOR_YCBCR_LIMITED_RANGE;
+ break;
+ case libcamera::ColorSpace::Range::Full:
+ range = DRM_COLOR_YCBCR_FULL_RANGE;
+ break;
+ }
+
+ for (const auto &[id, name] : colorRange->enums()) {
+ if (id == range) {
+ colorRange_ = range;
+ break;
+ }
+ }
+ }
+
+ if (!colorEncoding_ || !colorRange_)
+ std::cerr << "Color space " << cfg.colorSpace->toString()
+ << " not supported by the display device."
+ << " Colors may be wrong." << std::endl;
+
+ return 0;
+}
+
+int KMSSink::selectPipeline(const libcamera::PixelFormat &format)
+{
+ /*
+ * If the requested format has an alpha channel, also consider the X
+ * variant.
+ */
+ libcamera::PixelFormat xFormat;
+
+ switch (format) {
+ case libcamera::formats::ABGR8888:
+ xFormat = libcamera::formats::XBGR8888;
+ break;
+ case libcamera::formats::ARGB8888:
+ xFormat = libcamera::formats::XRGB8888;
+ break;
+ case libcamera::formats::BGRA8888:
+ xFormat = libcamera::formats::BGRX8888;
+ break;
+ case libcamera::formats::RGBA8888:
+ xFormat = libcamera::formats::RGBX8888;
+ break;
+ }
+
+	/*
+	 * Find a CRTC and plane suitable for the requested format and the
+	 * connector at the end of the pipeline. Restrict the search to primary
+	 * planes for now.
+	 */
+ for (const DRM::Encoder *encoder : connector_->encoders()) {
+ for (const DRM::Crtc *crtc : encoder->possibleCrtcs()) {
+ for (const DRM::Plane *plane : crtc->planes()) {
+ if (plane->type() != DRM::Plane::TypePrimary)
+ continue;
+
+ if (plane->supportsFormat(format)) {
+ crtc_ = crtc;
+ plane_ = plane;
+ format_ = format;
+ return 0;
+ }
+
+ if (plane->supportsFormat(xFormat)) {
+ crtc_ = crtc;
+ plane_ = plane;
+ format_ = xFormat;
+ return 0;
+ }
+ }
+ }
+ }
+
+ return -EPIPE;
+}
+
+int KMSSink::configurePipeline(const libcamera::PixelFormat &format)
+{
+ const int ret = selectPipeline(format);
+ if (ret) {
+ std::cerr
+ << "Unable to find display pipeline for format "
+ << format << std::endl;
+
+ return ret;
+ }
+
+ std::cout
+ << "Using KMS plane " << plane_->id() << ", CRTC " << crtc_->id()
+ << ", connector " << connector_->name()
+ << " (" << connector_->id() << "), mode " << mode_->hdisplay
+ << "x" << mode_->vdisplay << "@" << mode_->vrefresh << std::endl;
+
+ return 0;
+}
+
+int KMSSink::start()
+{
+ int ret = FrameSink::start();
+ if (ret < 0)
+ return ret;
+
+ /* Disable all CRTCs and planes to start from a known valid state. */
+ DRM::AtomicRequest request(&dev_);
+
+ for (const DRM::Crtc &crtc : dev_.crtcs())
+ request.addProperty(&crtc, "ACTIVE", 0);
+
+ for (const DRM::Plane &plane : dev_.planes()) {
+ request.addProperty(&plane, "CRTC_ID", 0);
+ request.addProperty(&plane, "FB_ID", 0);
+ }
+
+ ret = request.commit(DRM::AtomicRequest::FlagAllowModeset);
+ if (ret < 0) {
+ std::cerr
+ << "Failed to disable CRTCs and planes: "
+ << strerror(-ret) << std::endl;
+ return ret;
+ }
+
+ return 0;
+}
+
+int KMSSink::stop()
+{
+ /* Display pipeline. */
+ DRM::AtomicRequest request(&dev_);
+
+ request.addProperty(connector_, "CRTC_ID", 0);
+ request.addProperty(crtc_, "ACTIVE", 0);
+ request.addProperty(crtc_, "MODE_ID", 0);
+ request.addProperty(plane_, "CRTC_ID", 0);
+ request.addProperty(plane_, "FB_ID", 0);
+
+ int ret = request.commit(DRM::AtomicRequest::FlagAllowModeset);
+ if (ret < 0) {
+ std::cerr
+ << "Failed to stop display pipeline: "
+ << strerror(-ret) << std::endl;
+ return ret;
+ }
+
+ /* Free all buffers. */
+ pending_.reset();
+ queued_.reset();
+ active_.reset();
+ buffers_.clear();
+
+ return FrameSink::stop();
+}
+
+bool KMSSink::testModeSet(DRM::FrameBuffer *drmBuffer,
+ const libcamera::Rectangle &src,
+ const libcamera::Rectangle &dst)
+{
+ DRM::AtomicRequest drmRequest{ &dev_ };
+
+ drmRequest.addProperty(connector_, "CRTC_ID", crtc_->id());
+
+ drmRequest.addProperty(crtc_, "ACTIVE", 1);
+ drmRequest.addProperty(crtc_, "MODE_ID", mode_->toBlob(&dev_));
+
+ drmRequest.addProperty(plane_, "CRTC_ID", crtc_->id());
+ drmRequest.addProperty(plane_, "FB_ID", drmBuffer->id());
+ drmRequest.addProperty(plane_, "SRC_X", src.x << 16);
+ drmRequest.addProperty(plane_, "SRC_Y", src.y << 16);
+ drmRequest.addProperty(plane_, "SRC_W", src.width << 16);
+ drmRequest.addProperty(plane_, "SRC_H", src.height << 16);
+ drmRequest.addProperty(plane_, "CRTC_X", dst.x);
+ drmRequest.addProperty(plane_, "CRTC_Y", dst.y);
+ drmRequest.addProperty(plane_, "CRTC_W", dst.width);
+ drmRequest.addProperty(plane_, "CRTC_H", dst.height);
+
+ return !drmRequest.commit(DRM::AtomicRequest::FlagAllowModeset |
+ DRM::AtomicRequest::FlagTestOnly);
+}
+
+bool KMSSink::setupComposition(DRM::FrameBuffer *drmBuffer)
+{
+ /*
+ * Test composition options, from most to least desirable, to select the
+ * best one.
+ */
+ const libcamera::Rectangle framebuffer{ size_ };
+ const libcamera::Rectangle display{ 0, 0, mode_->hdisplay, mode_->vdisplay };
+
+ /* 1. Scale the frame buffer to full screen, preserving aspect ratio. */
+ libcamera::Rectangle src = framebuffer;
+ libcamera::Rectangle dst = display.size().boundedToAspectRatio(framebuffer.size())
+ .centeredTo(display.center());
+
+ if (testModeSet(drmBuffer, src, dst)) {
+ std::cout << "KMS: full-screen scaled output, square pixels"
+ << std::endl;
+ src_ = src;
+ dst_ = dst;
+ return true;
+ }
+
+ /*
+ * 2. Scale the frame buffer to full screen, without preserving aspect
+ * ratio.
+ */
+ src = framebuffer;
+ dst = display;
+
+ if (testModeSet(drmBuffer, src, dst)) {
+ std::cout << "KMS: full-screen scaled output, non-square pixels"
+ << std::endl;
+ src_ = src;
+ dst_ = dst;
+ return true;
+ }
+
+ /* 3. Center the frame buffer on the display. */
+ src = display.size().centeredTo(framebuffer.center()).boundedTo(framebuffer);
+ dst = framebuffer.size().centeredTo(display.center()).boundedTo(display);
+
+ if (testModeSet(drmBuffer, src, dst)) {
+ std::cout << "KMS: centered output" << std::endl;
+ src_ = src;
+ dst_ = dst;
+ return true;
+ }
+
+ /* 4. Align the frame buffer on the top-left of the display. */
+ src = framebuffer.boundedTo(display);
+ dst = display.boundedTo(framebuffer);
+
+ if (testModeSet(drmBuffer, src, dst)) {
+ std::cout << "KMS: top-left aligned output" << std::endl;
+ src_ = src;
+ dst_ = dst;
+ return true;
+ }
+
+ return false;
+}
+
+bool KMSSink::processRequest(libcamera::Request *camRequest)
+{
+ /*
+ * Perform a very crude rate adaptation by simply dropping the request
+ * if the display queue is full.
+ */
+ if (pending_)
+ return true;
+
+ libcamera::FrameBuffer *buffer = camRequest->buffers().begin()->second;
+ auto iter = buffers_.find(buffer);
+ if (iter == buffers_.end())
+ return true;
+
+ DRM::FrameBuffer *drmBuffer = iter->second.get();
+
+ unsigned int flags = DRM::AtomicRequest::FlagAsync;
+ std::unique_ptr<DRM::AtomicRequest> drmRequest =
+ std::make_unique<DRM::AtomicRequest>(&dev_);
+ drmRequest->addProperty(plane_, "FB_ID", drmBuffer->id());
+
+ if (!active_ && !queued_) {
+ /* Enable the display pipeline on the first frame. */
+ if (!setupComposition(drmBuffer)) {
+ std::cerr << "Failed to setup composition" << std::endl;
+ return true;
+ }
+
+ drmRequest->addProperty(connector_, "CRTC_ID", crtc_->id());
+
+ drmRequest->addProperty(crtc_, "ACTIVE", 1);
+ drmRequest->addProperty(crtc_, "MODE_ID", mode_->toBlob(&dev_));
+
+ drmRequest->addProperty(plane_, "CRTC_ID", crtc_->id());
+ drmRequest->addProperty(plane_, "SRC_X", src_.x << 16);
+ drmRequest->addProperty(plane_, "SRC_Y", src_.y << 16);
+ drmRequest->addProperty(plane_, "SRC_W", src_.width << 16);
+ drmRequest->addProperty(plane_, "SRC_H", src_.height << 16);
+ drmRequest->addProperty(plane_, "CRTC_X", dst_.x);
+ drmRequest->addProperty(plane_, "CRTC_Y", dst_.y);
+ drmRequest->addProperty(plane_, "CRTC_W", dst_.width);
+ drmRequest->addProperty(plane_, "CRTC_H", dst_.height);
+
+ if (colorEncoding_)
+ drmRequest->addProperty(plane_, "COLOR_ENCODING", *colorEncoding_);
+ if (colorRange_)
+ drmRequest->addProperty(plane_, "COLOR_RANGE", *colorRange_);
+
+ flags |= DRM::AtomicRequest::FlagAllowModeset;
+ }
+
+ pending_ = std::make_unique<Request>(std::move(drmRequest), camRequest);
+
+ std::lock_guard<std::mutex> lock(lock_);
+
+ if (!queued_) {
+ int ret = pending_->drmRequest_->commit(flags);
+ if (ret < 0) {
+ std::cerr
+ << "Failed to commit atomic request: "
+ << strerror(-ret) << std::endl;
+ /* \todo Implement error handling */
+ }
+
+ queued_ = std::move(pending_);
+ }
+
+ return false;
+}
+
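+/*
+ * Handle completion of the atomic commit for the queued request. The queued
+ * frame becomes the active one (currently displayed), the previously active
+ * camera request, if any, is returned to the application through
+ * requestProcessed, and the pending request, if any, is committed so that it
+ * becomes the new queued request.
+ */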
+void KMSSink::requestComplete([[maybe_unused]] DRM::AtomicRequest *request)
+{
+ std::lock_guard<std::mutex> lock(lock_);
+
+ assert(queued_ && queued_->drmRequest_.get() == request);
+
+ /* Complete the active request, if any. */
+ if (active_)
+ requestProcessed.emit(active_->camRequest_);
+
+ /* The queued request becomes active. */
+ active_ = std::move(queued_);
+
+ /* Queue the pending request, if any. */
+ if (pending_) {
+ pending_->drmRequest_->commit(DRM::AtomicRequest::FlagAsync);
+ queued_ = std::move(pending_);
+ }
+}
diff --git a/src/apps/cam/kms_sink.h b/src/apps/cam/kms_sink.h
new file mode 100644
index 00000000..4b7b4c26
--- /dev/null
+++ b/src/apps/cam/kms_sink.h
@@ -0,0 +1,83 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2021, Ideas on Board Oy
+ *
+ * KMS Sink
+ */
+
+#pragma once
+
+#include <list>
+#include <memory>
+#include <mutex>
+#include <optional>
+#include <string>
+#include <utility>
+
+#include <libcamera/base/signal.h>
+
+#include <libcamera/geometry.h>
+#include <libcamera/pixel_format.h>
+
+#include "drm.h"
+#include "frame_sink.h"
+
+class KMSSink : public FrameSink
+{
+public:
+ KMSSink(const std::string &connectorName);
+
+ void mapBuffer(libcamera::FrameBuffer *buffer) override;
+
+ int configure(const libcamera::CameraConfiguration &config) override;
+ int start() override;
+ int stop() override;
+
+ bool processRequest(libcamera::Request *request) override;
+
+private:
+ class Request
+ {
+ public:
+ Request(std::unique_ptr<DRM::AtomicRequest> drmRequest,
+ libcamera::Request *camRequest)
+ : drmRequest_(std::move(drmRequest)), camRequest_(camRequest)
+ {
+ }
+
+ std::unique_ptr<DRM::AtomicRequest> drmRequest_;
+ libcamera::Request *camRequest_;
+ };
+
+ int selectPipeline(const libcamera::PixelFormat &format);
+ int configurePipeline(const libcamera::PixelFormat &format);
+ bool testModeSet(DRM::FrameBuffer *drmBuffer,
+ const libcamera::Rectangle &src,
+ const libcamera::Rectangle &dst);
+ bool setupComposition(DRM::FrameBuffer *drmBuffer);
+
+ void requestComplete(DRM::AtomicRequest *request);
+
+ DRM::Device dev_;
+
+ const DRM::Connector *connector_;
+ const DRM::Crtc *crtc_;
+ const DRM::Plane *plane_;
+ const DRM::Mode *mode_;
+
+ libcamera::PixelFormat format_;
+ libcamera::Size size_;
+ unsigned int stride_;
+ std::optional<unsigned int> colorEncoding_;
+ std::optional<unsigned int> colorRange_;
+
+ libcamera::Rectangle src_;
+ libcamera::Rectangle dst_;
+
+ std::map<libcamera::FrameBuffer *, std::unique_ptr<DRM::FrameBuffer>> buffers_;
+
+ std::mutex lock_;
+ std::unique_ptr<Request> pending_;
+ std::unique_ptr<Request> queued_;
+ std::unique_ptr<Request> active_;
+};
diff --git a/src/apps/cam/main.cpp b/src/apps/cam/main.cpp
new file mode 100644
index 00000000..4f87f200
--- /dev/null
+++ b/src/apps/cam/main.cpp
@@ -0,0 +1,370 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2019, Google Inc.
+ *
+ * cam - The libcamera swiss army knife
+ */
+
+#include <atomic>
+#include <iomanip>
+#include <iostream>
+#include <signal.h>
+#include <string.h>
+
+#include <libcamera/libcamera.h>
+#include <libcamera/property_ids.h>
+
+#include "../common/event_loop.h"
+#include "../common/options.h"
+#include "../common/stream_options.h"
+
+#include "camera_session.h"
+#include "main.h"
+
+using namespace libcamera;
+
+class CamApp
+{
+public:
+ CamApp();
+
+ static CamApp *instance();
+
+ int init(int argc, char **argv);
+ void cleanup();
+
+ int exec();
+ void quit();
+
+private:
+ void cameraAdded(std::shared_ptr<Camera> cam);
+ void cameraRemoved(std::shared_ptr<Camera> cam);
+ void captureDone();
+ int parseOptions(int argc, char *argv[]);
+ int run();
+
+ static std::string cameraName(const Camera *camera);
+
+ static CamApp *app_;
+ OptionsParser::Options options_;
+
+ std::unique_ptr<CameraManager> cm_;
+
+ std::atomic_uint loopUsers_;
+ EventLoop loop_;
+};
+
+CamApp *CamApp::app_ = nullptr;
+
+CamApp::CamApp()
+ : loopUsers_(0)
+{
+ CamApp::app_ = this;
+}
+
+CamApp *CamApp::instance()
+{
+ return CamApp::app_;
+}
+
+int CamApp::init(int argc, char **argv)
+{
+ int ret;
+
+ ret = parseOptions(argc, argv);
+ if (ret < 0)
+ return ret;
+
+ cm_ = std::make_unique<CameraManager>();
+
+ ret = cm_->start();
+ if (ret) {
+ std::cout << "Failed to start camera manager: "
+ << strerror(-ret) << std::endl;
+ return ret;
+ }
+
+ return 0;
+}
+
+void CamApp::cleanup()
+{
+ cm_->stop();
+}
+
+int CamApp::exec()
+{
+ int ret;
+
+ ret = run();
+ cleanup();
+
+ return ret;
+}
+
+void CamApp::quit()
+{
+ loop_.exit();
+}
+
+int CamApp::parseOptions(int argc, char *argv[])
+{
+ StreamKeyValueParser streamKeyValue;
+
+ OptionsParser parser;
+ parser.addOption(OptCamera, OptionString,
+ "Specify which camera to operate on, by id or by index", "camera",
+ ArgumentRequired, "camera", true);
+ parser.addOption(OptHelp, OptionNone, "Display this help message",
+ "help");
+ parser.addOption(OptInfo, OptionNone,
+ "Display information about stream(s)", "info");
+ parser.addOption(OptList, OptionNone, "List all cameras", "list");
+	parser.addOption(OptListControls, OptionNone, "List camera controls",
+			 "list-controls");
+	parser.addOption(OptListProperties, OptionNone, "List camera properties",
+			 "list-properties");
+ parser.addOption(OptMonitor, OptionNone,
+ "Monitor for hotplug and unplug camera events",
+ "monitor");
+
+ /* Sub-options of OptCamera: */
+ parser.addOption(OptCapture, OptionInteger,
+ "Capture until interrupted by user or until <count> frames captured",
+ "capture", ArgumentOptional, "count", false,
+ OptCamera);
+
+ parser.addOption(OptOrientation, OptionString,
+ "Desired image orientation (rot0, rot180, mirror, flip)",
+ "orientation", ArgumentRequired, "orientation", false,
+ OptCamera);
+#ifdef HAVE_KMS
+ parser.addOption(OptDisplay, OptionString,
+ "Display viewfinder through DRM/KMS on specified connector",
+ "display", ArgumentOptional, "connector", false,
+ OptCamera);
+#endif
+ parser.addOption(OptFile, OptionString,
+ "Write captured frames to disk\n"
+ "If the file name ends with a '/', it sets the directory in which\n"
+ "to write files, using the default file name. Otherwise it sets the\n"
+ "full file path and name. The first '#' character in the file name\n"
+ "is expanded to the camera index, stream name and frame sequence number.\n"
+#ifdef HAVE_TIFF
+ "If the file name ends with '.dng', then the frame will be written to\n"
+ "the output file(s) in DNG format.\n"
+#endif
+ "If the file name ends with '.ppm', then the frame will be written to\n"
+ "the output file(s) in PPM format.\n"
+ "The default file name is 'frame-#.bin'.",
+ "file", ArgumentOptional, "filename", false,
+ OptCamera);
+#ifdef HAVE_SDL
+ parser.addOption(OptSDL, OptionNone, "Display viewfinder through SDL",
+ "sdl", ArgumentNone, "", false, OptCamera);
+#endif
+ parser.addOption(OptStream, &streamKeyValue,
+ "Set configuration of a camera stream", "stream", true,
+ OptCamera);
+ parser.addOption(OptStrictFormats, OptionNone,
+ "Do not allow requested stream format(s) to be adjusted",
+ "strict-formats", ArgumentNone, nullptr, false,
+ OptCamera);
+ parser.addOption(OptMetadata, OptionNone,
+ "Print the metadata for completed requests",
+ "metadata", ArgumentNone, nullptr, false,
+ OptCamera);
+ parser.addOption(OptCaptureScript, OptionString,
+ "Load a capture session configuration script from a file",
+ "script", ArgumentRequired, "script", false,
+ OptCamera);
+
+ options_ = parser.parse(argc, argv);
+ if (!options_.valid())
+ return -EINVAL;
+
+ if (options_.empty() || options_.isSet(OptHelp)) {
+ parser.usage();
+ return options_.empty() ? -EINVAL : -EINTR;
+ }
+
+ return 0;
+}
+
+void CamApp::cameraAdded(std::shared_ptr<Camera> cam)
+{
+ std::cout << "Camera Added: " << cam->id() << std::endl;
+}
+
+void CamApp::cameraRemoved(std::shared_ptr<Camera> cam)
+{
+ std::cout << "Camera Removed: " << cam->id() << std::endl;
+}
+
+void CamApp::captureDone()
+{
+ if (--loopUsers_ == 0)
+ EventLoop::instance()->exit(0);
+}
+
+int CamApp::run()
+{
+ int ret;
+
+ /* 1. List all cameras. */
+ if (options_.isSet(OptList)) {
+ std::cout << "Available cameras:" << std::endl;
+
+ unsigned int index = 1;
+ for (const std::shared_ptr<Camera> &cam : cm_->cameras()) {
+ std::cout << index << ": " << cameraName(cam.get()) << std::endl;
+ index++;
+ }
+ }
+
+ /* 2. Create the camera sessions. */
+ std::vector<std::unique_ptr<CameraSession>> sessions;
+
+ if (options_.isSet(OptCamera)) {
+ unsigned int index = 0;
+
+ for (const OptionValue &camera : options_[OptCamera].toArray()) {
+ std::unique_ptr<CameraSession> session =
+ std::make_unique<CameraSession>(cm_.get(),
+ camera.toString(),
+ index,
+ camera.children());
+ if (!session->isValid()) {
+ std::cout << "Failed to create camera session" << std::endl;
+ return -EINVAL;
+ }
+
+ std::cout << "Using camera " << session->camera()->id()
+ << " as cam" << index << std::endl;
+
+ session->captureDone.connect(this, &CamApp::captureDone);
+
+ sessions.push_back(std::move(session));
+ index++;
+ }
+ }
+
+ /* 3. Print camera information. */
+ if (options_.isSet(OptListControls) ||
+ options_.isSet(OptListProperties) ||
+ options_.isSet(OptInfo)) {
+ for (const auto &session : sessions) {
+ if (options_.isSet(OptListControls))
+ session->listControls();
+ if (options_.isSet(OptListProperties))
+ session->listProperties();
+ if (options_.isSet(OptInfo))
+ session->infoConfiguration();
+ }
+ }
+
+ /* 4. Start capture. */
+ for (const auto &session : sessions) {
+ if (!session->options().isSet(OptCapture))
+ continue;
+
+ ret = session->start();
+ if (ret) {
+ std::cout << "Failed to start camera session" << std::endl;
+ return ret;
+ }
+
+ loopUsers_++;
+ }
+
+ /* 5. Enable hotplug monitoring. */
+ if (options_.isSet(OptMonitor)) {
+		std::cout << "Monitoring for hotplug and unplug camera events" << std::endl;
+ std::cout << "Press Ctrl-C to interrupt" << std::endl;
+
+ cm_->cameraAdded.connect(this, &CamApp::cameraAdded);
+ cm_->cameraRemoved.connect(this, &CamApp::cameraRemoved);
+
+ loopUsers_++;
+ }
+
+ if (loopUsers_)
+ loop_.exec();
+
+ /* 6. Stop capture. */
+ for (const auto &session : sessions) {
+ if (!session->options().isSet(OptCapture))
+ continue;
+
+ session->stop();
+ }
+
+ return 0;
+}
+
+std::string CamApp::cameraName(const Camera *camera)
+{
+ const ControlList &props = camera->properties();
+ bool addModel = true;
+ std::string name;
+
+ /*
+ * Construct the name from the camera location, model and ID. The model
+ * is only used if the location isn't present or is set to External.
+ */
+ const auto &location = props.get(properties::Location);
+ if (location) {
+ switch (*location) {
+ case properties::CameraLocationFront:
+ addModel = false;
+ name = "Internal front camera ";
+ break;
+ case properties::CameraLocationBack:
+ addModel = false;
+ name = "Internal back camera ";
+ break;
+ case properties::CameraLocationExternal:
+ name = "External camera ";
+ break;
+ }
+ }
+
+ if (addModel) {
+		/*
+		 * If the camera location is not available, use the camera
+		 * model to build the camera name.
+		 */
+ const auto &model = props.get(properties::Model);
+ if (model)
+ name = "'" + *model + "' ";
+ }
+
+ name += "(" + camera->id() + ")";
+
+ return name;
+}
+
+void signalHandler([[maybe_unused]] int signal)
+{
+ std::cout << "Exiting" << std::endl;
+ CamApp::instance()->quit();
+}
+
+int main(int argc, char **argv)
+{
+ CamApp app;
+ int ret;
+
+ ret = app.init(argc, argv);
+ if (ret)
+ return ret == -EINTR ? 0 : EXIT_FAILURE;
+
+ struct sigaction sa = {};
+ sa.sa_handler = &signalHandler;
+ sigaction(SIGINT, &sa, nullptr);
+
+ if (app.exec())
+ return EXIT_FAILURE;
+
+ return 0;
+}
diff --git a/src/apps/cam/main.h b/src/apps/cam/main.h
new file mode 100644
index 00000000..64e6a20e
--- /dev/null
+++ b/src/apps/cam/main.h
@@ -0,0 +1,27 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2019, Google Inc.
+ *
+ * Cam application
+ */
+
+#pragma once
+
+enum {
+ OptCamera = 'c',
+ OptCapture = 'C',
+ OptDisplay = 'D',
+ OptFile = 'F',
+ OptHelp = 'h',
+ OptInfo = 'I',
+ OptList = 'l',
+ OptListProperties = 'p',
+ OptMonitor = 'm',
+ OptOrientation = 'o',
+ OptSDL = 'S',
+ OptStream = 's',
+ OptListControls = 256,
+ OptStrictFormats = 257,
+ OptMetadata = 258,
+ OptCaptureScript = 259,
+};
diff --git a/src/apps/cam/meson.build b/src/apps/cam/meson.build
new file mode 100644
index 00000000..c70ca3cd
--- /dev/null
+++ b/src/apps/cam/meson.build
@@ -0,0 +1,62 @@
+# SPDX-License-Identifier: CC0-1.0
+
+if opt_cam.disabled() or not libevent.found()
+ cam_enabled = false
+ subdir_done()
+endif
+
+cam_enabled = true
+
+cam_sources = files([
+ 'camera_session.cpp',
+ 'capture_script.cpp',
+ 'file_sink.cpp',
+ 'frame_sink.cpp',
+ 'main.cpp',
+])
+
+cam_cpp_args = [apps_cpp_args]
+
+libdrm = dependency('libdrm', required : false)
+libjpeg = dependency('libjpeg', required : false)
+libsdl2 = dependency('SDL2', required : false)
+
+if libdrm.found()
+ cam_cpp_args += [ '-DHAVE_KMS' ]
+ cam_sources += files([
+ 'drm.cpp',
+ 'kms_sink.cpp'
+ ])
+endif
+
+if libsdl2.found()
+ cam_cpp_args += ['-DHAVE_SDL']
+ cam_sources += files([
+ 'sdl_sink.cpp',
+ 'sdl_texture.cpp',
+ 'sdl_texture_yuv.cpp',
+ ])
+
+ if libjpeg.found()
+ cam_cpp_args += ['-DHAVE_LIBJPEG']
+ cam_sources += files([
+ 'sdl_texture_mjpg.cpp'
+ ])
+ endif
+endif
+
+cam = executable('cam', cam_sources,
+ link_with : apps_lib,
+ dependencies : [
+ libatomic,
+ libcamera_public,
+ libdrm,
+ libevent,
+ libjpeg,
+ libsdl2,
+ libtiff,
+ libyaml,
+ ],
+ cpp_args : cam_cpp_args,
+ install : true,
+ install_tag : 'bin')
diff --git a/src/apps/cam/sdl_sink.cpp b/src/apps/cam/sdl_sink.cpp
new file mode 100644
index 00000000..8355dd5e
--- /dev/null
+++ b/src/apps/cam/sdl_sink.cpp
@@ -0,0 +1,215 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Ideas on Board Oy
+ *
+ * SDL Sink
+ */
+
+#include "sdl_sink.h"
+
+#include <assert.h>
+#include <fcntl.h>
+#include <iomanip>
+#include <iostream>
+#include <signal.h>
+#include <sstream>
+#include <string.h>
+#include <unistd.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/formats.h>
+
+#include "../common/event_loop.h"
+#include "../common/image.h"
+
+#ifdef HAVE_LIBJPEG
+#include "sdl_texture_mjpg.h"
+#endif
+#include "sdl_texture_yuv.h"
+
+using namespace libcamera;
+
+using namespace std::chrono_literals;
+
+SDLSink::SDLSink()
+ : window_(nullptr), renderer_(nullptr), rect_({}),
+ init_(false)
+{
+}
+
+SDLSink::~SDLSink()
+{
+ stop();
+}
+
+int SDLSink::configure(const libcamera::CameraConfiguration &config)
+{
+ int ret = FrameSink::configure(config);
+ if (ret < 0)
+ return ret;
+
+ if (config.size() > 1) {
+		std::cerr
+			<< "The SDL sink supports only one camera stream at present; only the first stream will be rendered"
+			<< std::endl;
+ } else if (config.empty()) {
+		std::cerr << "At least one camera stream is required"
+			  << std::endl;
+ return -EINVAL;
+ }
+
+ const libcamera::StreamConfiguration &cfg = config.at(0);
+ rect_.w = cfg.size.width;
+ rect_.h = cfg.size.height;
+
+ switch (cfg.pixelFormat) {
+#ifdef HAVE_LIBJPEG
+ case libcamera::formats::MJPEG:
+ texture_ = std::make_unique<SDLTextureMJPG>(rect_);
+ break;
+#endif
+#if SDL_VERSION_ATLEAST(2, 0, 16)
+ case libcamera::formats::NV12:
+ texture_ = std::make_unique<SDLTextureNV12>(rect_, cfg.stride);
+ break;
+#endif
+ case libcamera::formats::YUYV:
+ texture_ = std::make_unique<SDLTextureYUYV>(rect_, cfg.stride);
+ break;
+ default:
+ std::cerr << "Unsupported pixel format "
+ << cfg.pixelFormat.toString() << std::endl;
+ return -EINVAL;
+	}
+
+ return 0;
+}
+
+int SDLSink::start()
+{
+ int ret = SDL_Init(SDL_INIT_VIDEO);
+ if (ret) {
+ std::cerr << "Failed to initialize SDL: " << SDL_GetError()
+ << std::endl;
+ return ret;
+ }
+
+ init_ = true;
+ window_ = SDL_CreateWindow("", SDL_WINDOWPOS_UNDEFINED,
+ SDL_WINDOWPOS_UNDEFINED, rect_.w,
+ rect_.h,
+ SDL_WINDOW_SHOWN | SDL_WINDOW_RESIZABLE);
+ if (!window_) {
+ std::cerr << "Failed to create SDL window: " << SDL_GetError()
+ << std::endl;
+ return -EINVAL;
+ }
+
+ renderer_ = SDL_CreateRenderer(window_, -1, 0);
+ if (!renderer_) {
+ std::cerr << "Failed to create SDL renderer: " << SDL_GetError()
+ << std::endl;
+ return -EINVAL;
+ }
+
+	/*
+	 * The logical render size is only used for scaling; a failure here is
+	 * not critical, so don't return on error.
+	 */
+ ret = SDL_RenderSetLogicalSize(renderer_, rect_.w, rect_.h);
+ if (ret)
+ std::cerr << "Failed to set SDL render logical size: "
+ << SDL_GetError() << std::endl;
+
+	ret = texture_->create(renderer_);
+	if (ret)
+		return ret;
+
+ /* \todo Make the event cancellable to support stop/start cycles. */
+ EventLoop::instance()->addTimerEvent(
+ 10ms, std::bind(&SDLSink::processSDLEvents, this));
+
+ return 0;
+}
+
+int SDLSink::stop()
+{
+ texture_.reset();
+
+ if (renderer_) {
+ SDL_DestroyRenderer(renderer_);
+ renderer_ = nullptr;
+ }
+
+ if (window_) {
+ SDL_DestroyWindow(window_);
+ window_ = nullptr;
+ }
+
+ if (init_) {
+ SDL_Quit();
+ init_ = false;
+ }
+
+ return FrameSink::stop();
+}
+
+void SDLSink::mapBuffer(FrameBuffer *buffer)
+{
+ std::unique_ptr<Image> image =
+ Image::fromFrameBuffer(buffer, Image::MapMode::ReadOnly);
+ assert(image != nullptr);
+
+ mappedBuffers_[buffer] = std::move(image);
+}
+
+bool SDLSink::processRequest(Request *request)
+{
+ for (auto [stream, buffer] : request->buffers()) {
+ renderBuffer(buffer);
+		break; /* \todo Extend to launch one SDL window per buffer */
+ }
+
+ return true;
+}
+
+/*
+ * Process SDL events, required to handle window resizing and close events.
+ */
+void SDLSink::processSDLEvents()
+{
+ for (SDL_Event e; SDL_PollEvent(&e);) {
+ if (e.type == SDL_QUIT) {
+			/* The window close button was clicked, quit. */
+ EventLoop::instance()->exit(0);
+ }
+ }
+}
+
+void SDLSink::renderBuffer(FrameBuffer *buffer)
+{
+ Image *image = mappedBuffers_[buffer].get();
+
+ std::vector<Span<const uint8_t>> planes;
+ unsigned int i = 0;
+
+ planes.reserve(buffer->metadata().planes().size());
+
+ for (const FrameMetadata::Plane &meta : buffer->metadata().planes()) {
+ Span<uint8_t> data = image->data(i);
+ if (meta.bytesused > data.size())
+ std::cerr << "payload size " << meta.bytesused
+ << " larger than plane size " << data.size()
+ << std::endl;
+
+ planes.push_back(data);
+ i++;
+ }
+
+ texture_->update(planes);
+
+ SDL_RenderClear(renderer_);
+ SDL_RenderCopy(renderer_, texture_->get(), nullptr, nullptr);
+ SDL_RenderPresent(renderer_);
+}
diff --git a/src/apps/cam/sdl_sink.h b/src/apps/cam/sdl_sink.h
new file mode 100644
index 00000000..18ec7fbe
--- /dev/null
+++ b/src/apps/cam/sdl_sink.h
@@ -0,0 +1,48 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Ideas on Board Oy
+ *
+ * SDL Sink
+ */
+
+#pragma once
+
+#include <map>
+#include <memory>
+
+#include <libcamera/stream.h>
+
+#include <SDL2/SDL.h>
+
+#include "frame_sink.h"
+
+class Image;
+class SDLTexture;
+
+class SDLSink : public FrameSink
+{
+public:
+ SDLSink();
+ ~SDLSink();
+
+ int configure(const libcamera::CameraConfiguration &config) override;
+ int start() override;
+ int stop() override;
+ void mapBuffer(libcamera::FrameBuffer *buffer) override;
+
+ bool processRequest(libcamera::Request *request) override;
+
+private:
+ void renderBuffer(libcamera::FrameBuffer *buffer);
+ void processSDLEvents();
+
+ std::map<libcamera::FrameBuffer *, std::unique_ptr<Image>>
+ mappedBuffers_;
+
+ std::unique_ptr<SDLTexture> texture_;
+
+ SDL_Window *window_;
+ SDL_Renderer *renderer_;
+ SDL_Rect rect_;
+ bool init_;
+};
diff --git a/src/apps/cam/sdl_texture.cpp b/src/apps/cam/sdl_texture.cpp
new file mode 100644
index 00000000..e52c4a3a
--- /dev/null
+++ b/src/apps/cam/sdl_texture.cpp
@@ -0,0 +1,36 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Ideas on Board Oy
+ *
+ * SDL Texture
+ */
+
+#include "sdl_texture.h"
+
+#include <iostream>
+
+SDLTexture::SDLTexture(const SDL_Rect &rect, uint32_t pixelFormat,
+ const int stride)
+ : ptr_(nullptr), rect_(rect), pixelFormat_(pixelFormat), stride_(stride)
+{
+}
+
+SDLTexture::~SDLTexture()
+{
+ if (ptr_)
+ SDL_DestroyTexture(ptr_);
+}
+
+int SDLTexture::create(SDL_Renderer *renderer)
+{
+ ptr_ = SDL_CreateTexture(renderer, pixelFormat_,
+ SDL_TEXTUREACCESS_STREAMING, rect_.w,
+ rect_.h);
+ if (!ptr_) {
+ std::cerr << "Failed to create SDL texture: " << SDL_GetError()
+ << std::endl;
+ return -ENOMEM;
+ }
+
+ return 0;
+}
diff --git a/src/apps/cam/sdl_texture.h b/src/apps/cam/sdl_texture.h
new file mode 100644
index 00000000..990f83b6
--- /dev/null
+++ b/src/apps/cam/sdl_texture.h
@@ -0,0 +1,30 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Ideas on Board Oy
+ *
+ * SDL Texture
+ */
+
+#pragma once
+
+#include <vector>
+
+#include <SDL2/SDL.h>
+
+#include "../common/image.h"
+
+class SDLTexture
+{
+public:
+ SDLTexture(const SDL_Rect &rect, uint32_t pixelFormat, const int stride);
+ virtual ~SDLTexture();
+ int create(SDL_Renderer *renderer);
+ virtual void update(const std::vector<libcamera::Span<const uint8_t>> &data) = 0;
+ SDL_Texture *get() const { return ptr_; }
+
+protected:
+ SDL_Texture *ptr_;
+ const SDL_Rect rect_;
+ const uint32_t pixelFormat_;
+ const int stride_;
+};
diff --git a/src/apps/cam/sdl_texture_mjpg.cpp b/src/apps/cam/sdl_texture_mjpg.cpp
new file mode 100644
index 00000000..cace18fc
--- /dev/null
+++ b/src/apps/cam/sdl_texture_mjpg.cpp
@@ -0,0 +1,83 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Ideas on Board Oy
+ *
+ * SDL Texture MJPG
+ */
+
+#include "sdl_texture_mjpg.h"
+
+#include <iostream>
+#include <setjmp.h>
+#include <stdio.h>
+
+#include <jpeglib.h>
+
+using namespace libcamera;
+
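+/*
+ * libjpeg reports fatal errors through the error_exit() callback, which must
+ * not return to the caller. Override it to longjmp() back into decompress()
+ * so that a corrupt frame aborts decoding cleanly instead of terminating the
+ * process, and silence warning output by overriding output_message().
+ */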
+struct JpegErrorManager : public jpeg_error_mgr {
+ JpegErrorManager()
+ {
+ jpeg_std_error(this);
+ error_exit = errorExit;
+ output_message = outputMessage;
+ }
+
+ static void errorExit(j_common_ptr cinfo)
+ {
+ JpegErrorManager *self =
+ static_cast<JpegErrorManager *>(cinfo->err);
+ longjmp(self->escape_, 1);
+ }
+
+ static void outputMessage([[maybe_unused]] j_common_ptr cinfo)
+ {
+ }
+
+ jmp_buf escape_;
+};
+
+SDLTextureMJPG::SDLTextureMJPG(const SDL_Rect &rect)
+ : SDLTexture(rect, SDL_PIXELFORMAT_RGB24, rect.w * 3),
+ rgb_(std::make_unique<unsigned char[]>(stride_ * rect.h))
+{
+}
+
+int SDLTextureMJPG::decompress(Span<const uint8_t> data)
+{
+ struct jpeg_decompress_struct cinfo;
+
+ JpegErrorManager errorManager;
+ if (setjmp(errorManager.escape_)) {
+ /* libjpeg found an error */
+ jpeg_destroy_decompress(&cinfo);
+ std::cerr << "JPEG decompression error" << std::endl;
+ return -EINVAL;
+ }
+
+ cinfo.err = &errorManager;
+ jpeg_create_decompress(&cinfo);
+
+ jpeg_mem_src(&cinfo, data.data(), data.size());
+
+ jpeg_read_header(&cinfo, TRUE);
+
+ jpeg_start_decompress(&cinfo);
+
+ for (int i = 0; cinfo.output_scanline < cinfo.output_height; ++i) {
+ JSAMPROW rowptr = rgb_.get() + i * stride_;
+ jpeg_read_scanlines(&cinfo, &rowptr, 1);
+ }
+
+ jpeg_finish_decompress(&cinfo);
+
+ jpeg_destroy_decompress(&cinfo);
+
+ return 0;
+}
+
+void SDLTextureMJPG::update(const std::vector<libcamera::Span<const uint8_t>> &data)
+{
+ decompress(data[0]);
+ SDL_UpdateTexture(ptr_, nullptr, rgb_.get(), stride_);
+}
diff --git a/src/apps/cam/sdl_texture_mjpg.h b/src/apps/cam/sdl_texture_mjpg.h
new file mode 100644
index 00000000..37bed5f0
--- /dev/null
+++ b/src/apps/cam/sdl_texture_mjpg.h
@@ -0,0 +1,23 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Ideas on Board Oy
+ *
+ * SDL Texture MJPG
+ */
+
+#pragma once
+
+#include "sdl_texture.h"
+
+class SDLTextureMJPG : public SDLTexture
+{
+public:
+ SDLTextureMJPG(const SDL_Rect &rect);
+
+ void update(const std::vector<libcamera::Span<const uint8_t>> &data) override;
+
+private:
+ int decompress(libcamera::Span<const uint8_t> data);
+
+ std::unique_ptr<unsigned char[]> rgb_;
+};
diff --git a/src/apps/cam/sdl_texture_yuv.cpp b/src/apps/cam/sdl_texture_yuv.cpp
new file mode 100644
index 00000000..480d7a37
--- /dev/null
+++ b/src/apps/cam/sdl_texture_yuv.cpp
@@ -0,0 +1,33 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Ideas on Board Oy
+ *
+ * SDL YUV Textures
+ */
+
+#include "sdl_texture_yuv.h"
+
+using namespace libcamera;
+
+#if SDL_VERSION_ATLEAST(2, 0, 16)
+SDLTextureNV12::SDLTextureNV12(const SDL_Rect &rect, unsigned int stride)
+ : SDLTexture(rect, SDL_PIXELFORMAT_NV12, stride)
+{
+}
+
+void SDLTextureNV12::update(const std::vector<libcamera::Span<const uint8_t>> &data)
+{
+ SDL_UpdateNVTexture(ptr_, &rect_, data[0].data(), stride_,
+ data[1].data(), stride_);
+}
+#endif
+
+SDLTextureYUYV::SDLTextureYUYV(const SDL_Rect &rect, unsigned int stride)
+ : SDLTexture(rect, SDL_PIXELFORMAT_YUY2, stride)
+{
+}
+
+void SDLTextureYUYV::update(const std::vector<libcamera::Span<const uint8_t>> &data)
+{
+ SDL_UpdateTexture(ptr_, &rect_, data[0].data(), stride_);
+}
diff --git a/src/apps/cam/sdl_texture_yuv.h b/src/apps/cam/sdl_texture_yuv.h
new file mode 100644
index 00000000..29c756e7
--- /dev/null
+++ b/src/apps/cam/sdl_texture_yuv.h
@@ -0,0 +1,26 @@
+/* SPDX-License-Identifier: GPL-2.0-or-later */
+/*
+ * Copyright (C) 2022, Ideas on Board Oy
+ *
+ * SDL YUV Textures
+ */
+
+#pragma once
+
+#include "sdl_texture.h"
+
+#if SDL_VERSION_ATLEAST(2, 0, 16)
+class SDLTextureNV12 : public SDLTexture
+{
+public:
+ SDLTextureNV12(const SDL_Rect &rect, unsigned int stride);
+ void update(const std::vector<libcamera::Span<const uint8_t>> &data) override;
+};
+#endif
+
+class SDLTextureYUYV : public SDLTexture
+{
+public:
+ SDLTextureYUYV(const SDL_Rect &rect, unsigned int stride);
+ void update(const std::vector<libcamera::Span<const uint8_t>> &data) override;
+};