/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Collabora Ltd.
 *     Author: Nicolas Dufresne
 *
 * gstlibcamerasrc.cpp - GStreamer Capture Element
 */

/**
 * \todo The following is a list of items that need implementation in the GStreamer plugin
 *  - Implement GstElement::send_event
 *    + Allowing application to send EOS
 *    + Allowing application to use FLUSH/FLUSH_STOP
 *    + Prevent the main thread from accessing streaming thread
 *  - Implement renegotiation (even if slow)
 *  - Implement GstElement::request-new-pad (multi stream)
 *    + Evaluate if a single streaming thread is fine
 *  - Add application driven request (snapshot)
 *  - Add framerate control
 *  - Add buffer importation support
 *
 *  Requires new libcamera API:
 *  - Add framerate negotiation support
 *  - Add colorimetry support
 *  - Add timestamp support
 *  - Use unique names to select the camera devices
 *  - Add GstVideoMeta support (strides and offsets)
 *
 * \todo The libcamera UVC driver picks the lowest possible resolution first;
 * this should be fixed so that we get a decent resolution and framerate for
 * the role by default.
 */

#include "gstlibcamerasrc.h"

#include <queue>
#include <vector>

#include <gst/base/base.h>

#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>

#include "gstlibcameraallocator.h"
#include "gstlibcamerapad.h"
#include "gstlibcamerapool.h"
#include "gstlibcamera-utils.h"

using namespace libcamera;

GST_DEBUG_CATEGORY_STATIC(source_debug);
#define GST_CAT_DEFAULT source_debug

/* Wraps a libcamera Request together with the GstBuffers attached to its streams. */
struct RequestWrap {
	RequestWrap(Request *request);
	~RequestWrap();

	void attachBuffer(GstBuffer *buffer);
	GstBuffer *detachBuffer(Stream *stream);

	/* For ptr comparison only. */
	Request *request_;
	std::map<Stream *, GstBuffer *> buffers_;
};

RequestWrap::RequestWrap(Request *request)
	: request_(request)
{
}

RequestWrap::~RequestWrap()
{
	for (std::pair<Stream *const, GstBuffer *> &item : buffers_) {
		if (item.second)
			gst_buffer_unref(item.second);
	}
}

void RequestWrap::attachBuffer(GstBuffer *buffer)
{
	FrameBuffer *fb = gst_libcamera_buffer_get_frame_buffer(buffer);
	Stream *stream = gst_libcamera_buffer_get_stream(buffer);

	request_->addBuffer(stream, fb);

	auto item = buffers_.find(stream);
	if (item != buffers_.end()) {
		gst_buffer_unref(item->second);
		item->second = buffer;
	} else {
		buffers_[stream] = buffer;
	}
}

GstBuffer *RequestWrap::detachBuffer(Stream *stream)
{
	GstBuffer *buffer = nullptr;

	auto item = buffers_.find(stream);
	if (item != buffers_.end()) {
		buffer = item->second;
		item->second = nullptr;
	}

	return buffer;
}

/* Used for C++ object with destructors. */
struct GstLibcameraSrcState {
	GstLibcameraSrc *src_;

	std::unique_ptr<CameraManager> cm_;
	std::shared_ptr<Camera> cam_;
	std::unique_ptr<CameraConfiguration> config_;
	std::vector<GstPad *> srcpads_;
	std::queue<std::unique_ptr<RequestWrap>> requests_;

	void requestCompleted(Request *request);
};

struct _GstLibcameraSrc {
	GstElement parent;

	GRecMutex stream_lock;
	GstTask *task;

	gchar *camera_name;

	GstLibcameraSrcState *state;
	GstLibcameraAllocator *allocator;
	GstFlowCombiner *flow_combiner;
};

enum {
	PROP_0,
	PROP_CAMERA_NAME
};

G_DEFINE_TYPE_WITH_CODE(GstLibcameraSrc, gst_libcamera_src, GST_TYPE_ELEMENT,
			GST_DEBUG_CATEGORY_INIT(source_debug, "libcamerasrc", 0,
						"libcamera Source"));

#define TEMPLATE_CAPS GST_STATIC_CAPS("video/x-raw; image/jpeg")
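/*
 * For context only: a minimal, hypothetical application using this element.
 * The sketch below is not part of the plugin; it assumes the plugin is
 * installed, and "videoconvert" and "autovideosink" are used purely as
 * placeholder downstream elements.
 */
#include <gst/gst.h>

int main(int argc, char **argv)
{
	gst_init(&argc, &argv);

	/* Build a simple capture pipeline around libcamerasrc. */
	GError *error = nullptr;
	GstElement *pipeline = gst_parse_launch(
		"libcamerasrc ! videoconvert ! autovideosink", &error);
	if (!pipeline) {
		g_printerr("Failed to create pipeline: %s\n", error->message);
		g_error_free(error);
		return 1;
	}

	gst_element_set_state(pipeline, GST_STATE_PLAYING);

	/* Block until an error or EOS message is posted on the bus. */
	GstBus *bus = gst_element_get_bus(pipeline);
	GstMessage *msg = gst_bus_timed_pop_filtered(
		bus, GST_CLOCK_TIME_NONE,
		static_cast<GstMessageType>(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
	if (msg)
		gst_message_unref(msg);
	gst_object_unref(bus);

	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(pipeline);

	return 0;
}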
/* For the simple case, we have a src pad that is always present. */
GstStaticPadTemplate src_template = {
	"src", GST_PAD_SRC, GST_PAD_ALWAYS, TEMPLATE_CAPS
};

/* More pads can be requested in state < PAUSED */
GstStaticPadTemplate request_src_template = {
	"src_%s", GST_PAD_SRC, GST_PAD_REQUEST, TEMPLATE_CAPS
};

/* Called from libcamera's completion (callback) thread when a request completes. */
void GstLibcameraSrcState::requestCompleted(Request *request)
{
	GLibLocker lock(GST_OBJECT(src_));

	GST_DEBUG_OBJECT(src_, "buffers are ready");

	std::unique_ptr<RequestWrap> wrap = std::move(requests_.front());
	requests_.pop();

	g_return_if_fail(wrap->request_ == request);

	if (request->status() == Request::RequestCancelled) {
		GST_DEBUG_OBJECT(src_, "Request was cancelled");
		return;
	}

	GstBuffer *buffer;
	for (GstPad *srcpad : srcpads_) {
		Stream *stream = gst_libcamera_pad_get_stream(srcpad);
		buffer = wrap->detachBuffer(stream);
		FrameBuffer *fb = gst_libcamera_buffer_get_frame_buffer(buffer);

		if (GST_ELEMENT_CLOCK(src_)) {
			GstClockTime gst_base_time = GST_ELEMENT(src_)->base_time;
			GstClockTime gst_now = gst_clock_get_time(GST_ELEMENT_CLOCK(src_));
			/* \todo Need to expose which reference clock the timestamp relates to. */
			GstClockTime sys_now = g_get_monotonic_time() * 1000;

			/* Deduced from: sys_now - sys_base_time == gst_now - gst_base_time */
			GstClockTime sys_base_time = sys_now - (gst_now - gst_base_time);
			GST_BUFFER_PTS(buffer) = fb->metadata().timestamp - sys_base_time;
			gst_libcamera_pad_set_latency(srcpad, sys_now - fb->metadata().timestamp);
		} else {
			GST_BUFFER_PTS(buffer) = 0;
		}

		GST_BUFFER_OFFSET(buffer) = fb->metadata().sequence;
		GST_BUFFER_OFFSET_END(buffer) = fb->metadata().sequence;

		gst_libcamera_pad_queue_buffer(srcpad, buffer);
	}

	gst_libcamera_resume_task(this->src_->task);
}

static bool
gst_libcamera_src_open(GstLibcameraSrc *self)
{
	std::unique_ptr<CameraManager> cm = std::make_unique<CameraManager>();
	std::shared_ptr<Camera> cam;
	gint ret = 0;

	GST_DEBUG_OBJECT(self, "Opening camera device ...");

	ret = cm->start();
	if (ret) {
		GST_ELEMENT_ERROR(self, LIBRARY, INIT,
				  ("Failed listing cameras."),
				  ("libcamera::CameraManager::start() failed: %s", g_strerror(-ret)));
		return false;
	}

	g_autofree gchar *camera_name = nullptr;
	{
		GLibLocker lock(GST_OBJECT(self));
		if (self->camera_name)
			camera_name = g_strdup(self->camera_name);
	}

	if (camera_name) {
		cam = cm->get(self->camera_name);
		if (!cam) {
			GST_ELEMENT_ERROR(self, RESOURCE, NOT_FOUND,
					  ("Could not find a camera named '%s'.", self->camera_name),
					  ("libcamera::CameraManager::get() returned nullptr"));
			return false;
		}
	} else {
		if (cm->cameras().empty()) {
			GST_ELEMENT_ERROR(self, RESOURCE, NOT_FOUND,
					  ("Could not find any supported camera on this system."),
					  ("libcamera::CameraManager::cameras() is empty"));
			return false;
		}
		cam = cm->cameras()[0];
	}

	GST_INFO_OBJECT(self, "Using camera '%s'", cam->id().c_str());

	ret = cam->acquire();
	if (ret) {
		GST_ELEMENT_ERROR(self, RESOURCE, BUSY,
				  ("Camera '%s' is already in use.", cam->id().c_str()),
				  ("libcamera::Camera::acquire() failed: %s", g_strerror(ret)));
		return false;
	}

	cam->requestCompleted.connect(self->state, &GstLibcameraSrcState::requestCompleted);

	/* No need to lock here, we didn't start our threads yet. */
	self->state->cm_ = std::move(cm);
	self->state->cam_ = cam;

	return true;
}

/*
 * Streaming task: attach a buffer from each pad's pool to a new request,
 * queue the request to the camera, then push any pending buffers downstream.
 */
static void
gst_libcamera_src_task_run(gpointer user_data)
{
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(user_data);
	GstLibcameraSrcState *state = self->state;

	Request *request = state->cam_->createRequest();
	auto wrap = std::make_unique<RequestWrap>(request);
	for (GstPad *srcpad : state->srcpads_) {
		GstLibcameraPool *pool = gst_libcamera_pad_get_pool(srcpad);
		GstBuffer *buffer;
		GstFlowReturn ret;

		ret = gst_buffer_pool_acquire_buffer(GST_BUFFER_POOL(pool),
						     &buffer, nullptr);
		if (ret != GST_FLOW_OK) {
			/*
			 * RequestWrap does not take ownership, and we won't be
			 * queueing this one due to lack of buffers.
			 */
			delete request;
			request = nullptr;
			break;
		}

		wrap->attachBuffer(buffer);
	}

	if (request) {
		GLibLocker lock(GST_OBJECT(self));
		GST_TRACE_OBJECT(self, "Requesting buffers");
		state->cam_->queueRequest(request);
		state->requests_.push(std::move(wrap));
	}

	GstFlowReturn ret = GST_FLOW_OK;
	gst_flow_combiner_reset(self->flow_combiner);
	for (GstPad *srcpad : state->srcpads_) {
		ret = gst_libcamera_pad_push_pending(srcpad);
		ret = gst_flow_combiner_update_pad_flow(self->flow_combiner,
							srcpad, ret);
	}

	{
		/*
		 * Here we need to decide if we want to pause or stop the task. This
		 * needs to happen in lock step with the callback thread which may want
		 * to resume the task.
		 */
		GLibLocker lock(GST_OBJECT(self));
		if (ret != GST_FLOW_OK) {
			if (ret == GST_FLOW_EOS) {
				g_autoptr(GstEvent) eos = gst_event_new_eos();
				guint32 seqnum = gst_util_seqnum_next();
				gst_event_set_seqnum(eos, seqnum);
				for (GstPad *srcpad : state->srcpads_)
					gst_pad_push_event(srcpad, gst_event_ref(eos));
			} else if (ret != GST_FLOW_FLUSHING) {
				GST_ELEMENT_FLOW_ERROR(self, ret);
			}
			gst_task_stop(self->task);
			return;
		}

		bool do_pause = true;
		for (GstPad *srcpad : state->srcpads_) {
			if (gst_libcamera_pad_has_pending(srcpad)) {
				do_pause = false;
				break;
			}
		}

		if (do_pause)
			gst_task_pause(self->task);
	}
}

static void
gst_libcamera_src_task_enter(GstTask *task, [[maybe_unused]] GThread *thread,
			     gpointer user_data)
{
	GstLibcameraSrc *self = GST_LIBCAMERA_SRC(user_data);
	GLibRecLocker lock(&self->stream_lock);
	GstLibcameraSrcState *state = self->state;
	GstFlowReturn flow_ret = GST_FLOW_OK;
	gint ret;

	GST_DEBUG_OBJECT(self, "Streaming thread has started");

	guint group_id = gst_util_group_id_next();
	StreamRoles roles;
	for (GstPad *srcpad : state->srcpads_) {
		/* Create stream-id and push stream-start. */
		g_autofree gchar *stream_id = gst_pad_create_stream_id(srcpad,
								       GST_ELEMENT(self),
								       nullptr);
		GstEvent *event = gst_event_new_stream_start(stream_id);
		gst_event_set_group_id(event, group_id);
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright (C) 2020-2021, Google Inc.
 *
 * simple_capture.cpp - Simple capture helper
 */

#include <iostream>
#include <string>

#include <gtest/gtest.h>

#include "simple_capture.h"

using namespace libcamera;

SimpleCapture::SimpleCapture(std::shared_ptr<Camera> camera)
	: loop_(nullptr), camera_(camera),
	  allocator_(std::make_unique<FrameBufferAllocator>(camera))
{
}

SimpleCapture::~SimpleCapture()
{
	stop();
}

void SimpleCapture::configure(StreamRole role)
{
	config_ = camera_->generateConfiguration({ role });

	if (!config_) {
		std::cout << "Role not supported by camera" << std::endl;
		GTEST_SKIP();
	}

	if (config_->validate() != CameraConfiguration::Valid) {
		config_.reset();
		FAIL() << "Configuration not valid";
	}

	if (camera_->configure(config_.get())) {
		config_.reset();
		FAIL() << "Failed to configure camera";
	}
}

void SimpleCapture::start()
{
	Stream *stream = config_->at(0).stream();
	int count = allocator_->allocate(stream);

	ASSERT_GE(count, 0) << "Failed to allocate buffers";
	EXPECT_EQ(count, config_->at(0).bufferCount) << "Allocated fewer buffers than expected";

	camera_->requestCompleted.connect(this, &SimpleCapture::requestComplete);

	ASSERT_EQ(camera_->start(), 0) << "Failed to start camera";
}

void SimpleCapture::stop()
{
	if (!config_ || !allocator_->allocated())
		return;

	camera_->stop();

	camera_->requestCompleted.disconnect(this);

	Stream *stream = config_->at(0).stream();
	requests_.clear();
	allocator_->free(stream);
}

/* SimpleCaptureBalanced */

SimpleCaptureBalanced::SimpleCaptureBalanced(std::shared_ptr<Camera> camera)
	: SimpleCapture(camera)
{
}

void SimpleCaptureBalanced::capture(unsigned int numRequests)
{
	start();

	Stream *stream = config_->at(0).stream();
	const std::vector<std::unique_ptr<FrameBuffer>> &buffers = allocator_->buffers(stream);

	/* No point in testing fewer requests than the camera depth. */
	if (buffers.size() > numRequests) {
		std::cout << "Camera needs " + std::to_string(buffers.size())
				+ " requests, can't test only "
				+ std::to_string(numRequests)
			  << std::endl;
		GTEST_SKIP();
	}

	queueCount_ = 0;
	captureCount_ = 0;
	captureLimit_ = numRequests;

	/* Queue the recommended number of requests. */
	for (const std::unique_ptr<FrameBuffer> &buffer : buffers) {
		std::unique_ptr<Request> request = camera_->createRequest();
		ASSERT_TRUE(request) << "Can't create request";

		ASSERT_EQ(request->addBuffer(stream, buffer.get()), 0) << "Can't set buffer for request";

		ASSERT_EQ(queueRequest(request.get()), 0) << "Failed to queue request";

		requests_.push_back(std::move(request));
	}

	/* Run capture session. */
	loop_ = new EventLoop();
	loop_->exec();
	stop();
	delete loop_;

	ASSERT_EQ(captureCount_, captureLimit_);
}

int SimpleCaptureBalanced::queueRequest(Request *request)
{
	queueCount_++;
	if (queueCount_ > captureLimit_)
		return 0;

	return camera_->queueRequest(request);
}

void SimpleCaptureBalanced::requestComplete(Request *request)
{
	captureCount_++;
	if (captureCount_ >= captureLimit_) {
		loop_->exit(0);
		return;
	}

	request->reuse(Request::ReuseBuffers);
	if (queueRequest(request))
		loop_->exit(-EINVAL);
}

/* SimpleCaptureUnbalanced */

SimpleCaptureUnbalanced::SimpleCaptureUnbalanced(std::shared_ptr<Camera> camera)
	: SimpleCapture(camera)
{
}

void SimpleCaptureUnbalanced::capture(unsigned int numRequests)
{
	start();

	Stream *stream = config_->at(0).stream();
	const std::vector<std::unique_ptr<FrameBuffer>> &buffers = allocator_->buffers(stream);

	captureCount_ = 0;
	captureLimit_ = numRequests;

	/* Queue the recommended number of requests. */
	for (const std::unique_ptr<FrameBuffer> &buffer : buffers) {
		std::unique_ptr<Request> request = camera_->createRequest();
		ASSERT_TRUE(request) << "Can't create request";

		ASSERT_EQ(request->addBuffer(stream, buffer.get()), 0) << "Can't set buffer for request";

		ASSERT_EQ(camera_->queueRequest(request.get()), 0) << "Failed to queue request";

		requests_.push_back(std::move(request));
	}

	/* Run capture session. */
	loop_ = new EventLoop();
	int status = loop_->exec();
	stop();
	delete loop_;

	ASSERT_EQ(status, 0);
}

void SimpleCaptureUnbalanced::requestComplete(Request *request)
{
	captureCount_++;
	if (captureCount_ >= captureLimit_) {
		loop_->exit(0);
		return;
	}

	request->reuse(Request::ReuseBuffers);
	if (camera_->queueRequest(request))
		loop_->exit(-EINVAL);
}
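/*
 * Minimal usage sketch (hypothetical, not part of this helper): it constructs
 * a local CameraManager, picks the first camera found, and runs a balanced
 * capture. The test name, stream role and request count (40) are arbitrary
 * choices for illustration only.
 */
#include <memory>

#include <gtest/gtest.h>

#include <libcamera/camera.h>
#include <libcamera/camera_manager.h>

#include "simple_capture.h"

TEST(SimpleCaptureExample, BalancedCapture)
{
	libcamera::CameraManager cm;
	ASSERT_EQ(cm.start(), 0) << "Failed to start camera manager";

	if (cm.cameras().empty())
		GTEST_SKIP() << "No camera available";

	std::shared_ptr<libcamera::Camera> camera = cm.cameras()[0];
	ASSERT_EQ(camera->acquire(), 0) << "Failed to acquire camera";

	{
		/* Scope the helper so it stops and frees buffers before release. */
		SimpleCaptureBalanced capture(camera);
		capture.configure(libcamera::StreamRole::VideoRecording);
		capture.capture(40);
	}

	camera->release();
	camera.reset();
	cm.stop();
}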