#!/bin/bash
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2019, Google Inc.
#
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
#
# rkisp1-capture.sh - Capture processed frames from cameras based on the
# Rockchip ISP1
#
# The script makes use of the following tools, which are expected to be
# executable from the system-wide path or from the local directory:
#
# - media-ctl (from v4l-utils git://linuxtv.org/v4l-utils.git)
# - raw2rgbpnm (from git://git.retiisi.org.uk/~sailus/raw2rgbpnm.git)
# - yavta (from git://git.ideasonboard.org/yavta.git)
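#
# Example invocation, assuming an 'imx219' sensor (use the sensor name
# reported for your board, see find_sensor below):
#
#   ./rkisp1-capture.sh --count 5 --size 1920x1080 imx219
#
# Add -r/--raw to capture unprocessed Bayer frames straight from the sensor.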

# Locate the sensor entity
find_sensor() {
	local bus
	local sensor_name=$1

	bus=$(grep "$sensor_name " /sys/class/video4linux/v4l-subdev*/name | cut -d ' ' -f 2)
	if [[ -z $bus ]]; then
		echo "Sensor '$sensor_name' not found." >&2
		exit 1
	fi

	echo "$sensor_name $bus"
}
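
# The sysfs name files hold the entity name, which for sensor subdevs is the
# driver name followed by the I2C bus info; the exact address depends on the
# board. find_sensor echoes the full entity name, e.g.:
#
#   sensor=$(find_sensor imx219)	# -> 'imx219 4-0010' (illustrative)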

# Locate the media device
find_media_device() {
	local mdev
	local name=$1

	for mdev in /dev/media* ; do
		media-ctl -d $mdev -p | grep -q "^driver[[:space:]]*$name$" && break
		mdev=
	done

	if [[ -z $mdev ]] ; then
		echo "$name media device not found." >&2
		exit 1
	fi

	echo $mdev
}
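
# find_media_device matches the 'driver' line of the media device information
# block printed by media-ctl -p, which looks like:
#
#   Media device information
#   ------------------------
#   driver          rkisp1
#
#   mdev=$(find_media_device rkisp1)	# e.g. '/dev/media0'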

# Get the sensor format
get_sensor_format() {
	local format
	local sensor=$1

	format=$($mediactl --get-v4l2 "'$sensor':0" | sed 's/\[\([^ ]*\).*/\1/')
	sensor_mbus_code=$(echo $format | sed 's/fmt:\([A-Z0-9_]*\).*/\1/')
	sensor_size=$(echo $format | sed 's/[^\/]*\/\([0-9x]*\).*/\1/')

	echo "Capturing ${sensor_size} from sensor $sensor in ${sensor_mbus_code}"
}
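
# get_sensor_format parses the pad format reported by media-ctl, e.g.
# '[fmt:SBGGR10_1X10/1920x1080 field:none]', yielding
# sensor_mbus_code=SBGGR10_1X10 and sensor_size=1920x1080 (the actual values
# depend on the sensor and its current mode).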

# Configure the pipeline
configure_pipeline() {
	local format="fmt:$sensor_mbus_code/$sensor_size"
	local capture_mbus_code=$1
	local capture_size=$2

	echo "Configuring pipeline for $sensor in $format"

	$mediactl -r

	$mediactl -l "'$sensor':0 -> 'rockchip-sy-mipi-dphy':0 [1]"
	$mediactl -l "'rockchip-sy-mipi-dphy':1 -> 'rkisp1-isp-subdev':0 [1]"
	$mediactl -l "'rkisp1-isp-subdev':2 -> 'rkisp1_mainpath':0 [1]"

	$mediactl -V "'$sensor':0 [$format]"
	$mediactl -V "'rockchip-sy-mipi-dphy':1 [$format]"
	$mediactl -V "'rkisp1-isp-subdev':0 [$format crop:(0,0)/$sensor_size]"
	$mediactl -V "'rkisp1-isp-subdev':2 [fmt:$capture_mbus_code/$capture_size crop:(0,0)/$capture_size]"
}
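
# The configured pipeline, with the sensor format propagated up to the ISP
# input and the requested capture format set on its source pad:
#
#   sensor:0 -> rockchip-sy-mipi-dphy:0
#   rockchip-sy-mipi-dphy:1 -> rkisp1-isp-subdev:0
#   rkisp1-isp-subdev:2 -> rkisp1_mainpath:0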

# Capture frames
capture_frames() {
	local file_op
	local capture_format=$1
	local capture_size=$2
	local frame_count=$3
	local save_file=$4

	if [[ $save_file -eq 1 ]]; then
		file_op="--file=/tmp/frame-#.bin"
	fi

	yavta -c$frame_count -n5 -I -f $capture_format -s $capture_size \
		$file_op $($mediactl -e "rkisp1_mainpath")
}
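
# yavta substitutes the frame number for the '#' in the --file pattern,
# producing /tmp/frame-000000.bin, /tmp/frame-000001.bin and so on, which is
# the naming scheme convert_files below expects.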

# Convert captured files to ppm
convert_files() {
	local format=$1
	local size=$2
	local frame_count=$3

	echo "Converting ${frame_count} frames (${size})"

	for i in $(seq 0 $((frame_count - 1))); do
		i=$(printf %06u $i)
		raw2rgbpnm -f $format -s $size /tmp/frame-$i.bin /tmp/frame-$i.ppm
	done
}
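
# Individual frames can also be converted manually, e.g. for a raw capture
# (format and size are illustrative):
#
#   raw2rgbpnm -f SBGGR10 -s 1920x1080 /tmp/frame-000000.bin /tmp/frame-000000.ppm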

# Print usage message
usage() {
	echo "Usage: $1 [options] sensor-name"
	echo "Supported options:"
	echo "-c, --count n     Number of frames to capture"
	echo "--no-save         Do not save captured frames to disk"
	echo "-r, --raw         Capture RAW frames"
	echo "-s, --size wxh    Frame size"
}

# Parse command line arguments
capture_size=1024x768
frame_count=10
raw=false
save_file=1

while [[ $# -ne 0 ]] ; do
	case $1 in
	-c|--count)
		frame_count=$2
		shift 2
		;;
	--no-save)
		save_file=0
		shift
		;;
	-r|--raw)
		raw=true
		shift
		;;
	-s|--size)
		capture_size=$2
		shift 2
		;;
	-*)
		echo "Unsupported option $1" >&2
		usage $0
		exit 1
		;;
	*)
		break
		;;
	esac
done

if [[ $# -ne 1 ]] ; then
	usage $0
	exit 1
fi

sensor_name=$1

modprobe mipi_dphy_sy
modprobe video_rkisp1

sensor=$(find_sensor $sensor_name) || exit
mdev=$(find_media_device rkisp1) || exit
mediactl="media-ctl -d $mdev"

get_sensor_format "$sensor"
if [[ $raw == true ]] ; then
	# Strip the serialisation suffix, e.g. SBGGR10_1X10 -> SBGGR10
	capture_format=$(echo $sensor_mbus_code | sed 's/_[0-9X]*$//')
	capture_mbus_code=$sensor_mbus_code
else
	capture_format=YUYV
	capture_mbus_code=YUYV8_2X8
fi

configure_pipeline $capture_mbus_code $capture_size
capture_frames $capture_format $capture_size $frame_count $save_file
[[ $save_file -eq 1 ]] && convert_files $capture_format $capture_size $frame_count