summaryrefslogtreecommitdiff
path: root/src/libcamera/pipeline/rpi/common/pipeline_base.h
blob: 0608bbe5f0c70b900c2f1ce07d2d315c254ebe3a (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019-2023, Raspberry Pi Ltd
 *
 * pipeline_base.h - Pipeline handler base class for Raspberry Pi devices
 */

#include <map>
#include <memory>
#include <optional>
#include <queue>
#include <string>
#include <unordered_set>
#include <utility>
#include <vector>

#include <libcamera/controls.h>
#include <libcamera/request.h>

#include "libcamera/internal/bayer_format.h"
#include "libcamera/internal/camera.h"
#include "libcamera/internal/camera_sensor.h"
#include "libcamera/internal/framebuffer.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/media_object.h"
#include "libcamera/internal/pipeline_handler.h"
#include "libcamera/internal/v4l2_videodevice.h"
#include "libcamera/internal/yaml_parser.h"

#include <libcamera/ipa/raspberrypi_ipa_interface.h>
#include <libcamera/ipa/raspberrypi_ipa_proxy.h>

#include "delayed_controls.h"
#include "rpi_stream.h"

using namespace std::chrono_literals;

namespace libcamera {

namespace RPi {

/* Map of mbus codes to supported sizes reported by the sensor. */
using SensorFormats = std::map<unsigned int, std::vector<Size>>;

class RPiCameraConfiguration;
/*
 * Per-camera state shared by all Raspberry Pi pipeline handlers.
 *
 * Platform-specific behaviour (VC4 vs PiSP, etc.) is supplied by derived
 * classes through the pure virtual platform*() hooks. All member functions
 * are invoked from a single thread, so no locking is needed (see the note
 * above state_ below).
 */
class CameraData : public Camera::Private
{
public:
	/* explicit: abstract base, never meant to be implicitly converted to. */
	explicit CameraData(PipelineHandler *pipe)
		: Camera::Private(pipe), state_(State::Stopped),
		  dropFrameCount_(0), buffersAllocated_(false),
		  ispOutputCount_(0), ispOutputTotal_(0)
	{
	}

	virtual ~CameraData()
	{
	}

	/* Platform hooks for configuration validation/application and streaming. */
	virtual CameraConfiguration::Status platformValidate(RPiCameraConfiguration *rpiConfig) const = 0;
	virtual int platformConfigure(const RPiCameraConfiguration *rpiConfig) = 0;
	virtual void platformStart() = 0;
	virtual void platformStop() = 0;

	double scoreFormat(double desired, double actual) const;
	V4L2SubdeviceFormat findBestFormat(const Size &req, unsigned int bitDepth) const;

	void freeBuffers();
	virtual void platformFreeBuffers() = 0;

	void enumerateVideoDevices(MediaLink *link, const std::string &frontend);

	/* Pipeline/IPA setup. */
	int loadPipelineConfiguration();
	int loadIPA(ipa::RPi::InitResult *result);
	int configureIPA(const CameraConfiguration *config, ipa::RPi::ConfigResult *result);
	virtual int platformInitIpa(ipa::RPi::InitParams &params) = 0;
	virtual int platformConfigureIpa(ipa::RPi::ConfigParams &params) = 0;

	/* IPA callbacks / control plumbing. */
	void metadataReady(const ControlList &metadata);
	void setDelayedControls(const ControlList &controls, uint32_t delayContext);
	void setLensControls(const ControlList &controls);
	void setSensorControls(ControlList &controls);

	/* Digital zoom: translate the scaler crop into ISP coordinates. */
	Rectangle scaleIspCrop(const Rectangle &ispCrop) const;
	void applyScalerCrop(const ControlList &controls);
	virtual void platformSetIspCrop() = 0;

	void cameraTimeout();
	void frameStarted(uint32_t sequence);

	void clearIncompleteRequests();
	void handleStreamBuffer(FrameBuffer *buffer, Stream *stream);
	void handleState();

	virtual V4L2VideoDevice::Formats ispFormats() const = 0;
	virtual V4L2VideoDevice::Formats rawFormats() const = 0;
	virtual V4L2VideoDevice *frontendDevice() = 0;

	virtual int platformPipelineConfigure(const std::unique_ptr<YamlObject> &root) = 0;

	std::unique_ptr<ipa::RPi::IPAProxyRPi> ipa_;

	std::unique_ptr<CameraSensor> sensor_;
	SensorFormats sensorFormats_;

	/* The vector below is just for convenience when iterating over all streams. */
	std::vector<Stream *> streams_;
	/* Stores the ids of the buffers mapped in the IPA. */
	std::unordered_set<unsigned int> bufferIds_;
	/*
	 * Stores a cascade of Video Mux or Bridge devices between the sensor and
	 * Unicam together with media link across the entities.
	 */
	std::vector<std::pair<std::unique_ptr<V4L2Subdevice>, MediaLink *>> bridgeDevices_;

	std::unique_ptr<DelayedControls> delayedCtrls_;
	bool sensorMetadata_;

	/*
	 * All the functions in this class are called from a single calling
	 * thread. So, we do not need to have any mutex to protect access to any
	 * of the variables below.
	 */
	enum class State { Stopped, Idle, Busy, IpaComplete, Error };
	State state_;

	/* True while the camera is streaming and has not faulted. */
	bool isRunning() const
	{
		return state_ != State::Stopped && state_ != State::Error;
	}

	std::queue<Request *> requestQueue_;

	/* For handling digital zoom. */
	IPACameraSensorInfo sensorInfo_;
	Rectangle ispCrop_; /* crop in ISP (camera mode) pixels */
	Rectangle scalerCrop_; /* crop in sensor native pixels */
	Size ispMinCropSize_;

	unsigned int dropFrameCount_;

	/*
	 * If set, this stores the value that represents a gain of one for
	 * the V4L2_CID_NOTIFY_GAINS control.
	 */
	std::optional<int32_t> notifyGainsUnity_;

	/* Have internal buffers been allocated? */
	bool buffersAllocated_;

	struct Config {
		/*
		 * Override any request from the IPA to drop a number of startup
		 * frames.
		 */
		bool disableStartupFrameDrops;
		/*
		 * Override the camera timeout value calculated by the IPA based
		 * on frame durations.
		 */
		unsigned int cameraTimeoutValue;
	};

	Config config_;

protected:
	/* Copy completed-buffer metadata into the request's metadata list. */
	void fillRequestMetadata(const ControlList &bufferControls,
				 Request *request);

	virtual void tryRunPipeline() = 0;

	unsigned int ispOutputCount_;
	unsigned int ispOutputTotal_;

private:
	void checkRequestCompleted();
};

class PipelineHandlerBase : public PipelineHandler
{
public:
	PipelineHandlerBase(CameraManager *manager)
		: PipelineHandler(manager)
	{
	}

	virtual ~PipelineHandlerBase()
	{
	}

	static bool isRgb(const PixelFormat &pixFmt);
	static bool isYuv(const PixelFormat &pixFmt);
	static bool isRaw(const PixelFormat &pixFmt);

	static bool updateStreamConfig(StreamConfiguration *stream,
				       const V4L2DeviceFormat &format);
	static V4L2DeviceFormat toV4L2DeviceFormat(const V4L2VideoDevice *dev,
						   const StreamConfiguration *stream);
	static V4L2DeviceFormat toV4L2DeviceFormat(const V4L2VideoDevice *dev,
						   const V4L2SubdeviceFormat &format,
						   BayerFormat::Packing packingReq);

	std::unique_ptr<CameraConfiguration>
	generateConfiguration(Camera *camera, Span<const StreamRole> roles) override;
	int configure(Camera *camera, CameraConfiguration *config) override;

	int exportFrameBuffers(Camera *camera, libcamera::Stream *stream,
			       std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;

	int start(Camera *camera, const ControlList *controls) override;
	void stopDevice(Camera *camera) override;
	void releaseDevice(Camera *camera) override;

	int queueRequestDevice(Camera *camera, Request *request) override;

protected:
	int registerCamera(std::unique_ptr<RPi::CameraData> &cameraData,
			   MediaDevice *frontent, const std::string &frontendName,
			   MediaDevice *backend, MediaEntity *sensorEntity);

	void mapBuffers(Camera *camera, const BufferMap &buffers, unsigned int mask);

	virtual int platformRegister(std::unique_ptr<CameraData> &cameraData,
				     MediaDevice *unicam, MediaDevice *isp) = 0;

private:
	CameraData *cameraData(Camera *camera)
	{
		return static_cast<CameraData *>(camera->_d());
	}

	int queueAllBuffers(Camera *camera);
	virtual int prepareBuffers(Camera *camera) = 0;
};

/*
 * CameraConfiguration specialisation for Raspberry Pi pipelines.
 *
 * Holds the per-validate() results (sensor format, transform, stream
 * parameters) that platformValidate()/platformConfigure() consume.
 */
class RPiCameraConfiguration final : public CameraConfiguration
{
public:
	RPiCameraConfiguration(const CameraData *data)
		: data_(data)
	{
	}

	CameraConfiguration::Status validateColorSpaces(ColorSpaceFlags flags);
	Status validate() override;

	/* Cache the combinedTransform_ that will be applied to the sensor */
	Transform combinedTransform_;
	/* The sensor format computed in validate() */
	V4L2SubdeviceFormat sensorFormat_;

	/* Per-stream validation state: index, configuration and device/format. */
	struct StreamParams {
		StreamParams()
			: StreamParams(0, nullptr)
		{
		}

		StreamParams(unsigned int index_, StreamConfiguration *cfg_)
			: index(index_), cfg(cfg_), dev(nullptr)
		{
		}

		unsigned int index;
		StreamConfiguration *cfg;
		V4L2VideoDevice *dev;
		V4L2DeviceFormat format;
	};

	std::vector<StreamParams> rawStreams_;
	std::vector<StreamParams> outStreams_;

	/*
	 * Store the colour spaces that all our streams will have. RGB format streams
	 * will have the same colorspace as YUV streams, with YCbCr field cleared and
	 * range set to full.
	 */
	std::optional<ColorSpace> yuvColorSpace_;
	std::optional<ColorSpace> rgbColorSpace_;

private:
	const CameraData *data_;
};

} /* namespace RPi */

} /* namespace libcamera */