/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019-2023, Raspberry Pi Ltd
 *
 * pipeline_base.h - Pipeline handler base class for Raspberry Pi devices
 */

#pragma once

#include <map>
#include <memory>
#include <optional>
#include <queue>
#include <string>
#include <unordered_set>
#include <utility>
#include <vector>

#include <libcamera/controls.h>
#include <libcamera/request.h>

#include "libcamera/internal/bayer_format.h"
#include "libcamera/internal/camera.h"
#include "libcamera/internal/camera_sensor.h"
#include "libcamera/internal/framebuffer.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/media_object.h"
#include "libcamera/internal/pipeline_handler.h"
#include "libcamera/internal/v4l2_videodevice.h"
#include "libcamera/internal/yaml_parser.h"

#include <libcamera/ipa/raspberrypi_ipa_interface.h>
#include <libcamera/ipa/raspberrypi_ipa_proxy.h>

#include "delayed_controls.h"
#include "rpi_stream.h"

using namespace std::chrono_literals;

namespace libcamera {

namespace RPi {

/* Map of mbus codes to supported sizes reported by the sensor. */
using SensorFormats = std::map<unsigned int, std::vector<Size>>;
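
/*
 * For illustration only (hypothetical sensor modes), a populated map for a
 * sensor exposing a single 10-bit Bayer mbus code might look like:
 *
 *   SensorFormats formats = {
 *       { MEDIA_BUS_FMT_SRGGB10_1X10, { Size(1332, 990), Size(2028, 1520) } },
 *   };
 */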

class RPiCameraConfiguration;
class CameraData : public Camera::Private
{
public:
	CameraData(PipelineHandler *pipe)
		: Camera::Private(pipe), state_(State::Stopped),
		  flipsAlterBayerOrder_(false), dropFrameCount_(0), buffersAllocated_(false),
		  ispOutputCount_(0), ispOutputTotal_(0)
	{
	}

	virtual ~CameraData()
	{
	}

	struct StreamParams {
		StreamParams()
			: index(0), cfg(nullptr), dev(nullptr)
		{
		}

		StreamParams(unsigned int index_, StreamConfiguration *cfg_)
			: index(index_), cfg(cfg_), dev(nullptr)
		{
		}

		unsigned int index;
		StreamConfiguration *cfg;
		V4L2VideoDevice *dev;
	};

	virtual CameraConfiguration::Status platformValidate(RPiCameraConfiguration *rpiConfig,
							     std::vector<StreamParams> &rawStreams,
							     std::vector<StreamParams> &outStreams) const = 0;
	virtual int platformConfigure(const V4L2SubdeviceFormat &sensorFormat,
				      std::optional<BayerFormat::Packing> packing,
				      std::vector<StreamParams> &rawStreams,
				      std::vector<StreamParams> &outStreams) = 0;
	virtual void platformStart() = 0;
	virtual void platformStop() = 0;

	double scoreFormat(double desired, double actual) const;
	V4L2SubdeviceFormat findBestFormat(const Size &req, unsigned int bitDepth) const;

	void freeBuffers();
	virtual void platformFreeBuffers() = 0;

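	/*
	 * Walk the media graph from the sensor towards the frontend, collecting
	 * any Video Mux or Bridge devices found along the way (stored in
	 * bridgeDevices_ below).
	 */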
	void enumerateVideoDevices(MediaLink *link, const std::string &frontend);

	int loadPipelineConfiguration();
	int loadIPA(ipa::RPi::InitResult *result);
	int configureIPA(const CameraConfiguration *config, ipa::RPi::ConfigResult *result);
	virtual int platformInitIpa(ipa::RPi::InitParams &params) = 0;
	virtual int platformConfigureIpa(ipa::RPi::ConfigParams &params) = 0;

	void metadataReady(const ControlList &metadata);
	void setDelayedControls(const ControlList &controls, uint32_t delayContext);
	void setLensControls(const ControlList &controls);
	void setSensorControls(ControlList &controls);

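	/*
	 * Digital zoom: translate the ScalerCrop control between sensor native
	 * coordinates and ISP (camera mode) coordinates, and apply the result.
	 */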
	Rectangle scaleIspCrop(const Rectangle &ispCrop) const;
	void applyScalerCrop(const ControlList &controls);
	virtual void platformSetIspCrop() = 0;

	void cameraTimeout();
	void frameStarted(uint32_t sequence);

	void clearIncompleteRequests();
	void handleStreamBuffer(FrameBuffer *buffer, Stream *stream);
	void handleState();

	virtual V4L2VideoDevice::Formats ispFormats() const = 0;
	virtual V4L2VideoDevice::Formats rawFormats() const = 0;
	virtual V4L2VideoDevice *frontendDevice() = 0;

	virtual int platformPipelineConfigure(const std::unique_ptr<YamlObject> &root) = 0;

	std::unique_ptr<ipa::RPi::IPAProxyRPi> ipa_;

	std::unique_ptr<CameraSensor> sensor_;
	SensorFormats sensorFormats_;

	/* The vector below is just for convenience when iterating over all streams. */
	std::vector<Stream *> streams_;
	/* Stores the ids of the buffers mapped in the IPA. */
	std::unordered_set<unsigned int> bufferIds_;
	/*
	 * Stores a cascade of Video Mux or Bridge devices between the sensor
	 * and Unicam, together with the media links between the entities.
	 */
	std::vector<std::pair<std::unique_ptr<V4L2Subdevice>, MediaLink *>> bridgeDevices_;

	std::unique_ptr<DelayedControls> delayedCtrls_;
	bool sensorMetadata_;

	/*
	 * All the functions in this class are called from a single calling
	 * thread, so no mutex is needed to protect access to any of the
	 * variables below.
	 */
	enum class State { Stopped, Idle, Busy, IpaComplete, Error };
	State state_;

	bool isRunning()
	{
		return state_ != State::Stopped && state_ != State::Error;
	}

	std::queue<Request *> requestQueue_;

	/* Store the "native" Bayer order (that is, with no transforms applied). */
	bool flipsAlterBayerOrder_;
	BayerFormat::Order nativeBayerOrder_;

	/* For handling digital zoom. */
	IPACameraSensorInfo sensorInfo_;
	Rectangle ispCrop_; /* crop in ISP (camera mode) pixels */
	Rectangle scalerCrop_; /* crop in sensor native pixels */
	Size ispMinCropSize_;

	unsigned int dropFrameCount_;

	/*
	 * If set, this stores the value that represents a gain of one for
	 * the V4L2_CID_NOTIFY_GAINS control.
	 */
	std::optional<int32_t> notifyGainsUnity_;
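	/*
	 * For example (illustrative numbers only), if the driver reports a
	 * unity value of 128, a digital gain of 1.5 would be written to the
	 * control as 192.
	 */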

	/* Have internal buffers been allocated? */
	bool buffersAllocated_;

	struct Config {
		/*
		 * Override any request from the IPA to drop a number of startup
		 * frames.
		 */
		bool disableStartupFrameDrops;
		/*
		 * Override the camera timeout value calculated by the IPA based
		 * on frame durations.
		 */
		unsigned int cameraTimeoutValue;
	};

	Config config_;
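
	/*
	 * config_ is populated by loadPipelineConfiguration() from the platform
	 * pipeline handler configuration file. As an illustration only (the key
	 * names below are assumptions, check the platform's example
	 * configuration file), such a file might contain:
	 *
	 *   "pipeline_handler":
	 *   {
	 *       "disable_startup_frame_drops": false,
	 *       "camera_timeout_value_ms": 0
	 *   }
	 */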

protected:
	void fillRequestMetadata(const ControlList &bufferControls,
				 Request *request);

	virtual void tryRunPipeline() = 0;

	unsigned int ispOutputCount_;
	unsigned int ispOutputTotal_;

private:
	void checkRequestCompleted();
};

class PipelineHandlerBase : public PipelineHandler
{
public:
	PipelineHandlerBase(CameraManager *manager)
		: PipelineHandler(manager)
	{
	}

	virtual ~PipelineHandlerBase()
	{
	}

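	/*
	 * Translate a media bus (subdevice) format into a format that can be
	 * applied to the given video device, honouring the requested Bayer
	 * packing.
	 */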
	static V4L2DeviceFormat toV4L2DeviceFormat(const V4L2VideoDevice *dev,
						   const V4L2SubdeviceFormat &format,
						   BayerFormat::Packing packingReq);

	std::unique_ptr<CameraConfiguration>
	generateConfiguration(Camera *camera, Span<const StreamRole> roles) override;
	int configure(Camera *camera, CameraConfiguration *config) override;

	int exportFrameBuffers(Camera *camera, libcamera::Stream *stream,
			       std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;

	int start(Camera *camera, const ControlList *controls) override;
	void stopDevice(Camera *camera) override;
	void releaseDevice(Camera *camera) override;

	int queueRequestDevice(Camera *camera, Request *request) override;

protected:
	int registerCamera(std::unique_ptr<RPi::CameraData> &cameraData,
			   MediaDevice *frontend, const std::string &frontendName,
			   MediaDevice *backend, MediaEntity *sensorEntity);

	void mapBuffers(Camera *camera, const BufferMap &buffers, unsigned int mask);

	virtual int platformRegister(std::unique_ptr<CameraData> &cameraData,
				     MediaDevice *unicam, MediaDevice *isp) = 0;

private:
	CameraData *cameraData(Camera *camera)
	{
		return static_cast<CameraData *>(camera->_d());
	}

	int queueAllBuffers(Camera *camera);
	virtual int prepareBuffers(Camera *camera) = 0;
};

class RPiCameraConfiguration final : public CameraConfiguration
{
public:
	RPiCameraConfiguration(const CameraData *data)
		: CameraConfiguration(), data_(data)
	{
	}

	CameraConfiguration::Status validateColorSpaces(ColorSpaceFlags flags);
	Status validate() override;

	/* Cache the combinedTransform_ that will be applied to the sensor. */
	Transform combinedTransform_;
	/* The sensor format computed in validate(). */
	V4L2SubdeviceFormat sensorFormat_;

private:
	const CameraData *data_;

	/*
	 * Store the colour spaces that all our streams will have. RGB format
	 * streams will have the same colour space as YUV streams, with the
	 * YCbCr field cleared and range set to full.
	 */
	std::optional<ColorSpace> yuvColorSpace_;
	std::optional<ColorSpace> rgbColorSpace_;