summaryrefslogtreecommitdiff
path: root/src/android/camera_stream.h
blob: 4c5078b2c26d3cf44dff92c6a82f895c81895540 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2020, Google Inc.
 *
 * camera_stream.h - Camera HAL stream
 */

#pragma once

#include <memory>
#include <queue>
#include <vector>

#include <hardware/camera3.h>

#include <libcamera/base/mutex.h>
#include <libcamera/base/thread.h>

#include <libcamera/camera.h>
#include <libcamera/framebuffer.h>
#include <libcamera/geometry.h>
#include <libcamera/pixel_format.h>

#include "camera_request.h"
#include "post_processor.h"

class CameraDevice;
class PlatformFrameBufferAllocator;

/*
 * A CameraStream associates one Android camera3 stream with a libcamera
 * stream inside a CameraConfiguration, and owns whatever resources the
 * association needs: an optional buffer allocator and buffer pool, a
 * PostProcessor, and a worker thread that runs the post-processor
 * asynchronously.
 */
class CameraStream
{
public:
	/*
	 * Enumeration of CameraStream types.
	 *
	 * A camera stream associates an Android stream to a libcamera stream.
	 * This enumeration describes how the two streams are associated and how
	 * and where data produced from libcamera are delivered to the
	 * Android framework.
	 *
	 * Direct:
	 *
	 * The Android stream is directly mapped onto a libcamera stream: frames
	 * are delivered by the library directly in the memory location
	 * specified by the Android stream (buffer_handle_t->data) and provided
	 * to the framework as they are. The Android stream characteristics are
	 * directly translated to the libcamera stream configuration.
	 *
	 * +-----+                +-----+
	 * |  A  |                |  L  |
	 * +-----+                +-----+
	 *    |                      |
	 *    V                      V
	 * +-----+                +------+
	 * |  B  |<---------------|  FB  |
	 * +-----+                +------+
	 *
	 *
	 * Internal:
	 *
	 * Data for the Android stream is produced by processing a libcamera
	 * stream created by the HAL for that purpose. The libcamera stream
	 * needs to be supplied with intermediate buffers where the library
	 * delivers frames to be processed and then provided to the framework.
	 * The libcamera stream configuration is not a direct translation of the
	 * Android stream characteristics, but it describes the format and size
	 * required for the processing procedure to produce frames in the
	 * Android required format.
	 *
	 * +-----+                +-----+
	 * |  A  |                |  L  |
	 * +-----+                +-----+
	 *    |                      |
	 *    V                      V
	 * +-----+                +------+
	 * |  B  |                |  FB  |
	 * +-----+                +------+
	 *   ^                       |
	 *   |-------Processing------|
	 *
	 *
	 * Mapped:
	 *
	 * Data for the Android stream is produced by processing a libcamera
	 * stream associated with another CameraStream. Mapped camera streams do
	 * not need any memory to be reserved for them as they process data
	 * produced by libcamera for a different stream whose format and size
	 * are compatible with the processing procedure requirements to produce
	 * frames in the Android required format.
	 *
	 * +-----+      +-----+          +-----+
	 * |  A  |      |  A' |          |  L  |
	 * +-----+      +-----+          +-----+
	 *    |            |                |
	 *    V            V                V
	 * +-----+      +-----+          +------+
	 * |  B  |      |  B' |<---------|  FB  |
	 * +-----+      +-----+          +------+
	 *   ^              |
	 *   |--Processing--|
	 *
	 *
	 * --------------------------------------------------------------------
	 * A  = Android stream
	 * L  = libcamera stream
	 * B  = memory buffer
	 * FB = libcamera FrameBuffer
	 * "Processing" = Frame processing procedure (Encoding, scaling etc)
	 */
	enum class Type {
		Direct,
		Internal,
		Mapped,
	};
	/*
	 * \param cameraDevice Camera device this stream belongs to
	 * \param config CameraConfiguration holding the StreamConfiguration
	 *        associated with this stream at position \a index
	 * \param type How the Android stream maps to the libcamera stream
	 * \param camera3Stream Android framework stream descriptor
	 * \param sourceStream For Mapped streams, the CameraStream whose
	 *        libcamera stream produces the frames to be processed
	 *        (presumably nullptr for the other types — confirm in callers)
	 * \param index Index of the StreamConfiguration in \a config
	 */
	CameraStream(CameraDevice *const cameraDevice,
		     libcamera::CameraConfiguration *config, Type type,
		     camera3_stream_t *camera3Stream,
		     CameraStream *const sourceStream,
		     unsigned int index);
	/* Move construction is required to store instances in std::vector. */
	CameraStream(CameraStream &&other);
	~CameraStream();

	Type type() const { return type_; }
	camera3_stream_t *camera3Stream() const { return camera3Stream_; }
	/* The libcamera StreamConfiguration at index_ within config_. */
	const libcamera::StreamConfiguration &configuration() const;
	libcamera::Stream *stream() const;
	CameraStream *sourceStream() const { return sourceStream_; }

	/* Prepare the stream for use (post-processor, internal buffers). */
	int configure();
	/* Run the stream's processing stage on \a streamBuffer. */
	int process(Camera3RequestDescriptor::StreamBuffer *streamBuffer);
	/*
	 * Acquire/release a FrameBuffer from/to the stream's internal pool
	 * (meaningful for Internal streams which own allocated buffers).
	 */
	libcamera::FrameBuffer *getBuffer();
	void putBuffer(libcamera::FrameBuffer *buffer);
	/* Abort pending post-processing work queued on the worker thread. */
	void flush();

private:
	/*
	 * Worker thread that runs the post-processor asynchronously: stream
	 * buffers are queued from the caller's thread and consumed in run(),
	 * with the queue protected by mutex_/cv_.
	 */
	class PostProcessorWorker : public libcamera::Thread
	{
	public:
		enum class State {
			Stopped,
			Running,
			Flushing,
		};

		PostProcessorWorker(PostProcessor *postProcessor);
		~PostProcessorWorker();

		void start();
		void queueRequest(Camera3RequestDescriptor::StreamBuffer *request);
		/* Drop queued requests and wait for the in-flight one to end. */
		void flush();

	protected:
		/* Thread body: drains requests_ and runs the post-processor. */
		void run() override;

	private:
		PostProcessor *postProcessor_;

		libcamera::Mutex mutex_;
		libcamera::ConditionVariable cv_;

		/* Pending post-processing requests, consumed by run(). */
		std::queue<Camera3RequestDescriptor::StreamBuffer *> requests_
			LIBCAMERA_TSA_GUARDED_BY(mutex_);

		State state_ LIBCAMERA_TSA_GUARDED_BY(mutex_) = State::Stopped;
	};

	/* Wait for the Android acquire \a fence — NOTE(review): confirm
	 * timeout/error semantics in the implementation. */
	int waitFence(int fence);

	CameraDevice *const cameraDevice_;
	const libcamera::CameraConfiguration *config_;
	const Type type_;
	camera3_stream_t *camera3Stream_;
	CameraStream *const sourceStream_;
	/* Position of this stream's StreamConfiguration within config_. */
	const unsigned int index_;

	/* Backing memory for Internal streams; buffers_ is the pool of
	 * buffers currently available, protected by mutex_. */
	std::unique_ptr<PlatformFrameBufferAllocator> allocator_;
	std::vector<std::unique_ptr<libcamera::FrameBuffer>> allocatedBuffers_;
	std::vector<libcamera::FrameBuffer *> buffers_ LIBCAMERA_TSA_GUARDED_BY(mutex_);
	/*
	 * The class has to be MoveConstructible as instances are stored in
	 * an std::vector in CameraDevice. libcamera::Mutex is not movable,
	 * hence it is held through a unique_ptr.
	 */
	std::unique_ptr<libcamera::Mutex> mutex_;
	std::unique_ptr<PostProcessor> postProcessor_;

	std::unique_ptr<PostProcessorWorker> worker_;
};
 * \brief State for the Filter algorithm * * \struct IPAActiveState::filter.denoise * \brief Denoising level * * \var IPAActiveState::filter.sharpness * \brief Sharpness level */ /** * \struct IPAFrameContext * \brief Per-frame context for algorithms * * The frame context stores two distinct categories of information: * * - The value of the controls to be applied to the frame. These values are * typically set in the queueRequest() function, from the consolidated * control values stored in the active state. The frame context thus stores * values for all controls related to the algorithm, not limited to the * controls specified in the corresponding request, but consolidated from all * requests that have been queued so far. * * For controls that can be set manually or computed by an algorithm * (depending on the algorithm operation mode), such as for instance the * colour gains for the AWB algorithm, the control value will be stored in * the frame context in the queueRequest() function only when operating in * manual mode. When operating in auto mode, the values are computed by the * algorithm in process(), stored in the active state, and copied to the * frame context in prepare(), just before being stored in the ISP parameters * buffer. * * The queueRequest() function can also store ancillary data in the frame * context, such as flags to indicate if (and what) control values have * changed compared to the previous request. * * - Status information computed by the algorithm for a frame. For instance, * the colour temperature estimated by the AWB algorithm from ISP statistics * calculated on a frame is stored in the frame context for that frame in * the process() function. */ /** * \var IPAFrameContext::agc * \brief Automatic Gain Control parameters for this frame * * The exposure and gain are provided by the AGC algorithm, and are to be * applied to the sensor in order to take effect for this frame. 
* * \var IPAFrameContext::agc.exposure * \brief Exposure time expressed as a number of lines * * \var IPAFrameContext::agc.gain * \brief Analogue gain multiplier * * The gain should be adapted to the sensor specific gain code before applying. */ /** * \var IPAFrameContext::awb * \brief Automatic White Balance parameters for this frame * * \struct IPAFrameContext::awb.gains * \brief White balance gains * * \var IPAFrameContext::awb.gains.red * \brief White balance gain for R channel * * \var IPAFrameContext::awb.gains.green * \brief White balance gain for G channel * * \var IPAFrameContext::awb.gains.blue * \brief White balance gain for B channel * * \var IPAFrameContext::awb.temperatureK * \brief Estimated color temperature * * \var IPAFrameContext::awb.autoEnabled * \brief Whether the Auto White Balance algorithm is enabled */ /** * \var IPAFrameContext::cproc * \brief Color Processing parameters for this frame * * \struct IPAFrameContext::cproc.brightness * \brief Brightness level * * \var IPAFrameContext::cproc.contrast * \brief Contrast level * * \var IPAFrameContext::cproc.saturation * \brief Saturation level * * \var IPAFrameContext::cproc.update * \brief Indicates if the color processing parameters have been updated * compared to the previous frame */ /** * \var IPAFrameContext::dpf * \brief Denoise Pre-Filter parameters for this frame * * \var IPAFrameContext::dpf.denoise * \brief Indicates if denoise is activated * * \var IPAFrameContext::dpf.update * \brief Indicates if the denoise pre-filter parameters have been updated * compared to the previous frame */ /** * \var IPAFrameContext::filter * \brief Filter parameters for this frame * * \struct IPAFrameContext::filter.denoise * \brief Denoising level * * \var IPAFrameContext::filter.sharpness * \brief Sharpness level * * \var IPAFrameContext::filter.updateParams * \brief Indicates if the filter parameters have been updated compared to the * previous frame */ /** * \var IPAFrameContext::sensor * \brief 
Sensor configuration that has been used for this frame * * \var IPAFrameContext::sensor.exposure * \brief Exposure time expressed as a number of lines * * \var IPAFrameContext::sensor.gain * \brief Analogue gain multiplier */ /** * \struct IPAContext * \brief Global IPA context data shared between all algorithms * * \var IPAContext::configuration * \brief The IPA session configuration, immutable during the session * * \var IPAContext::activeState * \brief The IPA active state, storing the latest state for all algorithms * * \var IPAContext::frameContexts * \brief Ring buffer of per-frame contexts */ } /* namespace libcamera::ipa::rkisp1 */