path: root/test/v4l2_videodevice/buffer_cache.cpp
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright (C) 2020, Google Inc.
 *
 * Test the different operation modes of the buffer cache
 */

#include <iostream>
#include <random>
#include <vector>

#include <libcamera/formats.h>
#include <libcamera/stream.h>

#include "buffer_source.h"

#include "test.h"

using namespace libcamera;

namespace {

class BufferCacheTest : public Test
{
public:
	/*
	 * Test that a cache with the same size as the number of buffers results
	 * in a sequential run over the indices: 0, 1, 2, 3, 4, 0, 1, 2, 3, 4, ...
	 *
	 * The test is only valid when the cache size is at least as big as the
	 * number of buffers.
	 */
	int testSequential(V4L2BufferCache *cache,
			   const std::vector<std::unique_ptr<FrameBuffer>> &buffers)
	{
		for (unsigned int i = 0; i < buffers.size() * 100; i++) {
			int nBuffer = i % buffers.size();
			int index = cache->get(*buffers[nBuffer].get());

			if (index != nBuffer) {
				std::cout << "Expected index " << nBuffer
					  << " got " << index << std::endl;
				return TestFail;
			}

			cache->put(index);
		}

		return TestPass;
	}

	/*
	 * Test that getting and putting buffers in a random order always
	 * results in a valid index.
	 */
	int testRandom(V4L2BufferCache *cache,
		       const std::vector<std::unique_ptr<FrameBuffer>> &buffers)
	{
		std::uniform_int_distribution<> dist(0, buffers.size() - 1);

		for (unsigned int i = 0; i < buffers.size() * 100; i++) {
			int nBuffer = dist(generator_);
			int index = cache->get(*buffers[nBuffer].get());

			if (index < 0) {
				std::cout << "Failed lookup from cache"
					  << std::endl;
				return TestFail;
			}

			cache->put(index);
		}

		return TestPass;
	}

	/*
	 * Test that using a buffer more frequently keeps it hot in the cache at
	 * all times.
	 */
	int testHot(V4L2BufferCache *cache,
		    const std::vector<std::unique_ptr<FrameBuffer>> &buffers,
		    unsigned int hotFrequency)
	{
		/* Run the random test on the cache to make it messy. */
		if (testRandom(cache, buffers) != TestPass)
			return TestFail;

		std::uniform_int_distribution<> dist(0, buffers.size() - 1);

		/* Pick a hot buffer at random and store its index. */
		int hotBuffer = dist(generator_);
		int hotIndex = cache->get(*buffers[hotBuffer].get());
		cache->put(hotIndex);

		/*
		 * Queue hot buffer at the requested frequency and make sure
		 * it stays hot.
		 */
		for (unsigned int i = 0; i < buffers.size() * 100; i++) {
			int nBuffer, index;
			bool hotQueue = i % hotFrequency == 0;

			if (hotQueue)
				nBuffer = hotBuffer;
			else
				nBuffer = dist(generator_);

			index = cache->get(*buffers[nBuffer].get());

			if (index < 0) {
				std::cout << "Failed lookup from cache"
					  << std::endl;
				return TestFail;
			}

			if (hotQueue && index != hotIndex) {
				std::cout << "Hot buffer got cold"
					  << std::endl;
				return TestFail;
			}

			cache->put(index);
		}

		return TestPass;
	}

	int testIsEmpty(const std::vector<std::unique_ptr<FrameBuffer>> &buffers)
	{
		V4L2BufferCache cache(buffers.size());

		if (!cache.isEmpty())
			return TestFail;

		for (auto const &buffer : buffers) {
			FrameBuffer &b = *buffer.get();
			cache.get(b);
		}

		if (cache.isEmpty())
			return TestFail;

		unsigned int i;
		for (i = 0; i < buffers.size() - 1; i++)
			cache.put(i);

		if (cache.isEmpty())
			return TestFail;

		cache.put(i);
		if (!cache.isEmpty())
			return TestFail;

		return TestPass;
	}

	int init() override
	{
		std::random_device rd;
		unsigned int seed = rd();

		std::cout << "Random seed is " << seed << std::endl;

		generator_.seed(seed);

		return TestPass;
	}

	int run() override
	{
		const unsigned int numBuffers = 8;

		StreamConfiguration cfg;
		cfg.pixelFormat = formats::YUYV;
		cfg.size = Size(600, 800);
		cfg.bufferCount = numBuffers;

		BufferSource source;
		int ret = source.allocate(cfg);
		if (ret != TestPass)
			return ret;

		const std::vector<std::unique_ptr<FrameBuffer>> &buffers =
			source.buffers();

		if (buffers.size() != numBuffers) {
			std::cout << "Got " << buffers.size()
				  << " buffers, expected " << numBuffers
				  << std::endl;
			return TestFail;
		}

		/*
		 * Test a cache of the same size as the number of buffers. The
		 * cache is created from a list of buffers and will be
		 * pre-populated.
		 */
		V4L2BufferCache cacheFromBuffers(buffers);

		if (testSequential(&cacheFromBuffers, buffers) != TestPass)
			return TestFail;

		if (testRandom(&cacheFromBuffers, buffers) != TestPass)
			return TestFail;

		if (testHot(&cacheFromBuffers, buffers, numBuffers) != TestPass)
			return TestFail;

		/*
		 * Test a cache of the same size as the number of buffers. The
		 * cache is not pre-populated.
		 */
		V4L2BufferCache cacheFromNumbers(numBuffers);

		if (testSequential(&cacheFromNumbers, buffers) != TestPass)
			return TestFail;

		if (testRandom(&cacheFromNumbers, buffers) != TestPass)
			return TestFail;

		if (testHot(&cacheFromNumbers, buffers, numBuffers) != TestPass)
			return TestFail;

		/*
		 * Test a cache half the size of the number of buffers used. The
		 * cache is not pre-populated.
		 */
		V4L2BufferCache cacheHalf(numBuffers / 2);

		if (testRandom(&cacheHalf, buffers) != TestPass)
			return TestFail;

		if (testHot(&cacheHalf, buffers, numBuffers / 2) != TestPass)
			return TestFail;

		/*
		 * Test that the isEmpty function reports the correct result at
		 * various levels of cache fullness.
		 */
		if (testIsEmpty(buffers) != TestPass)
			return TestFail;

		return TestPass;
	}

private:
	std::mt19937 generator_;
};

} /* namespace */

TEST_REGISTER(BufferCacheTest)
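
/*
 * Illustrative sketch only, not part of buffer_cache.cpp above: how a
 * V4L2BufferCache is typically paired with buffer queueing. get() returns a
 * free V4L2 buffer index for a FrameBuffer, preferring an index that last held
 * the same buffer (the "hot" case exercised by testHot()), and put() releases
 * the index once the buffer has been dequeued. The queue/dequeue steps below
 * are hypothetical placeholders, not libcamera API, and the header path may
 * differ between libcamera versions.
 */
#include "libcamera/internal/v4l2_videodevice.h"

namespace {

void queueToDevice(libcamera::V4L2BufferCache &cache,
		   libcamera::FrameBuffer &buffer)
{
	/* Look up a free index, ideally one that previously held this buffer. */
	int index = cache.get(buffer);
	if (index < 0)
		return; /* All cache entries are currently in use. */

	/* A real caller would now queue 'buffer' at 'index' (VIDIOC_QBUF). */
}

void dequeueFromDevice(libcamera::V4L2BufferCache &cache, int index)
{
	/* After the buffer is dequeued (VIDIOC_DQBUF), release the index. */
	cache.put(index);
}

} /* namespace */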
 * supported in the current implementation, and will use default settings as
 * provided by the kernel driver.
 *
 * Demosaicing is operating with the default parameters and could be further
 * optimised to provide improved sharpening coefficients, checker artifact
 * removal, and false color correction.
 *
 * Additional image enhancements can be made by providing lens and
 * sensor-specific tuning to adapt for Black Level compensation (BLC), Lens
 * shading correction (SHD) and Color correction (CCM).
 */
class IPAIPU3 : public IPAIPU3Interface
{
public:
	int init(const IPASettings &settings,
		 const IPACameraSensorInfo &sensorInfo,
		 const ControlInfoMap &sensorControls,
		 ControlInfoMap *ipaControls) override;

	int start() override;
	void stop() override;

	int configure(const IPAConfigInfo &configInfo,
		      ControlInfoMap *ipaControls) override;

	void mapBuffers(const std::vector<IPABuffer> &buffers) override;
	void unmapBuffers(const std::vector<unsigned int> &ids) override;
	void processEvent(const IPU3Event &event) override;

private:
	void updateControls(const IPACameraSensorInfo &sensorInfo,
			    const ControlInfoMap &sensorControls,
			    ControlInfoMap *ipaControls);
	void updateSessionConfiguration(const ControlInfoMap &sensorControls);

	void processControls(unsigned int frame, const ControlList &controls);
	void fillParams(unsigned int frame, ipu3_uapi_params *params);
	void parseStatistics(unsigned int frame, int64_t frameTimestamp,
			     const ipu3_uapi_stats_3a *stats);

	void setControls(unsigned int frame);
	void calculateBdsGrid(const Size &bdsOutputSize);

	std::map<unsigned int, MappedFrameBuffer> buffers_;

	ControlInfoMap ctrls_;

	IPACameraSensorInfo sensorInfo_;

	/* Camera sensor controls. */
	uint32_t defVBlank_;
	uint32_t exposure_;
	uint32_t minExposure_;
	uint32_t maxExposure_;
	uint32_t gain_;
	uint32_t minGain_;
	uint32_t maxGain_;

	utils::Duration lineDuration_;

	/* Interface to the Camera Helper */
	std::unique_ptr<CameraSensorHelper> camHelper_;

	/* Maintain the algorithms used by the IPA */
	std::list<std::unique_ptr<ipa::ipu3::Algorithm>> algorithms_;

	/* Local parameter storage */
	struct IPAContext context_;
};

/**
 * \brief Compute IPASessionConfiguration using the sensor information and the
 * sensor V4L2 controls
 */
void IPAIPU3::updateSessionConfiguration(const ControlInfoMap &sensorControls)
{
	const ControlInfo &v4l2Exposure = sensorControls.find(V4L2_CID_EXPOSURE)->second;
	int32_t minExposure = v4l2Exposure.min().get<int32_t>();
	int32_t maxExposure = v4l2Exposure.max().get<int32_t>();

	const ControlInfo &v4l2Gain = sensorControls.find(V4L2_CID_ANALOGUE_GAIN)->second;
	int32_t minGain = v4l2Gain.min().get<int32_t>();
	int32_t maxGain = v4l2Gain.max().get<int32_t>();

	/*
	 * When the AGC computes the new exposure values for a frame, it needs
	 * to know the limits for shutter speed and analogue gain.
	 * As it depends on the sensor, update it with the controls.
	 *
	 * \todo take VBLANK into account for maximum shutter speed
	 */
	context_.configuration.agc.minShutterSpeed = minExposure * lineDuration_;
	context_.configuration.agc.maxShutterSpeed = maxExposure * lineDuration_;
	context_.configuration.agc.minAnalogueGain = camHelper_->gain(minGain);
	context_.configuration.agc.maxAnalogueGain = camHelper_->gain(maxGain);
}

/**
 * \brief Compute camera controls using the sensor information and the sensor
 * V4L2 controls
 *
 * Some of the camera controls are computed by the pipeline handler, some others
 * by the IPA module which is in charge of handling, for example, the exposure
 * time and the frame duration.
 *
 * This function computes:
 * - controls::ExposureTime
 * - controls::FrameDurationLimits
 */
void IPAIPU3::updateControls(const IPACameraSensorInfo &sensorInfo,
			     const ControlInfoMap &sensorControls,
			     ControlInfoMap *ipaControls)
{
	ControlInfoMap::Map controls{};

	/*
	 * Compute exposure time limits by using line length and pixel rate
	 * converted to microseconds. Use the V4L2_CID_EXPOSURE control to get
	 * exposure min, max and default and convert it from lines to
	 * microseconds.
	 */
	const ControlInfo &v4l2Exposure = sensorControls.find(V4L2_CID_EXPOSURE)->second;
	int32_t minExposure = v4l2Exposure.min().get<int32_t>() * lineDuration_.get<std::micro>();
	int32_t maxExposure = v4l2Exposure.max().get<int32_t>() * lineDuration_.get<std::micro>();
	int32_t defExposure = v4l2Exposure.def().get<int32_t>() * lineDuration_.get<std::micro>();
	controls[&controls::ExposureTime] = ControlInfo(minExposure, maxExposure,
							defExposure);

	/*
	 * Compute the frame duration limits.
	 *
	 * The frame length is computed assuming a fixed line length combined
	 * with the vertical frame sizes.
	 */
	const ControlInfo &v4l2HBlank = sensorControls.find(V4L2_CID_HBLANK)->second;
	uint32_t hblank = v4l2HBlank.def().get<int32_t>();
	uint32_t lineLength = sensorInfo.outputSize.width + hblank;

	const ControlInfo &v4l2VBlank = sensorControls.find(V4L2_CID_VBLANK)->second;
	std::array<uint32_t, 3> frameHeights{
		v4l2VBlank.min().get<int32_t>() + sensorInfo.outputSize.height,
		v4l2VBlank.max().get<int32_t>() + sensorInfo.outputSize.height,
		v4l2VBlank.def().get<int32_t>() + sensorInfo.outputSize.height,
	};

	std::array<int64_t, 3> frameDurations;
	for (unsigned int i = 0; i < frameHeights.size(); ++i) {
		uint64_t frameSize = lineLength * frameHeights[i];
		frameDurations[i] = frameSize / (sensorInfo.pixelRate / 1000000U);
	}

	controls[&controls::FrameDurationLimits] = ControlInfo(frameDurations[0],
							       frameDurations[1],
							       frameDurations[2]);

	*ipaControls = ControlInfoMap(std::move(controls), controls::controls);
}

/**
 * \brief Initialize the IPA module and its controls
 *
 * This function receives the camera sensor information from the pipeline
 * handler, computes the limits of the controls it handles and returns
 * them in the \a ipaControls output parameter.
 */
int IPAIPU3::init(const IPASettings &settings,
		  const IPACameraSensorInfo &sensorInfo,
		  const ControlInfoMap &sensorControls,
		  ControlInfoMap *ipaControls)
{
	camHelper_ = CameraSensorHelperFactory::create(settings.sensorModel);
	if (camHelper_ == nullptr) {
		LOG(IPAIPU3, Error)
			<< "Failed to create camera sensor helper for "
			<< settings.sensorModel;
		return -ENODEV;
	}

	/* Construct our Algorithms */
	algorithms_.push_back(std::make_unique<algorithms::Agc>());
	algorithms_.push_back(std::make_unique<algorithms::Awb>());
	algorithms_.push_back(std::make_unique<algorithms::BlackLevelCorrection>());
	algorithms_.push_back(std::make_unique<algorithms::ToneMapping>());

	/* Initialize controls. */
	updateControls(sensorInfo, sensorControls, ipaControls);

	return 0;
}

/**
 * \brief Perform any processing required before the first frame
 */
int IPAIPU3::start()
{
	/*
	 * Set the sensors V4L2 controls before the first frame to ensure that
	 * we have an expected and known configuration from the start.
	 */
	setControls(0);

	return 0;
}

/**
 * \brief Ensure that all processing has completed
 */
void IPAIPU3::stop()
{
}

/**
 * \brief Calculate a grid for the AWB statistics
 *
 * This function calculates a grid for the AWB algorithm in the IPU3 firmware.
 * Its input is the BDS output size calculated in the ImgU.
 * It is limited for now to the simplest method: find the lesser error
 * with the width/height and respective log2 width/height of the cells.
 *
 * \todo The frame is divided into cells which can be 8x8 => 64x64.
 * As a smaller cell improves the algorithm precision, adapting the
 * x_start and y_start parameters of the grid would provoke a loss of
 * some pixels but would also result in more accurate algorithms.
 */
void IPAIPU3::calculateBdsGrid(const Size &bdsOutputSize)
{
	Size best;
	Size bestLog2;

	/* Set the BDS output size in the IPAConfiguration structure */
	context_.configuration.grid.bdsOutputSize = bdsOutputSize;

	uint32_t minError = std::numeric_limits<uint32_t>::max();
	for (uint32_t shift = kMinCellSizeLog2; shift <= kMaxCellSizeLog2; ++shift) {
		uint32_t width = std::clamp(bdsOutputSize.width >> shift,
					    kMinGridWidth, kMaxGridWidth);

		width = width << shift;
		uint32_t error = std::abs(static_cast<int>(width - bdsOutputSize.width));
		if (error >= minError)
			continue;

		minError = error;
		best.width = width;
		bestLog2.width = shift;
	}

	minError = std::numeric_limits<uint32_t>::max();
	for (uint32_t shift = kMinCellSizeLog2; shift <= kMaxCellSizeLog2; ++shift) {
		uint32_t height = std::clamp(bdsOutputSize.height >> shift,
					     kMinGridHeight, kMaxGridHeight);

		height = height << shift;
		uint32_t error = std::abs(static_cast<int>(height - bdsOutputSize.height));
		if (error >= minError)
			continue;

		minError = error;
		best.height = height;
		bestLog2.height = shift;
	}

	struct ipu3_uapi_grid_config &bdsGrid = context_.configuration.grid.bdsGrid;
	bdsGrid.x_start = 0;
	bdsGrid.y_start = 0;
	bdsGrid.width = best.width >> bestLog2.width;
	bdsGrid.block_width_log2 = bestLog2.width;
	bdsGrid.height = best.height >> bestLog2.height;
	bdsGrid.block_height_log2 = bestLog2.height;

	/* The ImgU pads the lines to a multiple of 4 cells. */
	context_.configuration.grid.stride = utils::alignUp(bdsGrid.width, 4);

	LOG(IPAIPU3, Debug) << "Best grid found is: ("
			    << (int)bdsGrid.width << " << " << (int)bdsGrid.block_width_log2
			    << ") x ("
			    << (int)bdsGrid.height << " << " << (int)bdsGrid.block_height_log2
			    << ")";
}

/**
 * \brief Configure the IPU3 IPA
 * \param[in] configInfo The IPA configuration data, received from the pipeline
 * handler
 * \param[in] ipaControls The IPA controls to update
 *
 * Calculate the best grid for the statistics based on the pipeline handler BDS
 * output, and parse the minimum and maximum exposure and analogue gain control
 * values.
 *
 * \todo Document what the BDS is, ideally in a block diagram of the ImgU.
 *
 * All algorithm modules are called to allow them to prepare the
 * \a IPASessionConfiguration structure for the \a IPAContext.
 */
int IPAIPU3::configure(const IPAConfigInfo &configInfo,
		       ControlInfoMap *ipaControls)
{
	if (configInfo.sensorControls.empty()) {
		LOG(IPAIPU3, Error) << "No sensor controls provided";
		return -ENODATA;
	}

	sensorInfo_ = configInfo.sensorInfo;

	/*
	 * Compute the sensor V4L2 controls to be used by the algorithms and
	 * to be set on the sensor.
	 */
	ctrls_ = configInfo.sensorControls;

	const auto itExp = ctrls_.find(V4L2_CID_EXPOSURE);
	if (itExp == ctrls_.end()) {
		LOG(IPAIPU3, Error) << "Can't find exposure control";
		return -EINVAL;
	}

	const auto itGain = ctrls_.find(V4L2_CID_ANALOGUE_GAIN);
	if (itGain == ctrls_.end()) {
		LOG(IPAIPU3, Error) << "Can't find gain control";
		return -EINVAL;
	}

	const auto itVBlank = ctrls_.find(V4L2_CID_VBLANK);
	if (itVBlank == ctrls_.end()) {
		LOG(IPAIPU3, Error) << "Can't find VBLANK control";
		return -EINVAL;
	}

	minExposure_ = itExp->second.min().get<int32_t>();
	maxExposure_ = itExp->second.max().get<int32_t>();
	exposure_ = minExposure_;

	minGain_ = itGain->second.min().get<int32_t>();
	maxGain_ = itGain->second.max().get<int32_t>();
	gain_ = minGain_;

	defVBlank_ = itVBlank->second.def().get<int32_t>();

	/* Clean context at configuration */
	context_ = {};

	calculateBdsGrid(configInfo.bdsOutputSize);

	lineDuration_ = sensorInfo_.lineLength * 1.0s / sensorInfo_.pixelRate;

	/* Update the camera controls using the new sensor settings. */
	updateControls(sensorInfo_, ctrls_, ipaControls);

	/* Update the IPASessionConfiguration using the sensor settings. */
	updateSessionConfiguration(ctrls_);

	for (auto const &algo : algorithms_) {
		int ret = algo->configure(context_, configInfo);
		if (ret)
			return ret;
	}

	return 0;
}

/**
 * \brief Map the parameters and stats buffers allocated in the pipeline handler
 * \param[in] buffers The buffers to map
 */
void IPAIPU3::mapBuffers(const std::vector<IPABuffer> &buffers)
{
	for (const IPABuffer &buffer : buffers) {
		const FrameBuffer fb(buffer.planes);
		buffers_.emplace(buffer.id,
				 MappedFrameBuffer(&fb, MappedFrameBuffer::MapFlag::ReadWrite));
	}
}

/**
 * \brief Unmap the parameters and stats buffers
 * \param[in] ids The IDs of the buffers to unmap
 */
void IPAIPU3::unmapBuffers(const std::vector<unsigned int> &ids)
{
	for (unsigned int id : ids) {
		auto it = buffers_.find(id);
		if (it == buffers_.end())
			continue;

		buffers_.erase(it);
	}
}

/**
 * \brief Process an event generated by the pipeline handler
 * \param[in] event The event sent from pipeline handler
 *
 * The expected event handling over the lifetime of a Request has
 * the following sequence:
 *
 * - EventProcessControls : Handle controls from a new Request
 * - EventFillParams : Prepare the ISP to process the Request
 * - EventStatReady : Process statistics after ISP completion
 */
void IPAIPU3::processEvent(const IPU3Event &event)
{
	switch (event.op) {
	case EventProcessControls: {
		processControls(event.frame, event.controls);
		break;
	}
	case EventFillParams: {
		auto it = buffers_.find(event.bufferId);
		if (it == buffers_.end()) {
			LOG(IPAIPU3, Error) << "Could not find param buffer!";
			return;
		}

		Span<uint8_t> mem = it->second.planes()[0];
		ipu3_uapi_params *params =
			reinterpret_cast<ipu3_uapi_params *>(mem.data());

		fillParams(event.frame, params);
		break;
	}
	case EventStatReady: {
		auto it = buffers_.find(event.bufferId);
		if (it == buffers_.end()) {
			LOG(IPAIPU3, Error) << "Could not find stats buffer!";
			return;
		}

		Span<uint8_t> mem = it->second.planes()[0];
		const ipu3_uapi_stats_3a *stats =
			reinterpret_cast<ipu3_uapi_stats_3a *>(mem.data());

		context_.frameContext.sensor.exposure =
			event.sensorControls.get(V4L2_CID_EXPOSURE).get<int32_t>();
		context_.frameContext.sensor.gain =
			camHelper_->gain(event.sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>());

		parseStatistics(event.frame, event.frameTimestamp, stats);
		break;
	}
	default:
		LOG(IPAIPU3, Error) << "Unknown event " << event.op;
		break;
	}
}

/**
 * \brief Process a control list for a request from the application
 * \param[in] frame The number of the frame which will be processed next
 * \param[in] controls The controls for the \a frame
 *
 * Parse the request to handle any IPA-managed controls that were set from the
 * application such as manual sensor settings.
 */
void IPAIPU3::processControls([[maybe_unused]] unsigned int frame,
			      [[maybe_unused]] const ControlList &controls)
{
	/* \todo Start processing for 'frame' based on 'controls'. */
}

/**
 * \brief Fill the ImgU parameter buffer for the next frame
 * \param[in] frame The number of the latest frame processed
 * \param[out] params The parameter buffer to fill
 *
 * Algorithms are expected to fill the IPU3 parameter buffer for the next
 * frame given their most recent processing of the ImgU statistics.
 */
void IPAIPU3::fillParams(unsigned int frame, ipu3_uapi_params *params)
{
	/*
	 * The incoming params buffer may contain uninitialised data, or the
	 * parameters of previously queued frames. Clearing the entire buffer
	 * may be an expensive operation, and the kernel will only read from
	 * structures which have their associated use-flag set.
	 *
	 * It is the responsibility of the algorithms to set the use flags
	 * accordingly for any data structure they update during prepare().
	 */
	params->use = {};

	for (auto const &algo : algorithms_)
		algo->prepare(context_, params);

	IPU3Action op;
	op.op = ActionParamFilled;

	queueFrameAction.emit(frame, op);
}

/**
 * \brief Process the statistics generated by the ImgU
 * \param[in] frame The number of the latest frame processed
 * \param[in] frameTimestamp The current frame timestamp
 * \param[in] stats The IPU3 statistics and ISP results
 *
 * Parse the most recently processed image statistics from the ImgU. The
 * statistics are passed to each algorithm module to run their calculations and
 * update their state accordingly.
 */
void IPAIPU3::parseStatistics(unsigned int frame,
			      [[maybe_unused]] int64_t frameTimestamp,
			      const ipu3_uapi_stats_3a *stats)
{
	ControlList ctrls(controls::controls);

	for (auto const &algo : algorithms_)
		algo->process(context_, stats);

	setControls(frame);

	/* \todo Use VBlank value calculated from each frame exposure. */
	int64_t frameDuration = (defVBlank_ + sensorInfo_.outputSize.height) *
				lineDuration_.get<std::micro>();
	ctrls.set(controls::FrameDuration, frameDuration);

	ctrls.set(controls::AnalogueGain, context_.frameContext.sensor.gain);

	ctrls.set(controls::ColourTemperature, context_.frameContext.awb.temperatureK);

	ctrls.set(controls::ExposureTime, context_.frameContext.sensor.exposure *
					  lineDuration_.get<std::micro>());

	/*
	 * \todo The Metadata provides a path to getting extended data
	 * out to the application. Further data such as a simplified Histogram
	 * might have value to be exposed, however such data may be
	 * difficult to report in a generically parsable way and we
	 * likely want to avoid putting platform specific metadata in.
	 */

	IPU3Action op;
	op.op = ActionMetadataReady;
	op.controls = ctrls;

	queueFrameAction.emit(frame, op);
}

/**
 * \brief Handle sensor controls for a given \a frame number
 * \param[in] frame The frame on which the sensor controls should be set
 *
 * Send the desired sensor control values to the pipeline handler to request
 * that they are applied on the camera sensor.
 */
void IPAIPU3::setControls(unsigned int frame)
{
	IPU3Action op;
	op.op = ActionSetSensorControls;

	exposure_ = context_.frameContext.agc.exposure;
	gain_ = camHelper_->gainCode(context_.frameContext.agc.gain);

	ControlList ctrls(ctrls_);
	ctrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_));
	ctrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_));
	op.sensorControls = ctrls;

	queueFrameAction.emit(frame, op);
}

} /* namespace ipa::ipu3 */

/**
 * \brief External IPA module interface
 *
 * The IPAModuleInfo is required to match an IPA module construction against the
 * intended pipeline handler with the module. The API and pipeline handler
 * versions must match the corresponding IPA interface and pipeline handler.
 *
 * \sa struct IPAModuleInfo
 */
extern "C" {
const struct IPAModuleInfo ipaModuleInfo = {
	IPA_MODULE_API_VERSION,
	1,
	"PipelineHandlerIPU3",
	"ipu3",
};

/**
 * \brief Create an instance of the IPA interface
 *
 * This function is the entry point of the IPA module. It is called by the IPA
 * manager to create an instance of the IPA interface for each camera. When
 * matched against a pipeline handler, the IPAManager will construct an IPA
 * instance for each associated Camera.
 */
IPAInterface *ipaCreate()
{
	return new ipa::ipu3::IPAIPU3();
}

}

} /* namespace libcamera */
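
/*
 * Worked example, illustrative only: the arithmetic used by
 * IPAIPU3::updateControls() above, with made-up sensor numbers (not taken from
 * any real driver). Exposure limits are converted from lines to microseconds
 * using the line duration, and the frame duration is the total frame size in
 * pixels divided by the pixel rate expressed in pixels per microsecond.
 */
#include <cstdint>
#include <iostream>

int main()
{
	/* Hypothetical sensor configuration. */
	const uint64_t pixelRate = 200000000;	/* 200 Mpixels/s */
	const uint32_t width = 1920;
	const uint32_t height = 1080;
	const uint32_t hblank = 80;		/* V4L2_CID_HBLANK default */
	const uint32_t vblankDef = 920;		/* V4L2_CID_VBLANK default */
	const uint32_t maxExposureLines = 1500;	/* V4L2_CID_EXPOSURE maximum */

	/* lineDuration = lineLength / pixelRate = 2000 / 200 MHz = 10 us */
	const uint32_t lineLength = width + hblank;
	const double lineDurationUs = lineLength * 1000000.0 / pixelRate;

	/* Maximum exposure time: 1500 lines * 10 us/line = 15000 us */
	const double maxExposureUs = maxExposureLines * lineDurationUs;

	/* frameDuration = frameSize / (pixelRate / 1e6) = 4000000 / 200 = 20000 us */
	const uint64_t frameHeight = height + vblankDef;
	const uint64_t frameSize = static_cast<uint64_t>(lineLength) * frameHeight;
	const uint64_t frameDurationUs = frameSize / (pixelRate / 1000000);

	std::cout << "line duration:  " << lineDurationUs << " us\n"
		  << "max exposure:   " << maxExposureUs << " us\n"
		  << "frame duration: " << frameDurationUs << " us ("
		  << 1000000 / frameDurationUs << " fps)\n";

	return 0;
}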