path: root/src/ipa/raspberrypi/controller/rpi/agc.cpp
blob: c02b5ece242f79baadd0cbe522d98d72d5009ec6 (plain)
/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright (C) 2019, Raspberry Pi (Trading) Limited
 *
 * agc.cpp - AGC/AEC control algorithm
 */

#include <map>

#include "linux/bcm2835-isp.h"

#include "../awb_status.h"
#include "../device_status.h"
#include "../histogram.hpp"
#include "../logging.hpp"
#include "../lux_status.h"
#include "../metadata.hpp"

#include "agc.hpp"

using namespace RPi;

#define NAME "rpi.agc"

#define PIPELINE_BITS 13 // seems to be a 13-bit pipeline

void AgcMeteringMode::Read(boost::property_tree::ptree const &params)
{
	int num = 0;
	for (auto &p : params.get_child("weights")) {
		if (num == AGC_STATS_SIZE)
			throw std::runtime_error("AgcConfig: too many weights");
		weights[num++] = p.second.get_value<double>();
	}
	if (num != AGC_STATS_SIZE)
		throw std::runtime_error("AgcConfig: insufficient weights");
}

static std::string
read_metering_modes(std::map<std::string, AgcMeteringMode> &metering_modes,
		    boost::property_tree::ptree const &params)
{
	std::string first;
	for (auto &p : params) {
		AgcMeteringMode metering_mode;
		metering_mode.Read(p.second);
		metering_modes[p.first] = std::move(metering_mode);
		if (first.empty())
			first = p.first;
	}
	return first;
}
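
/*
 * For illustration only (hypothetical mode name and values): the tuning-file
 * fragment consumed by read_metering_modes looks roughly like
 *
 *   "metering_modes": {
 *       "centre-weighted": { "weights": [ 4, 4, 3, 2, ... ] }
 *   }
 *
 * where each "weights" list must contain exactly AGC_STATS_SIZE entries, one
 * per statistics region.
 */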

static int read_double_list(std::vector<double> &list,
			    boost::property_tree::ptree const &params)
{
	for (auto &p : params)
		list.push_back(p.second.get_value<double>());
	return list.size();
}

void AgcExposureMode::Read(boost::property_tree::ptree const &params)
{
	int num_shutters =
		read_double_list(shutter, params.get_child("shutter"));
	int num_ags = read_double_list(gain, params.get_child("gain"));
	if (num_shutters < 2 || num_ags < 2)
		throw std::runtime_error(
			"AgcConfig: must have at least two entries in exposure profile");
	if (num_shutters != num_ags)
		throw std::runtime_error(
			"AgcConfig: expect same number of exposure and gain entries in exposure profile");
}
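
/*
 * Illustrative exposure profile (hypothetical values): paired lists of
 * shutter times and analogue gains, e.g.
 *
 *   "shutter": [ 100, 10000, 30000, 60000 ],
 *   "gain":    [ 1.0,   2.0,    4.0,    8.0 ]
 *
 * Entry i of each list forms stage i of the profile, which divvyupExposure()
 * walks through further down in this file.
 */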

static std::string
read_exposure_modes(std::map<std::string, AgcExposureMode> &exposure_modes,
		    boost::property_tree::ptree const &params)
{
	std::string first;
	for (auto &p : params) {
		AgcExposureMode exposure_mode;
		exposure_mode.Read(p.second);
		exposure_modes[p.first] = std::move(exposure_mode);
		if (first.empty())
			first = p.first;
	}
	return first;
}

void AgcConstraint::Read(boost::property_tree::ptree const &params)
{
	std::string bound_string = params.get<std::string>("bound", "");
	transform(bound_string.begin(), bound_string.end(),
		  bound_string.begin(), ::toupper);
	if (bound_string != "UPPER" && bound_string != "LOWER")
		throw std::runtime_error(
			"AGC constraint type should be UPPER or LOWER");
	bound = bound_string == "UPPER" ? Bound::UPPER : Bound::LOWER;
	q_lo = params.get<double>("q_lo");
	q_hi = params.get<double>("q_hi");
	Y_target.Read(params.get_child("y_target"));
}

static AgcConstraintMode
read_constraint_mode(boost::property_tree::ptree const &params)
{
	AgcConstraintMode mode;
	for (auto &p : params) {
		AgcConstraint constraint;
		constraint.Read(p.second);
		mode.push_back(std::move(constraint));
	}
	return mode;
}

static std::string read_constraint_modes(
	std::map<std::string, AgcConstraintMode> &constraint_modes,
	boost::property_tree::ptree const &params)
{
	std::string first;
	for (auto &p : params) {
		constraint_modes[p.first] = read_constraint_mode(p.second);
		if (first.empty())
			first = p.first;
	}
	return first;
}

void AgcConfig::Read(boost::property_tree::ptree const &params)
{
	RPI_LOG("AgcConfig");
	default_metering_mode = read_metering_modes(
		metering_modes, params.get_child("metering_modes"));
	default_exposure_mode = read_exposure_modes(
		exposure_modes, params.get_child("exposure_modes"));
	default_constraint_mode = read_constraint_modes(
		constraint_modes, params.get_child("constraint_modes"));
	Y_target.Read(params.get_child("y_target"));
	speed = params.get<double>("speed", 0.2);
	startup_frames = params.get<uint16_t>("startup_frames", 10);
	fast_reduce_threshold =
		params.get<double>("fast_reduce_threshold", 0.4);
	base_ev = params.get<double>("base_ev", 1.0);
}
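
/*
 * A minimal "rpi.agc" tuning section therefore needs "metering_modes",
 * "exposure_modes", "constraint_modes" and "y_target"; the scalar parameters
 * are optional and fall back to the defaults coded above (speed 0.2,
 * startup_frames 10, fast_reduce_threshold 0.4, base_ev 1.0).
 */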

Agc::Agc(Controller *controller)
	: AgcAlgorithm(controller), metering_mode_(nullptr),
	  exposure_mode_(nullptr), constraint_mode_(nullptr),
	  frame_count_(0), lock_count_(0)
{
	ev_ = status_.ev = 1.0;
	flicker_period_ = status_.flicker_period = 0.0;
	fixed_shutter_ = status_.fixed_shutter = 0;
	fixed_analogue_gain_ = status_.fixed_analogue_gain = 0.0;
	// set to zero initially, so we can tell it's not been calculated
	status_.total_exposure_value = 0.0;
	status_.target_exposure_value = 0.0;
	status_.locked = false;
	output_status_ = status_;
}

char const *Agc::Name() const
{
	return NAME;
}

void Agc::Read(boost::property_tree::ptree const &params)
{
	RPI_LOG("Agc");
	config_.Read(params);
	// Set the config's defaults (which are the first ones it read) as our
	// current modes, until someone changes them. (They're all known to
	// exist at this point.)
	metering_mode_name_ = config_.default_metering_mode;
	metering_mode_ = &config_.metering_modes[metering_mode_name_];
	exposure_mode_name_ = config_.default_exposure_mode;
	exposure_mode_ = &config_.exposure_modes[exposure_mode_name_];
	constraint_mode_name_ = config_.default_constraint_mode;
	constraint_mode_ = &config_.constraint_modes[constraint_mode_name_];
}

void Agc::SetEv(double ev)
{
	std::unique_lock<std::mutex> lock(settings_mutex_);
	ev_ = ev;
}

void Agc::SetFlickerPeriod(double flicker_period)
{
	std::unique_lock<std::mutex> lock(settings_mutex_);
	flicker_period_ = flicker_period;
}

void Agc::SetFixedShutter(double fixed_shutter)
{
	std::unique_lock<std::mutex> lock(settings_mutex_);
	fixed_shutter_ = fixed_shutter;
}

void Agc::SetFixedAnalogueGain(double fixed_analogue_gain)
{
	std::unique_lock<std::mutex> lock(settings_mutex_);
	fixed_analogue_gain_ = fixed_analogue_gain;
}

void Agc::SetMeteringMode(std::string const &metering_mode_name)
{
	std::unique_lock<std::mutex> lock(settings_mutex_);
	metering_mode_name_ = metering_mode_name;
}

void Agc::SetExposureMode(std::string const &exposure_mode_name)
{
	std::unique_lock<std::mutex> lock(settings_mutex_);
	exposure_mode_name_ = exposure_mode_name;
}

void Agc::SetConstraintMode(std::string const &constraint_mode_name)
{
	std::unique_lock<std::mutex> lock(settings_mutex_);
	constraint_mode_name_ = constraint_mode_name;
}

void Agc::SwitchMode(CameraMode const &camera_mode, Metadata *metadata)
{
	// On a mode switch the exposure profile may change, so we re-run the
	// division of the exposure into shutter/gain and write the results
	// into the metadata we've been given.
	if (status_.total_exposure_value) {
		housekeepConfig();
		divvyupExposure();
		writeAndFinish(metadata, false);
	}
}

void Agc::Prepare(Metadata *image_metadata)
{
	AgcStatus status;
	{
		std::unique_lock<std::mutex> lock(output_mutex_);
		status = output_status_;
	}
	int lock_count = lock_count_;
	lock_count_ = 0;
	status.digital_gain = 1.0;
	if (status_.total_exposure_value) {
		// Process has run, so we have meaningful values.
		DeviceStatus device_status;
		if (image_metadata->Get("device.status", device_status) == 0) {
			double actual_exposure = device_status.shutter_speed *
						 device_status.analogue_gain;
			if (actual_exposure) {
				status.digital_gain =
					status_.total_exposure_value /
					actual_exposure;
				RPI_LOG("Want total exposure " << status_.total_exposure_value);
				// Never ask for a gain < 1.0, and also impose
				// some upper limit. Make it customisable?
				status.digital_gain = std::max(
					1.0,
					std::min(status.digital_gain, 4.0));
				RPI_LOG("Actual exposure " << actual_exposure);
				RPI_LOG("Use digital_gain " << status.digital_gain);
				RPI_LOG("Effective exposure " << actual_exposure * status.digital_gain);
				// Decide whether AEC/AGC has converged.
				// Insist AGC is steady for MAX_LOCK_COUNT
				// frames before we say we are "locked".
				// (The hard-coded constants may need to
				// become customisable.)
				if (status.target_exposure_value) {
#define MAX_LOCK_COUNT 3
					double err = 0.10 * status.target_exposure_value + 200;
					if (actual_exposure <
					    status.target_exposure_value + err
					    && actual_exposure >
					    status.target_exposure_value - err)
						lock_count_ =
							std::min(lock_count + 1,
							       MAX_LOCK_COUNT);
					else if (actual_exposure <
						 status.target_exposure_value
						 + 1.5 * err &&
						 actual_exposure >
						 status.target_exposure_value
						 - 1.5 * err)
						lock_count_ = lock_count;
					RPI_LOG("Lock count: " << lock_count_);
				}
			}
		} else
			RPI_LOG(Name() << ": no device metadata");
		status.locked = lock_count_ >= MAX_LOCK_COUNT;
		//printf("%s\n", status.locked ? "+++++++++" : "-");
		image_metadata->Set("agc.status", status);
	}
}
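
/*
 * Worked example for the digital gain and lock logic above (illustrative
 * numbers, not taken from a real sensor): with total_exposure_value 12000,
 * target_exposure_value 10000 and an actual shutter*gain of 11000, the
 * digital gain requested is 12000/11000 ~= 1.09 (already within the [1.0,
 * 4.0] clamp), err = 0.10 * 10000 + 200 = 1200, and since 11000 lies within
 * 10000 +/- 1200 the lock count increments towards MAX_LOCK_COUNT.
 */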

void Agc::Process(StatisticsPtr &stats, Metadata *image_metadata)
{
	frame_count_++;
	// First a little housekeeping: fetch the up-to-date settings and
	// configuration.
	housekeepConfig();
	// Get the current exposure values for the frame that's just arrived.
	fetchCurrentExposure(image_metadata);
	// Compute the total gain we require relative to the current exposure.
	double gain, target_Y;
	computeGain(stats.get(), image_metadata, gain, target_Y);
	// Now compute the target (final) exposure which we think we want.
	computeTargetExposure(gain);
	// Some of the exposure has to be applied as digital gain, so work out
	// what that is. This function also tells us whether it's decided to
	// "desaturate" the image more quickly.
	bool desaturate = applyDigitalGain(image_metadata, gain, target_Y);
	// The results have to be filtered so as not to change too rapidly.
	filterExposure(desaturate);
	// The last thing is to divvy up the exposure value into a shutter time
	// and analogue_gain, according to the current exposure mode.
	divvyupExposure();
	// Finally advertise what we've done.
	writeAndFinish(image_metadata, desaturate);
}

static void copy_string(std::string const &s, char *d, size_t size)
{
	size_t length = s.copy(d, size - 1);
	d[length] = '\0';
}

void Agc::housekeepConfig()
{
	// First fetch all the up-to-date settings, so no one else has to do it.
	std::string new_exposure_mode_name, new_constraint_mode_name,
		new_metering_mode_name;
	{
		std::unique_lock<std::mutex> lock(settings_mutex_);
		new_metering_mode_name = metering_mode_name_;
		new_exposure_mode_name = exposure_mode_name_;
		new_constraint_mode_name = constraint_mode_name_;
		status_.ev = ev_;
		status_.fixed_shutter = fixed_shutter_;
		status_.fixed_analogue_gain = fixed_analogue_gain_;
		status_.flicker_period = flicker_period_;
	}
	RPI_LOG("ev " << status_.ev << " fixed_shutter "
		      << status_.fixed_shutter << " fixed_analogue_gain "
		      << status_.fixed_analogue_gain);
	// Make sure the "mode" pointers refer to the up-to-date modes, if
	// they've changed.
	if (strcmp(new_metering_mode_name.c_str(), status_.metering_mode)) {
		auto it = config_.metering_modes.find(new_metering_mode_name);
		if (it == config_.metering_modes.end())
			throw std::runtime_error("Agc: no metering mode " +
						 new_metering_mode_name);
		metering_mode_ = &it->second;
		copy_string(new_metering_mode_name, status_.metering_mode,
			    sizeof(status_.metering_mode));
	}
	if (strcmp(new_exposure_mode_name.c_str(), status_.exposure_mode)) {
		auto it = config_.exposure_modes.find(new_exposure_mode_name);
		if (it == config_.exposure_modes.end())
			throw std::runtime_error("Agc: no exposure profile " +
						 new_exposure_mode_name);
		exposure_mode_ = &it->second;
		copy_string(new_exposure_mode_name, status_.exposure_mode,
			    sizeof(status_.exposure_mode));
	}
	if (strcmp(new_constraint_mode_name.c_str(), status_.constraint_mode)) {
		auto it =
			config_.constraint_modes.find(new_constraint_mode_name);
		if (it == config_.constraint_modes.end())
			throw std::runtime_error("Agc: no constraint list " +
						 new_constraint_mode_name);
		constraint_mode_ = &it->second;
		copy_string(new_constraint_mode_name, status_.constraint_mode,
			    sizeof(status_.constraint_mode));
	}
	RPI_LOG("exposure_mode "
		<< new_exposure_mode_name << " constraint_mode "
		<< new_constraint_mode_name << " metering_mode "
		<< new_metering_mode_name);
}

void Agc::fetchCurrentExposure(Metadata *image_metadata)
{
	std::unique_lock<Metadata> lock(*image_metadata);
	DeviceStatus *device_status =
		image_metadata->GetLocked<DeviceStatus>("device.status");
	if (!device_status)
		throw std::runtime_error("Agc: no device metadata");
	current_.shutter = device_status->shutter_speed;
	current_.analogue_gain = device_status->analogue_gain;
	AgcStatus *agc_status =
		image_metadata->GetLocked<AgcStatus>("agc.status");
	current_.total_exposure = agc_status ? agc_status->total_exposure_value : 0;
	current_.total_exposure_no_dg = current_.shutter * current_.analogue_gain;
}

static double compute_initial_Y(bcm2835_isp_stats *stats, Metadata *image_metadata,
				double weights[])
{
	bcm2835_isp_stats_region *regions = stats->agc_stats;
	struct AwbStatus awb;
	awb.gain_r = awb.gain_g = awb.gain_b = 1.0; // in case no metadata
	if (image_metadata->Get("awb.status", awb) != 0)
		RPI_WARN("Agc: no AWB status found");
	double Y_sum = 0, weight_sum = 0;
	for (int i = 0; i < AGC_STATS_SIZE; i++) {
		if (regions[i].counted == 0)
			continue;
		weight_sum += weights[i];
		double Y = regions[i].r_sum * awb.gain_r * .299 +
			   regions[i].g_sum * awb.gain_g * .587 +
			   regions[i].b_sum * awb.gain_b * .114;
		Y /= regions[i].counted;
		Y_sum += Y * weights[i];
	}
	return Y_sum / weight_sum / (1 << PIPELINE_BITS);
}
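
/*
 * In other words, each counted region contributes a Rec.601-style luma
 * (0.299 R + 0.587 G + 0.114 B, with the AWB gains applied first) averaged
 * over its counted pixels; the weighted mean of those values is then
 * normalised by 2^PIPELINE_BITS (8192) so the returned Y sits roughly in the
 * 0..1 range.
 */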

// We handle extra gain through EV by adjusting our Y targets. However, you
// simply can't monitor histograms once they get very close to (or beyond!)
// saturation, so we clamp the Y targets to this value. It does mean that EV
// increases don't necessarily do quite what you might expect in certain
// (contrived) cases.

#define EV_GAIN_Y_TARGET_LIMIT 0.9
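
// For example (illustrative numbers): a tuned Y target of 0.16 with an EV
// gain of 8 would ask for 1.28, which is clamped here to 0.9.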

static double constraint_compute_gain(AgcConstraint &c, Histogram &h,
				      double lux, double ev_gain,
				      double &target_Y)
{
	target_Y = c.Y_target.Eval(c.Y_target.Domain().Clip(lux));
	target_Y = std::min(EV_GAIN_Y_TARGET_LIMIT, target_Y * ev_gain);
	double iqm = h.InterQuantileMean(c.q_lo, c.q_hi);
	return (target_Y * NUM_HISTOGRAM_BINS) / iqm;
}
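
/*
 * Example (illustrative): if a constraint's target_Y works out at 0.4 and
 * the inter-quantile mean of the histogram sits at half of full scale
 * (iqm = NUM_HISTOGRAM_BINS / 2), the constraint asks for a gain of
 * 0.4 * NUM_HISTOGRAM_BINS / (NUM_HISTOGRAM_BINS / 2) = 0.8.
 */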

void Agc::computeGain(bcm2835_isp_stats *statistics, Metadata *image_metadata,
		      double &gain, double &target_Y)
{
	struct LuxStatus lux = {};
	lux.lux = 400; // default lux level to 400 in case no metadata found
	if (image_metadata->Get("lux.status", lux) != 0)
		RPI_WARN("Agc: no lux level found");
	Histogram h(statistics->hist[0].g_hist, NUM_HISTOGRAM_BINS);
	double ev_gain = status_.ev * config_.base_ev;
	// The initial gain and target_Y come from some of the regions. After
	// that we consider the histogram constraints.
	target_Y =
		config_.Y_target.Eval(config_.Y_target.Domain().Clip(lux.lux));
	target_Y = std::min(EV_GAIN_Y_TARGET_LIMIT, target_Y * ev_gain);
	double initial_Y = compute_initial_Y(statistics, image_metadata,
					     metering_mode_->weights);
	gain = std::min(10.0, target_Y / (initial_Y + .001));
	RPI_LOG("Initially Y " << initial_Y << " target " << target_Y
			       << " gives gain " << gain);
	for (auto &c : *constraint_mode_) {
		double new_target_Y;
		double new_gain =
			constraint_compute_gain(c, h, lux.lux, ev_gain,
						new_target_Y);
		RPI_LOG("Constraint has target_Y "
			<< new_target_Y << " giving gain " << new_gain);
		if (c.bound == AgcConstraint::Bound::LOWER &&
		    new_gain > gain) {
			RPI_LOG("Lower bound constraint adopted");
			gain = new_gain, target_Y = new_target_Y;
		} else if (c.bound == AgcConstraint::Bound::UPPER &&
			   new_gain < gain) {
			RPI_LOG("Upper bound constraint adopted");
			gain = new_gain, target_Y = new_target_Y;
		}
	}
	RPI_LOG("Final gain " << gain << " (target_Y " << target_Y << " ev "
			      << status_.ev << " base_ev " << config_.base_ev
			      << ")");
}

void Agc::computeTargetExposure(double gain)
{
	// The statistics reflect the image without digital gain, so the final
	// total exposure we're aiming for is:
	target_.total_exposure = current_.total_exposure_no_dg * gain;
	// The final target exposure is also limited to what the exposure
	// mode allows.
	double max_total_exposure =
		(status_.fixed_shutter != 0.0
			 ? status_.fixed_shutter
			 : exposure_mode_->shutter.back()) *
		(status_.fixed_analogue_gain != 0.0
			 ? status_.fixed_analogue_gain
			 : exposure_mode_->gain.back());
	target_.total_exposure = std::min(target_.total_exposure,
					  max_total_exposure);
	RPI_LOG("Target total_exposure " << target_.total_exposure);
}

bool Agc::applyDigitalGain(Metadata *image_metadata, double gain,
			   double target_Y)
{
	double dg = 1.0;
	// I think this pipeline subtracts black level and rescales before we
	// get the stats, so no need to worry about it.
	struct AwbStatus awb;
	if (image_metadata->Get("awb.status", awb) == 0) {
		double min_gain = std::min(awb.gain_r,
					   std::min(awb.gain_g, awb.gain_b));
		dg *= std::max(1.0, 1.0 / min_gain);
	} else
		RPI_WARN("Agc: no AWB status found");
	RPI_LOG("after AWB, target dg " << dg << " gain " << gain
					<< " target_Y " << target_Y);
	// Finally, if we're trying to reduce exposure but the target_Y is
	// "close" to 1.0, then the gain computed for that constraint will be
	// only slightly less than one, because the measured Y can never be
	// larger than 1.0. When this happens, demand a large digital gain so
	// that the exposure can be reduced, de-saturating the image much more
	// quickly (and we then approach the correct value more quickly from
	// below).
	bool desaturate = target_Y > config_.fast_reduce_threshold &&
			  gain < sqrt(target_Y);
	if (desaturate)
		dg /= config_.fast_reduce_threshold;
	RPI_LOG("Digital gain " << dg << " desaturate? " << desaturate);
	target_.total_exposure_no_dg = target_.total_exposure / dg;
	RPI_LOG("Target total_exposure_no_dg " << target_.total_exposure_no_dg);
	return desaturate;
}
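
/*
 * Worked example of the desaturate test (illustrative numbers): with the
 * default fast_reduce_threshold of 0.4, a target_Y of 0.9 and a computed
 * gain of 0.85, we have 0.9 > 0.4 and 0.85 < sqrt(0.9) ~= 0.949, so
 * desaturate is true and dg is boosted by 1/0.4 = 2.5x, which pushes
 * total_exposure_no_dg (and hence the requested shutter/gain) down quickly.
 */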

void Agc::filterExposure(bool desaturate)
{
	double speed = frame_count_ <= config_.startup_frames ? 1.0 : config_.speed;
	if (filtered_.total_exposure == 0.0) {
		filtered_.total_exposure = target_.total_exposure;
		filtered_.total_exposure_no_dg = target_.total_exposure_no_dg;
	} else {
		// If we're close to the result, move faster to avoid making
		// so many micro-adjustments on the way. (Make this customisable?)
		if (filtered_.total_exposure < 1.2 * target_.total_exposure &&
		    filtered_.total_exposure > 0.8 * target_.total_exposure)
			speed = sqrt(speed);
		filtered_.total_exposure = speed * target_.total_exposure +
					   filtered_.total_exposure * (1.0 - speed);
		// When desaturating, take a big jump down in exposure_no_dg,
		// which we'll hide with digital gain.
		if (desaturate)
			filtered_.total_exposure_no_dg =
				target_.total_exposure_no_dg;
		else
			filtered_.total_exposure_no_dg =
				speed * target_.total_exposure_no_dg +
				filtered_.total_exposure_no_dg * (1.0 - speed);
	}
	// We can't let the no_dg exposure deviate too far below the
	// total exposure, as there might not be enough digital gain available
	// in the ISP to hide it (which will cause nasty oscillation).
	if (filtered_.total_exposure_no_dg <
	    filtered_.total_exposure * config_.fast_reduce_threshold)
		filtered_.total_exposure_no_dg = filtered_.total_exposure *
						 config_.fast_reduce_threshold;
	RPI_LOG("After filtering, total_exposure " << filtered_.total_exposure <<
		" no dg " << filtered_.total_exposure_no_dg);
}
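
/*
 * Example of the filtering above (illustrative numbers): with the default
 * speed of 0.2, each frame moves the filtered exposure 20% of the way to the
 * target; once the filtered value is within +/-20% of the target the speed
 * becomes sqrt(0.2) ~= 0.45, so convergence finishes faster. The final clamp
 * stops total_exposure_no_dg falling below fast_reduce_threshold (0.4 by
 * default) times the filtered total exposure.
 */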

void Agc::divvyupExposure()
{
	// Sending the fixed shutter/gain cases through the same code may seem
	// unnecessary, but it will make more sense when we extend this to
	// cover variable aperture.
	double exposure_value = filtered_.total_exposure_no_dg;
	double shutter_time, analogue_gain;
	shutter_time = status_.fixed_shutter != 0.0
			       ? status_.fixed_shutter
			       : exposure_mode_->shutter[0];
	analogue_gain = status_.fixed_analogue_gain != 0.0
				? status_.fixed_analogue_gain
				: exposure_mode_->gain[0];
	if (shutter_time * analogue_gain < exposure_value) {
		for (unsigned int stage = 1;
		     stage < exposure_mode_->gain.size(); stage++) {
			if (status_.fixed_shutter == 0.0) {
				if (exposure_mode_->shutter[stage] *
					    analogue_gain >=
				    exposure_value) {
					shutter_time =
						exposure_value / analogue_gain;
					break;
				}
				shutter_time = exposure_mode_->shutter[stage];
			}
			if (status_.fixed_analogue_gain == 0.0) {
				if (exposure_mode_->gain[stage] *
					    shutter_time >=
				    exposure_value) {
					analogue_gain =
						exposure_value / shutter_time;
					break;
				}
				analogue_gain = exposure_mode_->gain[stage];
			}
		}
	}
	RPI_LOG("Divided up shutter and gain are " << shutter_time << " and "
						   << analogue_gain);
	// Finally adjust shutter time for flicker avoidance (require both
	// shutter and gain not to be fixed).
	if (status_.fixed_shutter == 0.0 &&
	    status_.fixed_analogue_gain == 0.0 &&
	    status_.flicker_period != 0.0) {
		int flicker_periods = shutter_time / status_.flicker_period;
		if (flicker_periods > 0) {
			double new_shutter_time = flicker_periods * status_.flicker_period;
			analogue_gain *= shutter_time / new_shutter_time;
			// We should still not allow the ag to go over the
			// largest value in the exposure mode. Note that this
			// may force more of the total exposure into the digital
			// gain as a side-effect.
			analogue_gain = std::min(analogue_gain,
						 exposure_mode_->gain.back());
			shutter_time = new_shutter_time;
		}
		RPI_LOG("After flicker avoidance, shutter "
			<< shutter_time << " gain " << analogue_gain);
	}
	filtered_.shutter = shutter_time;
	filtered_.analogue_gain = analogue_gain;
}
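
/*
 * Flicker avoidance example (illustrative numbers): with a flicker_period of
 * 10000 and a divided-up shutter of 14000 at analogue gain 2.0, we get
 * flicker_periods = 1, so the shutter is shortened to 10000 and the gain
 * scaled by 14000/10000 to 2.8 (subject to the exposure mode's maximum gain).
 */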

void Agc::writeAndFinish(Metadata *image_metadata, bool desaturate)
{
	status_.total_exposure_value = filtered_.total_exposure;
	status_.target_exposure_value = desaturate ? 0 : target_.total_exposure_no_dg;
	status_.shutter_time = filtered_.shutter;
	status_.analogue_gain = filtered_.analogue_gain;
	{
		std::unique_lock<std::mutex> lock(output_mutex_);
		output_status_ = status_;
	}
	// Write to metadata as well, in case anyone wants to update the camera
	// immediately.
	image_metadata->Set("agc.status", status_);
	RPI_LOG("Output written, total exposure requested is "
		<< filtered_.total_exposure);
	RPI_LOG("Camera exposure update: shutter time " << filtered_.shutter <<
		" analogue gain " << filtered_.analogue_gain);
}

// Register algorithm with the system.
static Algorithm *Create(Controller *controller)
{
	return (Algorithm *)new Agc(controller);
}
static RegisterAlgorithm reg(NAME, &Create);