summaryrefslogtreecommitdiff
path: root/test/camera/buffer_import.cpp
blob: 4b626dffa57525792cbca153725728d32037e467 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * libcamera Camera API tests
 *
 * Test importing buffers exported from the VIVID output device into a Camera
 */

#include <algorithm>
#include <iostream>
#include <numeric>
#include <vector>

#include <libcamera/event_dispatcher.h>
#include <libcamera/timer.h>

#include "libcamera/internal/device_enumerator.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/v4l2_videodevice.h"

#include "buffer_source.h"
#include "camera_test.h"
#include "test.h"

using namespace libcamera;

namespace {

class BufferImportTest : public CameraTest, public Test
{
public:
	BufferImportTest()
		: CameraTest("platform/vimc.0 Sensor B")
	{
	}

protected:
	/* Count each buffer that completed with a successful capture. */
	void bufferComplete([[maybe_unused]] Request *request,
			    FrameBuffer *buffer)
	{
		if (buffer->metadata().status != FrameMetadata::FrameSuccess)
			return;

		completeBuffersCount_++;
	}

	/*
	 * On each completed request, requeue the same stream/buffer pair so
	 * that capture keeps running until the test's timer expires.
	 */
	void requestComplete(Request *request)
	{
		if (request->status() != Request::RequestComplete)
			return;

		const Request::BufferMap &buffers = request->buffers();

		completeRequestsCount_++;

		/* Create a new request. */
		const Stream *stream = buffers.begin()->first;
		FrameBuffer *buffer = buffers.begin()->second;

		request->reuse();
		request->addBuffer(stream, buffer);
		camera_->queueRequest(request);
	}

	/* Generate a single-stream video recording configuration. */
	int init() override
	{
		if (status_ != TestPass)
			return status_;

		config_ = camera_->generateConfiguration({ StreamRole::VideoRecording });
		if (!config_ || config_->size() != 1) {
			std::cout << "Failed to generate default configuration" << std::endl;
			return TestFail;
		}

		return TestPass;
	}

	/*
	 * Configure the camera, import externally allocated buffers through
	 * requests, capture for one second and verify that enough frames
	 * completed and that buffer/request completions stayed in sync.
	 */
	int run() override
	{
		StreamConfiguration &cfg = config_->at(0);

		if (camera_->acquire()) {
			std::cout << "Failed to acquire the camera" << std::endl;
			return TestFail;
		}

		if (camera_->configure(config_.get())) {
			std::cout << "Failed to set default configuration" << std::endl;
			return TestFail;
		}

		Stream *stream = cfg.stream();

		/* Buffers are allocated externally and imported into requests. */
		BufferSource source;
		int ret = source.allocate(cfg);
		if (ret != TestPass)
			return ret;

		for (const std::unique_ptr<FrameBuffer> &buffer : source.buffers()) {
			std::unique_ptr<Request> request = camera_->createRequest();
			if (!request) {
				std::cout << "Failed to create request" << std::endl;
				return TestFail;
			}

			if (request->addBuffer(stream, buffer.get())) {
				std::cout << "Failed to associate buffer with request" << std::endl;
				return TestFail;
			}

			requests_.push_back(std::move(request));
		}

		completeRequestsCount_ = 0;
		completeBuffersCount_ = 0;

		camera_->bufferCompleted.connect(this, &BufferImportTest::bufferComplete);
		camera_->requestCompleted.connect(this, &BufferImportTest::requestComplete);

		if (camera_->start()) {
			std::cout << "Failed to start camera" << std::endl;
			return TestFail;
		}

		for (std::unique_ptr<Request> &request : requests_) {
			if (camera_->queueRequest(request.get())) {
				std::cout << "Failed to queue request" << std::endl;
				return TestFail;
			}
		}

		/* Process events for one second, then check the counters. */
		EventDispatcher *dispatcher = cm_->eventDispatcher();

		Timer timer;
		timer.start(1000);
		while (timer.isRunning())
			dispatcher->processEvents();

		if (completeRequestsCount_ <= cfg.bufferCount * 2) {
			std::cout << "Failed to capture enough frames (got "
				  << completeRequestsCount_ << " expected at least "
				  << cfg.bufferCount * 2 << ")" << std::endl;
			return TestFail;
		}

		if (completeRequestsCount_ != completeBuffersCount_) {
			std::cout << "Number of completed buffers and requests differ" << std::endl;
			return TestFail;
		}

		if (camera_->stop()) {
			std::cout << "Failed to stop camera" << std::endl;
			return TestFail;
		}

		return TestPass;
	}

private:
	std::vector<std::unique_ptr<Request>> requests_;

	/*
	 * Zero-initialize the counters so they are well-defined even if a
	 * completion callback were to fire before run() resets them.
	 */
	unsigned int completeBuffersCount_ = 0;
	unsigned int completeRequestsCount_ = 0;
	std::unique_ptr<CameraConfiguration> config_;
};

} /* namespace */

/* Register the test with the harness (TEST_REGISTER comes from test.h). */
TEST_REGISTER(BufferImportTest)
d='n683' href='#n683'>683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765
/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright (C) 2019, Raspberry Pi (Trading) Limited
 *
 * agc.cpp - AGC/AEC control algorithm
 */

#include <map>

#include "linux/bcm2835-isp.h"

#include "libcamera/internal/log.h"

#include "../awb_status.h"
#include "../device_status.h"
#include "../histogram.hpp"
#include "../lux_status.h"
#include "../metadata.hpp"

#include "agc.hpp"

using namespace RPiController;
using namespace libcamera;

LOG_DEFINE_CATEGORY(RPiAgc)

#define NAME "rpi.agc"

#define PIPELINE_BITS 13 // seems to be a 13-bit pipeline

// Populate the fixed-size weights array from the tuning file, insisting on
// exactly AGC_STATS_SIZE entries.
void AgcMeteringMode::Read(boost::property_tree::ptree const &params)
{
	int count = 0;
	for (auto const &entry : params.get_child("weights")) {
		if (count == AGC_STATS_SIZE)
			throw std::runtime_error("AgcConfig: too many weights");
		weights[count] = entry.second.get_value<double>();
		count++;
	}
	if (count != AGC_STATS_SIZE)
		throw std::runtime_error("AgcConfig: insufficient weights");
}

static std::string
read_metering_modes(std::map<std::string, AgcMeteringMode> &metering_modes,
		    boost::property_tree::ptree const &params)
{
	std::string first;
	for (auto &p : params) {
		AgcMeteringMode metering_mode;
		metering_mode.Read(p.second);
		metering_modes[p.first] = std::move(metering_mode);
		if (first.empty())
			first = p.first;
	}
	return first;
}

// Append every child value of |params| to |list|; returns the resulting
// list size so callers can validate entry counts.
static int read_double_list(std::vector<double> &list,
			    boost::property_tree::ptree const &params)
{
	for (auto const &node : params) {
		double value = node.second.get_value<double>();
		list.push_back(value);
	}
	return list.size();
}

// An exposure profile is a pair of equally-sized lists of shutter times and
// analogue gains, with at least two points in each.
void AgcExposureMode::Read(boost::property_tree::ptree const &params)
{
	int shutter_count =
		read_double_list(shutter, params.get_child("shutter"));
	int gain_count = read_double_list(gain, params.get_child("gain"));
	if (shutter_count < 2 || gain_count < 2)
		throw std::runtime_error(
			"AgcConfig: must have at least two entries in exposure profile");
	if (shutter_count != gain_count)
		throw std::runtime_error(
			"AgcConfig: expect same number of exposure and gain entries in exposure profile");
}

static std::string
read_exposure_modes(std::map<std::string, AgcExposureMode> &exposure_modes,
		    boost::property_tree::ptree const &params)
{
	std::string first;
	for (auto &p : params) {
		AgcExposureMode exposure_mode;
		exposure_mode.Read(p.second);
		exposure_modes[p.first] = std::move(exposure_mode);
		if (first.empty())
			first = p.first;
	}
	return first;
}

// Read a single constraint: its bound type (case-insensitive in the tuning
// file), the quantile range it applies to, and its luminance target curve.
void AgcConstraint::Read(boost::property_tree::ptree const &params)
{
	std::string bound_text = params.get<std::string>("bound", "");
	std::transform(bound_text.begin(), bound_text.end(),
		       bound_text.begin(), ::toupper);
	if (bound_text == "UPPER")
		bound = Bound::UPPER;
	else if (bound_text == "LOWER")
		bound = Bound::LOWER;
	else
		throw std::runtime_error(
			"AGC constraint type should be UPPER or LOWER");
	q_lo = params.get<double>("q_lo");
	q_hi = params.get<double>("q_hi");
	Y_target.Read(params.get_child("y_target"));
}

// A constraint mode is simply an ordered list of constraints.
static AgcConstraintMode
read_constraint_mode(boost::property_tree::ptree const &params)
{
	AgcConstraintMode mode;
	for (auto const &node : params) {
		AgcConstraint constraint;
		constraint.Read(node.second);
		mode.push_back(std::move(constraint));
	}
	return mode;
}

// Parse every constraint mode in the tuning data, returning the name of
// the first one read so it can serve as the default.
static std::string read_constraint_modes(
	std::map<std::string, AgcConstraintMode> &constraint_modes,
	boost::property_tree::ptree const &params)
{
	std::string default_name;
	for (auto const &node : params) {
		if (default_name.empty())
			default_name = node.first;
		constraint_modes[node.first] = read_constraint_mode(node.second);
	}
	return default_name;
}

// Parse the complete AGC tuning section: the metering/exposure/constraint
// mode dictionaries (the first entry of each becomes the default) plus the
// scalar tuning parameters, each of which has a fallback value.
void AgcConfig::Read(boost::property_tree::ptree const &params)
{
	LOG(RPiAgc, Debug) << "AgcConfig";
	default_metering_mode = read_metering_modes(
		metering_modes, params.get_child("metering_modes"));
	default_exposure_mode = read_exposure_modes(
		exposure_modes, params.get_child("exposure_modes"));
	default_constraint_mode = read_constraint_modes(
		constraint_modes, params.get_child("constraint_modes"));
	Y_target.Read(params.get_child("y_target"));
	speed = params.get<double>("speed", 0.2);
	startup_frames = params.get<uint16_t>("startup_frames", 10);
	convergence_frames = params.get<unsigned int>("convergence_frames", 6);
	fast_reduce_threshold =
		params.get<double>("fast_reduce_threshold", 0.4);
	base_ev = params.get<double>("base_ev", 1.0);
	// Start with quite a low value as ramping up is easier than ramping down.
	default_exposure_time = params.get<double>("default_exposure_time", 1000);
	default_analogue_gain = params.get<double>("default_analogue_gain", 1.0);
}

Agc::Agc(Controller *controller)
	: AgcAlgorithm(controller), metering_mode_(nullptr),
	  exposure_mode_(nullptr), constraint_mode_(nullptr),
	  frame_count_(0), lock_count_(0),
	  last_target_exposure_(0.0),
	  ev_(1.0), flicker_period_(0.0),
	  max_shutter_(0), fixed_shutter_(0), fixed_analogue_gain_(0.0)
{
	// Zero the plain-struct state so every field starts well-defined.
	memset(&awb_, 0, sizeof(awb_));
	// Setting status_.total_exposure_value_ to zero initially tells us
	// it's not been calculated yet (i.e. Process hasn't yet run).
	memset(&status_, 0, sizeof(status_));
	status_.ev = ev_;
	memset(&last_device_status_, 0, sizeof(last_device_status_));
}

// Identify this algorithm to the controller framework.
char const *Agc::Name() const
{
	return NAME;
}

// Read the tuning data and select the default metering, exposure and
// constraint modes (the first of each listed in the tuning file).
void Agc::Read(boost::property_tree::ptree const &params)
{
	LOG(RPiAgc, Debug) << "Agc";
	config_.Read(params);
	// Set the config's defaults (which are the first ones it read) as our
	// current modes, until someone changes them.  (they're all known to
	// exist at this point)
	metering_mode_name_ = config_.default_metering_mode;
	metering_mode_ = &config_.metering_modes[metering_mode_name_];
	exposure_mode_name_ = config_.default_exposure_mode;
	exposure_mode_ = &config_.exposure_modes[exposure_mode_name_];
	constraint_mode_name_ = config_.default_constraint_mode;
	constraint_mode_ = &config_.constraint_modes[constraint_mode_name_];
	// Set up the "last shutter/gain" values, in case AGC starts "disabled".
	status_.shutter_time = config_.default_exposure_time;
	status_.analogue_gain = config_.default_analogue_gain;
}

// Pausing is emulated in Pause() by pinning the shutter/gain at their
// current values, so the algorithm never reports itself as paused.
bool Agc::IsPaused() const
{
	return false;
}

// "Pause" AGC by pinning the shutter and gain at the values most recently
// written to the status.
void Agc::Pause()
{
	fixed_analogue_gain_ = status_.analogue_gain;
	fixed_shutter_ = status_.shutter_time;
}

// Clear the fixed shutter/gain so the algorithm runs freely again.
void Agc::Resume()
{
	fixed_analogue_gain_ = 0;
	fixed_shutter_ = 0;
}

unsigned int Agc::GetConvergenceFrames() const
{
	// With both shutter and gain explicitly fixed there is nothing to
	// converge, so no frames need to be dropped.
	bool fully_fixed = fixed_shutter_ && fixed_analogue_gain_;
	return fully_fixed ? 0 : config_.convergence_frames;
}

// Record the requested exposure compensation; applied by housekeepConfig().
void Agc::SetEv(double ev)
{
	ev_ = ev;
}

// Record the flicker period; applied by housekeepConfig().
void Agc::SetFlickerPeriod(double flicker_period)
{
	flicker_period_ = flicker_period;
}

// Record the maximum shutter time used by clipShutter().
void Agc::SetMaxShutter(double max_shutter)
{
	max_shutter_ = max_shutter;
}

// Pin the shutter time to a fixed (clipped) value.
void Agc::SetFixedShutter(double fixed_shutter)
{
	fixed_shutter_ = fixed_shutter;
	// Also reflect the value in the status, in case someone calls Pause()
	// straight after.
	status_.shutter_time = clipShutter(fixed_shutter_);
}

// Pin the analogue gain to a fixed value.
void Agc::SetFixedAnalogueGain(double fixed_analogue_gain)
{
	fixed_analogue_gain_ = fixed_analogue_gain;
	// Also reflect the value in the status, in case someone calls Pause()
	// straight after.
	status_.analogue_gain = fixed_analogue_gain;
}

// Select a metering mode by name; resolved (and validated) later in
// housekeepConfig().
void Agc::SetMeteringMode(std::string const &metering_mode_name)
{
	metering_mode_name_ = metering_mode_name;
}

// Select an exposure profile by name; resolved (and validated) later in
// housekeepConfig().
void Agc::SetExposureMode(std::string const &exposure_mode_name)
{
	exposure_mode_name_ = exposure_mode_name;
}

// Select a constraint mode by name; resolved (and validated) later in
// housekeepConfig().
void Agc::SetConstraintMode(std::string const &constraint_mode_name)
{
	constraint_mode_name_ = constraint_mode_name;
}

// Handle a camera mode switch. Depending on the current state this either
// resets the algorithm to fully fixed shutter/gain values, re-divides the
// previously computed total exposure, or (on first startup) writes out
// initial values. In all cases an AGC status is published so the values
// take effect immediately.
void Agc::SwitchMode([[maybe_unused]] CameraMode const &camera_mode,
		     Metadata *metadata)
{
	housekeepConfig();

	double fixed_shutter = clipShutter(fixed_shutter_);
	if (fixed_shutter != 0.0 && fixed_analogue_gain_ != 0.0) {
		// We're going to reset the algorithm here with these fixed values.

		fetchAwbStatus(metadata);
		double min_colour_gain = std::min({ awb_.gain_r, awb_.gain_g, awb_.gain_b, 1.0 });
		ASSERT(min_colour_gain != 0.0);

		// This is the equivalent of computeTargetExposure and applyDigitalGain.
		target_.total_exposure_no_dg = fixed_shutter * fixed_analogue_gain_;
		target_.total_exposure = target_.total_exposure_no_dg / min_colour_gain;

		// Equivalent of filterExposure. This resets any "history".
		filtered_ = target_;

		// Equivalent of divideUpExposure.
		filtered_.shutter = fixed_shutter;
		filtered_.analogue_gain = fixed_analogue_gain_;
	} else if (status_.total_exposure_value) {
		// On a mode switch, it's possible the exposure profile could change,
		// or a fixed exposure/gain might be set so we divide up the exposure/
		// gain again, but we don't change any target values.
		divideUpExposure();
	} else {
		// We come through here on startup, when at least one of the shutter
		// or gain has not been fixed. We must still write those values out so
		// that they will be applied immediately. We supply some arbitrary defaults
		// for any that weren't set.

		// Equivalent of divideUpExposure.
		filtered_.shutter = fixed_shutter ? fixed_shutter : config_.default_exposure_time;
		filtered_.analogue_gain = fixed_analogue_gain_ ? fixed_analogue_gain_ : config_.default_analogue_gain;
	}

	writeAndFinish(metadata, false);
}

// Per-frame preparation: compute the digital gain needed to bridge the gap
// between the exposure the sensor actually delivered and the total exposure
// Process() asked for, then publish the AGC status for this frame.
void Agc::Prepare(Metadata *image_metadata)
{
	status_.digital_gain = 1.0;
	fetchAwbStatus(image_metadata); // always fetch it so that Process knows it's been done

	if (status_.total_exposure_value) {
		// Process has run, so we have meaningful values.
		DeviceStatus device_status;
		if (image_metadata->Get("device.status", device_status) == 0) {
			double actual_exposure = device_status.shutter_speed *
						 device_status.analogue_gain;
			if (actual_exposure) {
				status_.digital_gain =
					status_.total_exposure_value /
					actual_exposure;
				LOG(RPiAgc, Debug) << "Want total exposure " << status_.total_exposure_value;
				// Never ask for a gain < 1.0, and also impose
				// some upper limit. Make it customisable?
				status_.digital_gain = std::max(
					1.0,
					std::min(status_.digital_gain, 4.0));
				LOG(RPiAgc, Debug) << "Actual exposure " << actual_exposure;
				LOG(RPiAgc, Debug) << "Use digital_gain " << status_.digital_gain;
				LOG(RPiAgc, Debug) << "Effective exposure " << actual_exposure * status_.digital_gain;
				// Decide whether AEC/AGC has converged.
				updateLockStatus(device_status);
			}
		} else
			LOG(RPiAgc, Warning) << Name() << ": no device metadata";
		image_metadata->Set("agc.status", status_);
	}
}

// Per-frame statistics processing: run the full AGC pipeline, each stage of
// which is described inline below. The stage ordering matters — each step
// consumes the results of the previous one.
void Agc::Process(StatisticsPtr &stats, Metadata *image_metadata)
{
	frame_count_++;
	// First a little bit of housekeeping, fetching up-to-date settings and
	// configuration, that kind of thing.
	housekeepConfig();
	// Get the current exposure values for the frame that's just arrived.
	fetchCurrentExposure(image_metadata);
	// Compute the total gain we require relative to the current exposure.
	double gain, target_Y;
	computeGain(stats.get(), image_metadata, gain, target_Y);
	// Now compute the target (final) exposure which we think we want.
	computeTargetExposure(gain);
	// Some of the exposure has to be applied as digital gain, so work out
	// what that is. This function also tells us whether it's decided to
	// "desaturate" the image more quickly.
	bool desaturate = applyDigitalGain(gain, target_Y);
	// The results have to be filtered so as not to change too rapidly.
	filterExposure(desaturate);
	// The last thing is to divide up the exposure value into a shutter time
	// and analogue_gain, according to the current exposure mode.
	divideUpExposure();
	// Finally advertise what we've done.
	writeAndFinish(image_metadata, desaturate);
}

// Decide whether AEC/AGC has converged ("locked"). The lock count rises
// while the delivered shutter/gain and our target exposure stay within a
// small error band of their values on the previous frame, and resets once
// any of them strays beyond a larger (RESET_MARGIN) band; values in between
// leave the count unchanged. We report locked after MAX_LOCK_COUNT
// consecutive stable frames.
void Agc::updateLockStatus(DeviceStatus const &device_status)
{
	const double ERROR_FACTOR = 0.10; // make these customisable?
	const int MAX_LOCK_COUNT = 5;
	// Reset "lock count" when we exceed this multiple of ERROR_FACTOR
	const double RESET_MARGIN = 1.5;

	// Add 200us to the exposure time error to allow for line quantisation.
	double exposure_error = last_device_status_.shutter_speed * ERROR_FACTOR + 200;
	double gain_error = last_device_status_.analogue_gain * ERROR_FACTOR;
	double target_error = last_target_exposure_ * ERROR_FACTOR;

	// Note that we don't know the exposure/gain limits of the sensor, so
	// the values we keep requesting may be unachievable. For this reason
	// we only insist that we're close to values in the past few frames.
	if (device_status.shutter_speed > last_device_status_.shutter_speed - exposure_error &&
	    device_status.shutter_speed < last_device_status_.shutter_speed + exposure_error &&
	    device_status.analogue_gain > last_device_status_.analogue_gain - gain_error &&
	    device_status.analogue_gain < last_device_status_.analogue_gain + gain_error &&
	    status_.target_exposure_value > last_target_exposure_ - target_error &&
	    status_.target_exposure_value < last_target_exposure_ + target_error)
		lock_count_ = std::min(lock_count_ + 1, MAX_LOCK_COUNT);
	else if (device_status.shutter_speed < last_device_status_.shutter_speed - RESET_MARGIN * exposure_error ||
		 device_status.shutter_speed > last_device_status_.shutter_speed + RESET_MARGIN * exposure_error ||
		 device_status.analogue_gain < last_device_status_.analogue_gain - RESET_MARGIN * gain_error ||
		 device_status.analogue_gain > last_device_status_.analogue_gain + RESET_MARGIN * gain_error ||
		 status_.target_exposure_value < last_target_exposure_ - RESET_MARGIN * target_error ||
		 status_.target_exposure_value > last_target_exposure_ + RESET_MARGIN * target_error)
		lock_count_ = 0;

	last_device_status_ = device_status;
	last_target_exposure_ = status_.target_exposure_value;

	LOG(RPiAgc, Debug) << "Lock count updated to " << lock_count_;
	status_.locked = lock_count_ == MAX_LOCK_COUNT;
}

// Copy |s| into the fixed-size buffer |d| of |size| bytes, truncating if
// necessary and always NUL-terminating the result. A zero-sized buffer is
// left untouched: previously size - 1 would wrap around to SIZE_MAX and
// cause a wildly out-of-bounds copy.
static void copy_string(std::string const &s, char *d, size_t size)
{
	if (size == 0)
		return;
	size_t length = s.copy(d, size - 1);
	d[length] = '\0';
}

// Refresh the copy of all externally settable parameters held in status_,
// and re-resolve the metering/exposure/constraint mode pointers whenever
// the requested names have changed. Throws if a requested mode name is not
// present in the tuning data.
void Agc::housekeepConfig()
{
	// First fetch all the up-to-date settings, so no one else has to do it.
	status_.ev = ev_;
	status_.fixed_shutter = clipShutter(fixed_shutter_);
	status_.fixed_analogue_gain = fixed_analogue_gain_;
	status_.flicker_period = flicker_period_;
	LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixed_shutter "
			   << status_.fixed_shutter << " fixed_analogue_gain "
			   << status_.fixed_analogue_gain;
	// Make sure the "mode" pointers point to the up-to-date things, if
	// they've changed.
	if (strcmp(metering_mode_name_.c_str(), status_.metering_mode)) {
		auto it = config_.metering_modes.find(metering_mode_name_);
		if (it == config_.metering_modes.end())
			throw std::runtime_error("Agc: no metering mode " +
						 metering_mode_name_);
		metering_mode_ = &it->second;
		copy_string(metering_mode_name_, status_.metering_mode,
			    sizeof(status_.metering_mode));
	}
	if (strcmp(exposure_mode_name_.c_str(), status_.exposure_mode)) {
		auto it = config_.exposure_modes.find(exposure_mode_name_);
		if (it == config_.exposure_modes.end())
			throw std::runtime_error("Agc: no exposure profile " +
						 exposure_mode_name_);
		exposure_mode_ = &it->second;
		copy_string(exposure_mode_name_, status_.exposure_mode,
			    sizeof(status_.exposure_mode));
	}
	if (strcmp(constraint_mode_name_.c_str(), status_.constraint_mode)) {
		auto it =
			config_.constraint_modes.find(constraint_mode_name_);
		if (it == config_.constraint_modes.end())
			throw std::runtime_error("Agc: no constraint list " +
						 constraint_mode_name_);
		constraint_mode_ = &it->second;
		copy_string(constraint_mode_name_, status_.constraint_mode,
			    sizeof(status_.constraint_mode));
	}
	LOG(RPiAgc, Debug) << "exposure_mode "
			   << exposure_mode_name_ << " constraint_mode "
			   << constraint_mode_name_ << " metering_mode "
			   << metering_mode_name_;
}

// Capture the exposure actually used for the frame that has just arrived,
// taking the metadata lock once for both lookups. Throws if the device
// status is missing, as AGC cannot run without it.
void Agc::fetchCurrentExposure(Metadata *image_metadata)
{
	std::unique_lock<Metadata> lock(*image_metadata);
	DeviceStatus *device_status =
		image_metadata->GetLocked<DeviceStatus>("device.status");
	if (!device_status)
		throw std::runtime_error("Agc: no device metadata");
	current_.shutter = device_status->shutter_speed;
	current_.analogue_gain = device_status->analogue_gain;
	// Our own AGC status may legitimately be absent (e.g. on the very
	// first frame), in which case assume no exposure history.
	AgcStatus *agc_status =
		image_metadata->GetLocked<AgcStatus>("agc.status");
	current_.total_exposure = agc_status ? agc_status->total_exposure_value : 0;
	current_.total_exposure_no_dg = current_.shutter * current_.analogue_gain;
}

// Fetch the most recent AWB result, defaulting to unity gains so that a
// missing AWB status is harmless.
void Agc::fetchAwbStatus(Metadata *image_metadata)
{
	awb_.gain_r = awb_.gain_g = awb_.gain_b = 1.0;
	if (image_metadata->Get("awb.status", awb_) != 0)
		LOG(RPiAgc, Warning) << "Agc: no AWB status found";
}

static double compute_initial_Y(bcm2835_isp_stats *stats, AwbStatus const &awb,
				double weights[], double gain)
{
	bcm2835_isp_stats_region *regions = stats->agc_stats;
	// Note how the calculation below means that equal weights give you
	// "average" metering (i.e. all pixels equally important).
	double R_sum = 0, G_sum = 0, B_sum = 0, pixel_sum = 0;
	for (int i = 0; i < AGC_STATS_SIZE; i++) {
		double counted = regions[i].counted;