/*
 * NOTE(review): the following page metadata was captured from the cgit web
 * interface when this file was scraped, and is not part of the original
 * source. Preserved here as a comment so the file compiles:
 *   repository: libcamera/vivid.git — libcamera pipeline handler for VIVID,
 *   git repository hosting on libcamera.org
 *   page: summary / refs / log / tree / commit / diff
 *   diffstat (limited to 'src/qcam/assets/feathericons/plus-square.svg'):
 *   0 files changed, 0 insertions, 0 deletions
 */
/* SPDX-License-Identifier: BSD-2-Clause */
/*
* Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
*
* rpi.cpp - Raspberry Pi Image Processing Algorithms
*/
#include <algorithm>
#include <fcntl.h>
#include <math.h>
#include <stdint.h>
#include <string.h>
#include <sys/mman.h>
#include <libcamera/buffer.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/file_descriptor.h>
#include <libcamera/ipa/ipa_interface.h>
#include <libcamera/ipa/ipa_module_info.h>
#include <libcamera/ipa/raspberrypi.h>
#include <libcamera/request.h>
#include <libcamera/span.h>
#include <libipa/ipa_interface_wrapper.h>
#include "libcamera/internal/camera_sensor.h"
#include "libcamera/internal/log.h"
#include "libcamera/internal/utils.h"
#include <linux/bcm2835-isp.h>
#include "agc_algorithm.hpp"
#include "agc_status.h"
#include "alsc_status.h"
#include "awb_algorithm.hpp"
#include "awb_status.h"
#include "black_level_status.h"
#include "cam_helper.hpp"
#include "ccm_algorithm.hpp"
#include "ccm_status.h"
#include "contrast_algorithm.hpp"
#include "contrast_status.h"
#include "controller.hpp"
#include "dpc_status.h"
#include "focus_status.h"
#include "geq_status.h"
#include "lux_status.h"
#include "metadata.hpp"
#include "noise_status.h"
#include "sdn_status.h"
#include "sharpen_algorithm.hpp"
#include "sharpen_status.h"
namespace libcamera {
/* Configure the sensor with these values initially. */
constexpr double DefaultAnalogueGain = 1.0;	/* unity gain */
/* Initial exposure; presumably microseconds (20 ms) — TODO confirm units. */
constexpr unsigned int DefaultExposureTime = 20000;

/* Log category used by all LOG(IPARPI, ...) statements in this file. */
LOG_DEFINE_CATEGORY(IPARPI)
/*
 * Raspberry Pi IPA. Implements libcamera's IPAInterface by delegating image
 * processing decisions to the Raspberry Pi controller algorithms and a
 * sensor-specific CamHelper, and translating their output into V4L2 controls
 * for the sensor (Unicam) and ISP devices.
 */
class IPARPi : public IPAInterface
{
public:
	IPARPi()
		: lastMode_({}), controller_(), controllerInit_(false),
		  frameCount_(0), checkCount_(0), mistrustCount_(0),
		  lsTable_(nullptr)
	{
	}

	~IPARPi()
	{
		/* Release the lens shading table mapping made in configure(). */
		if (lsTable_)
			munmap(lsTable_, RPi::MaxLsGridSize);
	}

	int init(const IPASettings &settings) override;
	int start() override { return 0; }
	void stop() override {}

	void configure(const CameraSensorInfo &sensorInfo,
		       const std::map<unsigned int, IPAStream> &streamConfig,
		       const std::map<unsigned int, const ControlInfoMap &> &entityControls,
		       const IPAOperationData &data,
		       IPAOperationData *response) override;
	void mapBuffers(const std::vector<IPABuffer> &buffers) override;
	void unmapBuffers(const std::vector<unsigned int> &ids) override;
	void processEvent(const IPAOperationData &event) override;

private:
	/* Translate sensor info from the pipeline handler into mode_. */
	void setMode(const CameraSensorInfo &sensorInfo);
	void queueRequest(const ControlList &controls);
	void returnEmbeddedBuffer(unsigned int bufferId);
	void prepareISP(unsigned int bufferId);
	/* Publish per-frame algorithm results into libcameraMetadata_. */
	void reportMetadata();
	bool parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &deviceStatus);
	void processStats(unsigned int bufferId);
	/* Helpers that convert algorithm status structs into device controls. */
	void applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls);
	void applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls);
	void applyDG(const struct AgcStatus *dgStatus, ControlList &ctrls);
	void applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls);
	void applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, ControlList &ctrls);
	void applyGamma(const struct ContrastStatus *contrastStatus, ControlList &ctrls);
	void applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls);
	void applyDenoise(const struct SdnStatus *denoiseStatus, ControlList &ctrls);
	void applySharpen(const struct SharpenStatus *sharpenStatus, ControlList &ctrls);
	void applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls);
	void applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls);
	void resampleTable(uint16_t dest[], double const src[12][16], int destW, int destH);

	/* Buffers shared with the pipeline handler, indexed by buffer id. */
	std::map<unsigned int, FrameBuffer> buffers_;
	/* CPU mappings of plane 0 of each buffer, indexed by buffer id. */
	std::map<unsigned int, void *> buffersMemory_;

	ControlInfoMap unicamCtrls_;
	ControlInfoMap ispCtrls_;
	ControlList libcameraMetadata_;

	/* IPA configuration. */
	std::string tuningFile_;

	/* Camera sensor params. */
	CameraMode mode_;
	CameraMode lastMode_;

	/* Raspberry Pi controller specific defines. */
	std::unique_ptr<RPiController::CamHelper> helper_;
	RPiController::Controller controller_;
	bool controllerInit_;
	RPiController::Metadata rpiMetadata_;

	/*
	 * We count frames to decide if the frame must be hidden (e.g. from
	 * display) or mistrusted (i.e. not given to the control algos).
	 */
	uint64_t frameCount_;

	/* For checking the sequencing of Prepare/Process calls. */
	uint64_t checkCount_;

	/* How many frames we should avoid running control algos on. */
	unsigned int mistrustCount_;

	/* LS table allocation passed in from the pipeline handler. */
	FileDescriptor lsTableHandle_;
	void *lsTable_;
};
/*
 * Record the tuning file path handed over by the pipeline handler. The file
 * itself is only parsed later, in configure(), when the controller is first
 * initialised.
 *
 * Always succeeds and returns 0.
 */
int IPARPi::init(const IPASettings &settings)
{
	tuningFile_.assign(settings.configurationFile);

	return 0;
}
/*
 * Translate the sensor information supplied by the pipeline handler into the
 * controller's CameraMode representation (mode_), including the scaling and
 * binning heuristics the algorithms rely on.
 *
 * \param[in] sensorInfo Sensor geometry and timing for the selected mode.
 */
void IPARPi::setMode(const CameraSensorInfo &sensorInfo)
{
	mode_.bitdepth = sensorInfo.bitsPerPixel;
	mode_.width = sensorInfo.outputSize.width;
	mode_.height = sensorInfo.outputSize.height;
	mode_.sensor_width = sensorInfo.activeAreaSize.width;
	mode_.sensor_height = sensorInfo.activeAreaSize.height;
	mode_.crop_x = sensorInfo.analogCrop.x;
	mode_.crop_y = sensorInfo.analogCrop.y;

	/*
	 * Calculate scaling parameters. The scale_[xy] factors are determined
	 * by the ratio between the crop rectangle size and the output size.
	 *
	 * Cast to double before dividing: the crop and output dimensions are
	 * integer types, so plain division would truncate fractional scale
	 * factors (e.g. 1.5 would become 1), skewing the binning heuristic
	 * and the noise factor below.
	 */
	mode_.scale_x = static_cast<double>(sensorInfo.analogCrop.width) /
			sensorInfo.outputSize.width;
	mode_.scale_y = static_cast<double>(sensorInfo.analogCrop.height) /
			sensorInfo.outputSize.height;

	/*
	 * We're not told by the pipeline handler how scaling is split between
	 * binning and digital scaling. For now, as a heuristic, assume that
	 * downscaling up to 2 is achieved through binning, and that any
	 * additional scaling is achieved through digital scaling.
	 *
	 * \todo Get the pipeline handle to provide the full data
	 */
	mode_.bin_x = std::min(2, static_cast<int>(mode_.scale_x));
	mode_.bin_y = std::min(2, static_cast<int>(mode_.scale_y));

	/* The noise factor is the square root of the total binning factor. */
	mode_.noise_factor = sqrt(mode_.bin_x * mode_.bin_y);

	/*
	 * Calculate the line length in nanoseconds as the ratio between
	 * the line length in pixels and the pixel rate.
	 */
	mode_.line_length = 1e9 * sensorInfo.lineLength / sensorInfo.pixelRate;
}
/*
 * Configure the IPA for a (possibly new) camera mode.
 *
 * Stores the control maps of the two entities, creates the sensor-specific
 * CamHelper on first use, rebuilds mode_ from sensorInfo, maps the lens
 * shading table if one is passed in, and (re)initialises the controller.
 * Results the pipeline handler needs (sensor delays, frames to drop, initial
 * sensor controls) are returned through *result.
 */
void IPARPi::configure(const CameraSensorInfo &sensorInfo,
		       [[maybe_unused]] const std::map<unsigned int, IPAStream> &streamConfig,
		       const std::map<unsigned int, const ControlInfoMap &> &entityControls,
		       const IPAOperationData &ipaConfig,
		       IPAOperationData *result)
{
	if (entityControls.empty())
		return;

	result->operation = 0;

	/* Entity 0 feeds unicamCtrls_, entity 1 feeds ispCtrls_. */
	unicamCtrls_ = entityControls.at(0);
	ispCtrls_ = entityControls.at(1);

	/* Setup a metadata ControlList to output metadata. */
	libcameraMetadata_ = ControlList(controls::controls);

	/*
	 * Load the "helper" for this sensor. This tells us all the device specific stuff
	 * that the kernel driver doesn't. We only do this the first time; we don't need
	 * to re-parse the metadata after a simple mode-switch for no reason.
	 */
	std::string cameraName(sensorInfo.model);
	if (!helper_) {
		helper_ = std::unique_ptr<RPiController::CamHelper>(RPiController::CamHelper::Create(cameraName));

		/*
		 * Pass out the sensor config to the pipeline handler in order
		 * to setup the staggered writer class.
		 */
		int gainDelay, exposureDelay, sensorMetadata;
		helper_->GetDelays(exposureDelay, gainDelay);
		sensorMetadata = helper_->SensorEmbeddedDataPresent();
		/* Order matters: the pipeline handler reads these back positionally. */
		result->data.push_back(gainDelay);
		result->data.push_back(exposureDelay);
		result->data.push_back(sensorMetadata);
		result->operation |= RPi::IPA_CONFIG_STAGGERED_WRITE;
	}

	/* Re-assemble camera mode using the sensor info. */
	setMode(sensorInfo);

	/*
	 * The ipaConfig.data always gives us the user transform first. Note that
	 * this will always make the LS table pointer (if present) element 1.
	 * NOTE(review): ipaConfig.data is indexed without a size check —
	 * presumably the pipeline handler guarantees at least one element.
	 */
	mode_.transform = static_cast<libcamera::Transform>(ipaConfig.data[0]);

	/* Store the lens shading table pointer and handle if available. */
	if (ipaConfig.operation & RPi::IPA_CONFIG_LS_TABLE) {
		/* Remove any previous table, if there was one. */
		if (lsTable_) {
			munmap(lsTable_, RPi::MaxLsGridSize);
			lsTable_ = nullptr;
		}

		/* Map the LS table buffer into user space (now element 1). */
		lsTableHandle_ = FileDescriptor(ipaConfig.data[1]);
		if (lsTableHandle_.isValid()) {
			lsTable_ = mmap(nullptr, RPi::MaxLsGridSize, PROT_READ | PROT_WRITE,
					MAP_SHARED, lsTableHandle_.fd(), 0);
			if (lsTable_ == MAP_FAILED) {
				/* Non-fatal: carry on without lens shading. */
				LOG(IPARPI, Error) << "dmaHeap mmap failure for LS table.";
				lsTable_ = nullptr;
			}
		}
	}

	/* Pass the camera mode to the CamHelper to setup algorithms. */
	helper_->SetCameraMode(mode_);

	/*
	 * Initialise frame counts, and decide how many frames must be hidden or
	 *"mistrusted", which depends on whether this is a startup from cold,
	 * or merely a mode switch in a running system.
	 */
	frameCount_ = 0;
	checkCount_ = 0;
	unsigned int dropFrame = 0;
	if (controllerInit_) {
		dropFrame = helper_->HideFramesModeSwitch();
		mistrustCount_ = helper_->MistrustFramesModeSwitch();
	} else {
		dropFrame = helper_->HideFramesStartup();
		mistrustCount_ = helper_->MistrustFramesStartup();
	}
	result->data.push_back(dropFrame);
	result->operation |= RPi::IPA_CONFIG_DROP_FRAMES;

	/* These zero values mean not program anything (unless overwritten). */
	struct AgcStatus agcStatus;
	agcStatus.shutter_time = 0.0;
	agcStatus.analogue_gain = 0.0;

	if (!controllerInit_) {
		/* Load the tuning file for this sensor. */
		controller_.Read(tuningFile_.c_str());
		controller_.Initialise();
		controllerInit_ = true;

		/* Supply initial values for gain and exposure. */
		agcStatus.shutter_time = DefaultExposureTime;
		agcStatus.analogue_gain = DefaultAnalogueGain;
	}

	RPiController::Metadata metadata;
	controller_.SwitchMode(mode_, &metadata);

	/* SwitchMode may supply updated exposure/gain values to use. */
	metadata.Get("agc.status", agcStatus);
	if (agcStatus.shutter_time != 0.0 && agcStatus.analogue_gain != 0.0) {
		/* Push initial sensor controls back for the pipeline handler to apply. */
		ControlList ctrls(unicamCtrls_);
		applyAGC(&agcStatus, ctrls);
		result->controls.push_back(ctrls);
		result->operation |= RPi::IPA_CONFIG_SENSOR;
	}

	lastMode_ = mode_;
}
/*
 * Register buffers shared by the pipeline handler and map the first plane of
 * each into this process, keyed by buffer id, for later CPU access.
 *
 * A failed mmap is logged as Fatal.
 */
void IPARPi::mapBuffers(const std::vector<IPABuffer> &buffers)
{
	for (const IPABuffer &ipaBuffer : buffers) {
		auto inserted = buffers_.emplace(std::piecewise_construct,
						 std::forward_as_tuple(ipaBuffer.id),
						 std::forward_as_tuple(ipaBuffer.planes));
		const FrameBuffer &framebuffer = inserted.first->second;
		const auto &plane = framebuffer.planes()[0];

		void *mem = mmap(nullptr, plane.length, PROT_READ | PROT_WRITE,
				 MAP_SHARED, plane.fd.fd(), 0);
		buffersMemory_[ipaBuffer.id] = mem;

		if (mem == MAP_FAILED) {
			int ret = -errno;
			LOG(IPARPI, Fatal) << "Failed to mmap buffer: " << strerror(-ret);
		}
	}
}
/*
 * Undo mapBuffers() for the given buffer ids: unmap the CPU mapping and drop
 * both bookkeeping entries. Unknown ids are silently ignored.
 */
void IPARPi::unmapBuffers(const std::vector<unsigned int> &ids)
{
	for (unsigned int id : ids) {
		auto it = buffers_.find(id);
		if (it == buffers_.end())
			continue;

		munmap(buffersMemory_[id], it->second.planes()[0].length);
		buffersMemory_.erase(id);
		buffers_.erase(it);
	}
}
/*
 * Dispatch an asynchronous event from the pipeline handler. Each event type
 * carries buffer ids (and/or controls) in the IPAOperationData payload, and
 * each reply is emitted back through the queueFrameAction signal.
 */
void IPARPi::processEvent(const IPAOperationData &event)
{
	switch (event.operation) {
	case RPi::IPA_EVENT_SIGNAL_STAT_READY: {
		unsigned int bufferId = event.data[0];

		/* Every ISP-prepare should be followed by exactly one stats event. */
		if (++checkCount_ != frameCount_) /* assert here? */
			LOG(IPARPI, Error) << "WARNING: Prepare/Process mismatch!!!";
		/* Skip the control algorithms while frames are still mistrusted. */
		if (frameCount_ > mistrustCount_)
			processStats(bufferId);

		reportMetadata();

		/* Tell the pipeline handler the stats/metadata cycle is done. */
		IPAOperationData op;
		op.operation = RPi::IPA_ACTION_STATS_METADATA_COMPLETE;
		op.data = { bufferId & RPi::BufferMask::ID };
		op.controls = { libcameraMetadata_ };
		queueFrameAction.emit(0, op);
		break;
	}

	case RPi::IPA_EVENT_SIGNAL_ISP_PREPARE: {
		unsigned int embeddedbufferId = event.data[0];
		unsigned int bayerbufferId = event.data[1];

		/*
		 * At start-up, or after a mode-switch, we may want to
		 * avoid running the control algos for a few frames in case
		 * they are "unreliable".
		 */
		prepareISP(embeddedbufferId);
		frameCount_++;

		/* Ready to push the input buffer into the ISP. */
		IPAOperationData op;
		op.operation = RPi::IPA_ACTION_RUN_ISP;
		op.data = { bayerbufferId & RPi::BufferMask::ID };
		queueFrameAction.emit(0, op);
		break;
	}

	case RPi::IPA_EVENT_QUEUE_REQUEST: {
		queueRequest(event.controls[0]);
		break;
	}

	default:
		LOG(IPARPI, Error) << "Unknown event " << event.operation;
		break;
	}
}
/*
 * Copy the latest algorithm results out of rpiMetadata_ into the
 * libcameraMetadata_ ControlList that is returned to applications. Each
 * status entry is optional: absent entries simply leave the corresponding
 * control unset.
 */
void IPARPi::reportMetadata()
{
	/* Hold the metadata lock for the duration; use the *Locked accessors. */
	std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);

	/*
	 * Certain information about the current frame and how it will be
	 * processed can be extracted and placed into the libcamera metadata
	 * buffer, where an application could query it.
	 */
	DeviceStatus *deviceStatus = rpiMetadata_.GetLocked<DeviceStatus>("device.status");
	if (deviceStatus) {
		libcameraMetadata_.set(controls::ExposureTime, deviceStatus->shutter_speed);
		libcameraMetadata_.set(controls::AnalogueGain, deviceStatus->analogue_gain);
	}

	AgcStatus *agcStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
	if (agcStatus)
		libcameraMetadata_.set(controls::AeLocked, agcStatus->locked);

	LuxStatus *luxStatus = rpiMetadata_.GetLocked<LuxStatus>("lux.status");
	if (luxStatus)
		libcameraMetadata_.set(controls::Lux, luxStatus->lux);

	AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
	if (awbStatus) {
		libcameraMetadata_.set(controls::ColourGains, { static_cast<float>(awbStatus->gain_r),
								static_cast<float>(awbStatus->gain_b) });
		libcameraMetadata_.set(controls::ColourTemperature, awbStatus->temperature_K);
	}

	/*
	 * Four black levels are reported; the single measured green level is
	 * used for both green channels.
	 */
	BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
	if (blackLevelStatus)
		libcameraMetadata_.set(controls::SensorBlackLevels,
				       { static_cast<int32_t>(blackLevelStatus->black_level_r),
					 static_cast<int32_t>(blackLevelStatus->black_level_g),
					 static_cast<int32_t>(blackLevelStatus->black_level_g),
					 static_cast<int32_t>(blackLevelStatus->black_level_b) });

	FocusStatus *focusStatus = rpiMetadata_.GetLocked<FocusStatus>("focus.status");
	if (focusStatus && focusStatus->num == 12) {
		/*
		 * We get a 4x3 grid of regions by default. Calculate the average
		 * FoM over the central two positions to give an overall scene FoM.
		 * This can change later if it is not deemed suitable.
		 */
		int32_t focusFoM = (focusStatus->focus_measures[5] + focusStatus->focus_measures[6]) / 2;
		libcameraMetadata_.set(controls::FocusFoM, focusFoM);
	}

	CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
	if (ccmStatus) {
		/* Widen the 3x3 matrix coefficients to float for the control. */
		float m[9];
		for (unsigned int i = 0; i < 9; i++)
			m[i] = ccmStatus->matrix[i];
		libcameraMetadata_.set(controls::ColourCorrectionMatrix, m);
	}
}
/*