/* SPDX-License-Identifier: Apache-2.0 */
/*
 * Copyright (C) 2020, Google Inc.
 *
 * ipu3.cpp - IPU3 Image Processing Algorithms
 */

#include <algorithm>
#include <array>
#include <chrono>
#include <cstdint>
#include <cstring>
#include <map>
#include <string>
#include <vector>

#include <linux/intel-ipu3.h>
#include <linux/v4l2-controls.h>

#include <libcamera/base/log.h>
#include <libcamera/base/utils.h>

#include <libcamera/control_ids.h>
#include <libcamera/framebuffer.h>
#include <libcamera/ipa/ipa_interface.h>
#include <libcamera/ipa/ipa_module_info.h>
#include <libcamera/ipa/ipu3_ipa_interface.h>

#include "libcamera-helpers/mapped_framebuffer.h"

/* IA AIQ Wrapper API */
#include "aic/aic.h"
#include "aiq/aiq.h"

#include "binary_data.h"

namespace libcamera {

LOG_DEFINE_CATEGORY(IPAIPU3)

namespace ipa::ipu3 {

class IPAIPU3 : public IPAIPU3Interface
{
public:
	int init(const IPASettings &settings,
		 const IPACameraSensorInfo &sensorInfo,
		 const ControlInfoMap &sensorControls,
		 ControlInfoMap *ipaControls) override;

	int start() override;
	void stop() override {}

	int configure(const IPAConfigInfo &configInfo,
		      ControlInfoMap *ipaControls) override;

	void mapBuffers(const std::vector<IPABuffer> &buffers) override;
	void unmapBuffers(const std::vector<unsigned int> &ids) override;

	void queueRequest(const uint32_t frame, const ControlList &controls) override;
	void fillParamsBuffer(const uint32_t frame, const uint32_t bufferId) override;
	void processStatsBuffer(const uint32_t frame, const int64_t frameTimestamp,
				const uint32_t bufferId,
				const ControlList &sensorControls) override;

private:
	void updateControls(const IPACameraSensorInfo &sensorInfo,
			    const ControlInfoMap &sensorControls,
			    ControlInfoMap *ipaControls);
	void runAiq(unsigned int frame);
	void setControls(unsigned int frame);

	std::map<unsigned int, MappedFrameBuffer> buffers_;

	ControlInfoMap ctrls_;

	IPACameraSensorInfo sensorInfo_;

	/* Camera sensor controls. */
	uint32_t exposure_;
	uint32_t minExposure_;
	uint32_t maxExposure_;
	uint32_t gain_;
	uint32_t minGain_;
	uint32_t maxGain_;
	int32_t lensPosition_;
	/* The Intel AF library relies on this timestamp to wait for lens movement. */
	uint64_t lensMovementStartTime_;

	/* Intel library instances. */
	aiq::AIQ aiq_;
	aic::AIC aic_;

	/* Temporary storage until we have a FrameContext object / struct. */
	aiq::AiqInputParameters aiqInputParams_;
	aiq::AiqResultsRingBuffer resultsHistory_;

	BinaryData aiqb_;
	BinaryData nvm_;
	BinaryData aiqd_;
};

/*
 * Compute camera controls using the sensor information and the sensor
 * V4L2 controls.
 *
 * Some of the camera controls are computed by the pipeline handler, others
 * by the IPA module which is in charge of handling, for example, the
 * exposure time and the frame duration.
 *
 * This function computes:
 * - controls::ExposureTime
 * - controls::FrameDurationLimits
 */
void IPAIPU3::updateControls(const IPACameraSensorInfo &sensorInfo,
			     const ControlInfoMap &sensorControls,
			     ControlInfoMap *ipaControls)
{
	ControlInfoMap::Map controls{};

	/*
	 * Compute the exposure time limits by using the line length and pixel
	 * rate converted to microseconds. Use the V4L2_CID_EXPOSURE control to
	 * get the exposure min, max and default, and convert them from lines
	 * to microseconds.
	 */
	double lineDuration = sensorInfo.minLineLength / (sensorInfo.pixelRate / 1e6);
	const ControlInfo &v4l2Exposure = sensorControls.find(V4L2_CID_EXPOSURE)->second;
	int32_t minExposure = v4l2Exposure.min().get<int32_t>() * lineDuration;
	int32_t maxExposure = v4l2Exposure.max().get<int32_t>() * lineDuration;
	int32_t defExposure = v4l2Exposure.def().get<int32_t>() * lineDuration;
	controls[&controls::ExposureTime] = ControlInfo(minExposure, maxExposure,
							defExposure);

	/*
	 * Compute the frame duration limits.
	 *
	 * The frame length is computed assuming a fixed line length combined
	 * with the vertical frame sizes.
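	 *
	 * For example (illustrative numbers only): with a 1920-pixel-wide
	 * output, 160 pixels of horizontal blanking and a pixel rate of
	 * 240 Mpix/s, a frame height of 1215 lines gives a duration of
	 * (1920 + 160) * 1215 / 240 ~= 10530 us, i.e. roughly 95 fps.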
	 */
	const ControlInfo &v4l2HBlank = sensorControls.find(V4L2_CID_HBLANK)->second;
	uint32_t hblank = v4l2HBlank.def().get<int32_t>();
	uint32_t lineLength = sensorInfo.outputSize.width + hblank;

	const ControlInfo &v4l2VBlank = sensorControls.find(V4L2_CID_VBLANK)->second;
	std::array<uint32_t, 3> frameHeights{
		v4l2VBlank.min().get<int32_t>() + sensorInfo.outputSize.height,
		v4l2VBlank.max().get<int32_t>() + sensorInfo.outputSize.height,
		v4l2VBlank.def().get<int32_t>() + sensorInfo.outputSize.height,
	};

	std::array<int64_t, 3> frameDurations;
	for (unsigned int i = 0; i < frameHeights.size(); ++i) {
		uint64_t frameSize = lineLength * frameHeights[i];
		frameDurations[i] = frameSize / (sensorInfo.pixelRate / 1000000U);
	}

	controls[&controls::FrameDurationLimits] = ControlInfo(frameDurations[0],
							       frameDurations[1],
							       frameDurations[2]);

	*ipaControls = ControlInfoMap(std::move(controls), controls::controls);
}

int IPAIPU3::init(const IPASettings &settings,
		  const IPACameraSensorInfo &sensorInfo,
		  const ControlInfoMap &sensorControls,
		  ControlInfoMap *ipaControls)
{
	int ret;

	/*
	 * Temporary mapping of the sensor name to the AIQB data file.
	 *
	 * The tuningPath used here is specific to ChromeOS.
	 *
	 * \todo This mapping table should be handled more generically
	 * or through the configuration interfaces.
	 */
	std::map<std::string, std::string> aiqb_paths = {
		{ "ov13858", "00ov13858.aiqb" },
		{ "ov5670", "01ov5670.aiqb" },
		{ "imx258", "00imx258.aiqb" },
	};

	LOG(IPAIPU3, Info) << "Initialising IPA IPU3 for " << settings.sensorModel;

	auto it = aiqb_paths.find(settings.sensorModel);
	if (it == aiqb_paths.end()) {
		LOG(IPAIPU3, Error) << "Failed to identify tuning data";
		return -EINVAL;
	}

	std::string tuningPath = "/etc/camera/ipu3/";
	std::string tuningFile = tuningPath + it->second;
	LOG(IPAIPU3, Info) << "Using tuning file: " << tuningFile;

	ret = aiqb_.load(tuningFile.c_str());
	if (ret) {
		LOG(IPAIPU3, Error) << "Failed to load AIQB";
		return -ENODATA;
	}

	/*
	 * \todo nvm_ and aiqd_ are left as empty nullptrs.
	 * These need to be identified and loaded as required.
	 */
	ret = aiq_.init(aiqb_, nvm_, aiqd_);
	if (ret)
		return ret;

	ret = aic_.init(aiqb_);
	if (ret)
		return ret;

	aiqInputParams_.init();

	/* Initialize controls. */
	updateControls(sensorInfo, sensorControls, ipaControls);

	return 0;
}

int IPAIPU3::start()
{
	runAiq(0);
	setControls(0);

	return 0;
}

int IPAIPU3::configure(const IPAConfigInfo &configInfo,
		       ControlInfoMap *ipaControls)
{
	if (configInfo.sensorControls.empty()) {
		LOG(IPAIPU3, Error) << "No sensor controls provided";
		return -ENODATA;
	}

	sensorInfo_ = configInfo.sensorInfo;

	/*
	 * Compute the sensor V4L2 controls to be used by the algorithms and
	 * to be set on the sensor.
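	 *
	 * The exposure and analogue gain limits are taken from the
	 * V4L2_CID_EXPOSURE and V4L2_CID_ANALOGUE_GAIN controls below, with
	 * the minimum of each clamped to 1 so that the algorithms never
	 * request a zero exposure or a zero gain code.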
	 */
	ctrls_ = configInfo.sensorControls;

	const auto itExp = ctrls_.find(V4L2_CID_EXPOSURE);
	if (itExp == ctrls_.end()) {
		LOG(IPAIPU3, Error) << "Can't find exposure control";
		return -EINVAL;
	}

	const auto itGain = ctrls_.find(V4L2_CID_ANALOGUE_GAIN);
	if (itGain == ctrls_.end()) {
		LOG(IPAIPU3, Error) << "Can't find gain control";
		return -EINVAL;
	}

	minExposure_ = std::max(itExp->second.min().get<int32_t>(), 1);
	maxExposure_ = itExp->second.max().get<int32_t>();
	exposure_ = maxExposure_;

	minGain_ = std::max(itGain->second.min().get<int32_t>(), 1);
	maxGain_ = itGain->second.max().get<int32_t>();
	gain_ = maxGain_;

	lensMovementStartTime_ = 0;
	lensPosition_ = 0;

	int ret;
	ret = aiq_.configure();
	if (ret) {
		LOG(IPAIPU3, Error) << "Failed to configure the AIQ";
		return ret;
	}

	ret = aiqInputParams_.configure(configInfo);
	if (ret) {
		LOG(IPAIPU3, Error) << "Failed to configure AiqInputParams";
		return ret;
	}

	ret = aic_.configure(configInfo);
	if (ret) {
		LOG(IPAIPU3, Error) << "Failed to configure the AIC";
		return ret;
	}

	/* Set AE/AWB/AF defaults; this typically might not belong here. */
	aiqInputParams_.setAeAwbAfDefaults();

	/* Update the camera controls using the new sensor settings. */
	updateControls(sensorInfo_, ctrls_, ipaControls);

	resultsHistory_.reset();

	return 0;
}

void IPAIPU3::mapBuffers(const std::vector<IPABuffer> &buffers)
{
	/*
	 * \todo Statistics buffers could be mapped read-only if they
	 * could be easily identified.
	 */
	for (const IPABuffer &buffer : buffers) {
		const FrameBuffer fb(buffer.planes);
		buffers_.emplace(buffer.id,
				 MappedFrameBuffer(&fb, MappedFrameBuffer::MapFlag::ReadWrite));
	}
}

void IPAIPU3::unmapBuffers(const std::vector<unsigned int> &ids)
{
	for (unsigned int id : ids) {
		auto it = buffers_.find(id);
		if (it == buffers_.end())
			continue;

		buffers_.erase(it);
	}
}

void IPAIPU3::queueRequest([[maybe_unused]] const uint32_t frame,
			   [[maybe_unused]] const ControlList &controls)
{
	/* \todo Start processing for 'frame' based on 'controls'. */
}

void IPAIPU3::fillParamsBuffer(const uint32_t frame, const uint32_t bufferId)
{
	auto it = buffers_.find(bufferId);
	if (it == buffers_.end()) {
		LOG(IPAIPU3, Error) << "Could not find params buffer";
		return;
	}

	Span<uint8_t> mem = it->second.maps()[0];
	ipu3_uapi_params *params =
		reinterpret_cast<ipu3_uapi_params *>(mem.data());

	/* Prepare the parameters buffer. */
	memset(params, 0, sizeof(*params));

	/*
	 * Call into the AIQ object, and set up the library with any requested
	 * controls or settings from the incoming request.
	 *
	 * (Statistics are fed into the library as a separate event
	 * when available.)
	 *
	 * - Run the algorithms.
	 *
	 * - Fill the params buffer with the results of the algorithms.
	 */
	runAiq(frame);

	aiq::AiqResults &latestResults = resultsHistory_.latest();
	aic_.updateRuntimeParams(latestResults);
	aic_.run(params);

	setControls(frame);

	paramsBufferReady.emit(frame);
}

void IPAIPU3::processStatsBuffer(const uint32_t frame, const int64_t frameTimestamp,
				 const uint32_t bufferId,
				 const ControlList &sensorControls)
{
	auto it = buffers_.find(bufferId);
	if (it == buffers_.end()) {
		LOG(IPAIPU3, Error) << "Could not find stats buffer";
		return;
	}

	Span<uint8_t> mem = it->second.maps()[0];
	const ipu3_uapi_stats_3a *stats =
		reinterpret_cast<const ipu3_uapi_stats_3a *>(mem.data());

	ControlList ctrls(controls::controls);

	/* \todo React to statistics and update the internal state machine. */

	/*
	 * The statistics (*stats) come from the IPU3 hardware. We need to feed
	 * this data into the AIQ library.
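	 *
	 * The buffer layout is the kernel's struct ipu3_uapi_stats_3a. A valid
	 * capture timestamp is also required: it is passed to
	 * AIQ::setStatistics() below (converted to microseconds) and, as noted
	 * in runAiq(), the AF algorithm compares it against the lens movement
	 * start time.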
	 */
	ASSERT(frameTimestamp > 0);

	/*
	 * The AE algorithm expects the statistics to be set with their
	 * corresponding AE result, i.e. the AE result whose exposure time and
	 * analogue gain match the effective sensor controls of the statistics.
	 * Search for the required AE result in the results history and combine
	 * it with the latest result as the input to AIQ::setStatistics().
	 */
	int32_t effectiveExpo = 0;
	int32_t effectiveGain = 0;
	ControlValue ctrlValue;

	ctrlValue = sensorControls.get(V4L2_CID_EXPOSURE);
	if (!ctrlValue.isNone())
		effectiveExpo = ctrlValue.get<int32_t>();

	ctrlValue = sensorControls.get(V4L2_CID_ANALOGUE_GAIN);
	if (!ctrlValue.isNone())
		effectiveGain = ctrlValue.get<int32_t>();

	auto pred = [effectiveExpo, effectiveGain](aiq::AiqResults &result) {
		ia_aiq_exposure_sensor_parameters *sensorExposure =
			result.ae()->exposures[0].sensor_exposure;

		return (effectiveExpo == sensorExposure->coarse_integration_time ||
			effectiveGain == sensorExposure->analog_gain_code_global);
	};

	aiq::AiqResults &latestResults = resultsHistory_.latest();
	aiq::AiqResults &aeMatchedResults =
		resultsHistory_.searchBackward(pred, latestResults);

	aiq::AiqResults combinedResults = latestResults;
	combinedResults.setAe(aeMatchedResults.ae());

	/* The AIQ library expects the timestamp in microseconds. */
	aiq_.setStatistics(frame, frameTimestamp / 1000, combinedResults, stats);

	/* Set the frame duration from the exposure results. */
	ia_aiq_exposure_sensor_parameters *sensorExposure =
		combinedResults.ae()->exposures->sensor_exposure;
	int64_t frameDuration = (sensorExposure->line_length_pixels *
				 sensorExposure->frame_length_lines) /
				(sensorInfo_.pixelRate / 1e6);
	ctrls.set(controls::FrameDuration, frameDuration);

	metadataReady.emit(frame, ctrls);
}

void IPAIPU3::runAiq(unsigned int frame)
{
	/* Run the algorithms into/using this context structure. */
	resultsHistory_.extendOne();
	aiq::AiqResults &latestResults = resultsHistory_.latest();

	/* \todo Refactor the AiqInputParameters interface to set the following parameters. */
	aiqInputParams_.afParams.lens_position = lensPosition_;
	aiqInputParams_.afParams.lens_movement_start_timestamp = lensMovementStartTime_;

	aiq_.run2a(frame, aiqInputParams_, latestResults);

	exposure_ = latestResults.ae()->exposures[0].sensor_exposure->coarse_integration_time;
	gain_ = latestResults.ae()->exposures[0].sensor_exposure->analog_gain_code_global;

	/*
	 * The AF algorithm compares the timestamp of the start of the lens
	 * movement with that of the generated statistics to estimate whether
	 * the next lens position should be produced.
	 * \todo Use the lens movement start time reported by the pipeline handler.
	 */
	if (lensPosition_ != latestResults.af()->next_lens_position) {
		utils::time_point time = utils::clock::now();
		uint64_t usecs = std::chrono::duration_cast<std::chrono::microseconds>(
					 time.time_since_epoch()).count();
		lensMovementStartTime_ = usecs;
	}
	lensPosition_ = latestResults.af()->next_lens_position;
}

void IPAIPU3::setControls(unsigned int frame)
{
	ControlList sensorCtrls(ctrls_);
	sensorCtrls.set(V4L2_CID_EXPOSURE, static_cast<int32_t>(exposure_));
	sensorCtrls.set(V4L2_CID_ANALOGUE_GAIN, static_cast<int32_t>(gain_));

	ControlList lensCtrls(ctrls_);
	lensCtrls.set(V4L2_CID_FOCUS_ABSOLUTE, lensPosition_);

	setSensorControls.emit(frame, sensorCtrls, lensCtrls);
}

} /* namespace ipa::ipu3 */

/*
 * External IPA module interface
 */
extern "C" {
const struct IPAModuleInfo ipaModuleInfo = {
	IPA_MODULE_API_VERSION,
	1,
	"PipelineHandlerIPU3",
	"ipu3",
};

IPAInterface *ipaCreate()
{
	return new ipa::ipu3::IPAIPU3();
}
} /* extern "C" */

} /* namespace libcamera */