/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * rkisp1.cpp - Pipeline handler for Rockchip ISP1
 */

#include <algorithm>
#include <chrono>
#include <map>
#include <memory>
#include <numeric>
#include <queue>
#include <set>
#include <vector>

#include <linux/media-bus-format.h>

#include <libcamera/buffer.h>
#include <libcamera/camera.h>
#include <libcamera/control_ids.h>
#include <libcamera/formats.h>
#include <libcamera/ipa/rkisp1.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>

#include "libcamera/internal/camera_sensor.h"
#include "libcamera/internal/device_enumerator.h"
#include "libcamera/internal/ipa_manager.h"
#include "libcamera/internal/log.h"
#include "libcamera/internal/media_device.h"
#include "libcamera/internal/pipeline_handler.h"
#include "libcamera/internal/utils.h"
#include "libcamera/internal/v4l2_subdevice.h"
#include "libcamera/internal/v4l2_videodevice.h"

#include "rkisp1_path.h"
#include "timeline.h"

namespace libcamera {

LOG_DEFINE_CATEGORY(RkISP1)

class PipelineHandlerRkISP1;
class RkISP1ActionQueueBuffers;
class RkISP1CameraData;

enum RkISP1ActionType {
	SetSensor,
	SOE,
	QueueBuffers,
};

struct RkISP1FrameInfo {
	unsigned int frame;
	Request *request;

	FrameBuffer *paramBuffer;
	FrameBuffer *statBuffer;
	FrameBuffer *mainPathBuffer;
	FrameBuffer *selfPathBuffer;

	bool paramFilled;
	bool paramDequeued;
	bool metadataProcessed;
};

class RkISP1Frames
{
public:
	RkISP1Frames(PipelineHandler *pipe);

	RkISP1FrameInfo *create(const RkISP1CameraData *data, Request *request);
	int destroy(unsigned int frame);
	void clear();

	RkISP1FrameInfo *find(unsigned int frame);
	RkISP1FrameInfo *find(FrameBuffer *buffer);
	RkISP1FrameInfo *find(Request *request);

private:
	PipelineHandlerRkISP1 *pipe_;
	std::map<unsigned int, RkISP1FrameInfo *> frameInfo_;
};

class RkISP1Timeline : public Timeline
{
public:
	RkISP1Timeline()
		: Timeline()
	{
		setDelay(SetSensor, -1, 5);
		setDelay(SOE, 0, -1);
		setDelay(QueueBuffers, -1, 10);
	}

	void bufferReady(FrameBuffer *buffer)
	{
		/*
		 * Calculate SOE by taking the end of DMA set by the kernel and
		 * applying the time offsets provided by the IPA to find the
		 * best estimate of SOE.
		 */

		ASSERT(frameOffset(SOE) == 0);

		utils::time_point soe = std::chrono::time_point<utils::clock>()
			+ std::chrono::nanoseconds(buffer->metadata().timestamp)
			+ timeOffset(SOE);

		notifyStartOfExposure(buffer->metadata().sequence, soe);
	}

	void setDelay(unsigned int type, int frame, int msdelay)
	{
		utils::duration delay = std::chrono::milliseconds(msdelay);
		setRawDelay(type, frame, delay);
	}
};

class RkISP1CameraData : public CameraData
{
public:
	RkISP1CameraData(PipelineHandler *pipe, RkISP1MainPath *mainPath,
			 RkISP1SelfPath *selfPath)
		: CameraData(pipe), sensor_(nullptr), frame_(0),
		  frameInfo_(pipe), mainPath_(mainPath), selfPath_(selfPath)
	{
	}

	~RkISP1CameraData()
	{
		delete sensor_;
	}

	int loadIPA();

	Stream mainPathStream_;
	Stream selfPathStream_;
	CameraSensor *sensor_;
	unsigned int frame_;
	std::vector<IPABuffer> ipaBuffers_;
	RkISP1Frames frameInfo_;
	RkISP1Timeline timeline_;

	RkISP1MainPath *mainPath_;
	RkISP1SelfPath *selfPath_;

private:
	void queueFrameAction(unsigned int frame,
			      const IPAOperationData &action);

	void metadataReady(unsigned int frame, const ControlList &metadata);
};

class RkISP1CameraConfiguration : public CameraConfiguration
{
public:
	RkISP1CameraConfiguration(Camera *camera, RkISP1CameraData *data);

	Status validate() override;

	const V4L2SubdeviceFormat &sensorFormat() { return sensorFormat_; }

private:
	bool fitsAllPaths(const StreamConfiguration &cfg);

	/*
	 * The RkISP1CameraData instance is guaranteed to be valid as long as
	 * the corresponding Camera instance is valid. In order to borrow a
	 * reference to the camera data, store a new reference to the camera.
	 */
	std::shared_ptr<Camera> camera_;
	const RkISP1CameraData *data_;

	V4L2SubdeviceFormat sensorFormat_;
};

class PipelineHandlerRkISP1 : public PipelineHandler
{
public:
	PipelineHandlerRkISP1(CameraManager *manager);
	~PipelineHandlerRkISP1();

	CameraConfiguration *generateConfiguration(Camera *camera,
		const StreamRoles &roles) override;
	int configure(Camera *camera, CameraConfiguration *config) override;

	int exportFrameBuffers(Camera *camera, Stream *stream,
			       std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;

	int start(Camera *camera) override;
	void stop(Camera *camera) override;

	int queueRequestDevice(Camera *camera, Request *request) override;

	bool match(DeviceEnumerator *enumerator) override;

private:
	RkISP1CameraData *cameraData(const Camera *camera)
	{
		return static_cast<RkISP1CameraData *>(
			PipelineHandler::cameraData(camera));
	}

	friend RkISP1ActionQueueBuffers;
	friend RkISP1CameraData;
	friend RkISP1Frames;

	int initLinks(const Camera *camera, const CameraSensor *sensor,
		      const RkISP1CameraConfiguration &config);
	int createCamera(MediaEntity *sensor);
	void tryCompleteRequest(Request *request);

	void bufferReady(FrameBuffer *buffer);
	void paramReady(FrameBuffer *buffer);
	void statReady(FrameBuffer *buffer);

	int allocateBuffers(Camera *camera);
	int freeBuffers(Camera *camera);

	MediaDevice *media_;
	V4L2Subdevice *isp_;
	V4L2VideoDevice *param_;
	V4L2VideoDevice *stat_;

	RkISP1MainPath mainPath_;
	RkISP1SelfPath selfPath_;

	std::vector<std::unique_ptr<FrameBuffer>> paramBuffers_;
	std::vector<std::unique_ptr<FrameBuffer>> statBuffers_;
	std::queue<FrameBuffer *> availableParamBuffers_;
	std::queue<FrameBuffer *> availableStatBuffers_;

	Camera *activeCamera_;
};

RkISP1Frames::RkISP1Frames(PipelineHandler *pipe)
	: pipe_(static_cast<PipelineHandlerRkISP1 *>(pipe))
{
}

RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *request)
{
	unsigned int frame = data->frame_;

	if (pipe_->availableParamBuffers_.empty()) {
		LOG(RkISP1, Error) << "Parameters buffer underrun";
		return nullptr;
	}
	FrameBuffer *paramBuffer = pipe_->availableParamBuffers_.front();

	if (pipe_->availableStatBuffers_.empty()) {
		LOG(RkISP1, Error) << "Statistics buffer underrun";
		return nullptr;
	}
	FrameBuffer *statBuffer = pipe_->availableStatBuffers_.front();

	FrameBuffer *mainPathBuffer = request->findBuffer(&data->mainPathStream_);
	FrameBuffer *selfPathBuffer = request->findBuffer(&data->selfPathStream_);

	pipe_->availableParamBuffers_.pop();
	pipe_->availableStatBuffers_.pop();

	RkISP1FrameInfo *info = new RkISP1FrameInfo;

	info->frame = frame;
	info->request = request;
	info->paramBuffer = paramBuffer;
	info->mainPathBuffer = mainPathBuffer;
	info->selfPathBuffer = selfPathBuffer;
	info->statBuffer = statBuffer;
	info->paramFilled = false;
	info->paramDequeued = false;
	info->metadataProcessed = false;

	frameInfo_[frame] = info;

	return info;
}

int RkISP1Frames::destroy(unsigned int frame)
{
	RkISP1FrameInfo *info = find(frame);
	if (!info)
		return -ENOENT;

	pipe_->availableParamBuffers_.push(info->paramBuffer);
	pipe_->availableStatBuffers_.push(info->statBuffer);

	frameInfo_.erase(info->frame);

	delete info;

	return 0;
}

void RkISP1Frames::clear()
{
	for (const auto &entry : frameInfo_) {
		RkISP1FrameInfo *info = entry.second;

		pipe_->availableParamBuffers_.push(info->paramBuffer);
		pipe_->availableStatBuffers_.push(info->statBuffer);

		delete info;
	}

	frameInfo_.clear();
}

RkISP1FrameInfo *RkISP1Frames::find(unsigned int frame)
{
	auto itInfo = frameInfo_.find(frame);

	if (itInfo != frameInfo_.end())
		return itInfo->second;

	LOG(RkISP1, Error) << "Can't locate info from frame";

	return nullptr;
}

RkISP1FrameInfo *RkISP1Frames::find(FrameBuffer *buffer)
{
	for (auto &itInfo : frameInfo_) {
		RkISP1FrameInfo *info = itInfo.second;
		if (info->paramBuffer == buffer ||
		    info->statBuffer == buffer ||
		    info->mainPathBuffer == buffer ||
		    info->selfPathBuffer == buffer)
			return info;
	}

	LOG(RkISP1, Error) << "Can't locate info from buffer";

	return nullptr;
}

RkISP1FrameInfo *RkISP1Frames::find(Request *request)
{
	for (auto &itInfo : frameInfo_) {
		RkISP1FrameInfo *info = itInfo.second;

		if (info->request == request)
			return info;
	}

	LOG(RkISP1, Error) << "Can't locate info from request";

	return nullptr;
}

class RkISP1ActionSetSensor : public FrameAction
{
public:
	RkISP1ActionSetSensor(unsigned int frame, CameraSensor *sensor,
			      const ControlList &controls)
		: FrameAction(frame, SetSensor), sensor_(sensor),
		  controls_(controls)
	{
	}

protected:
	void run() override
	{
		sensor_->setControls(&controls_);
	}

private:
	CameraSensor *sensor_;
	ControlList controls_;
};

class RkISP1ActionQueueBuffers : public FrameAction
{
public:
	RkISP1ActionQueueBuffers(unsigned int frame, RkISP1CameraData *data,
				 PipelineHandlerRkISP1 *pipe)
		: FrameAction(frame, QueueBuffers), data_(data), pipe_(pipe)
	{
	}

protected:
	void run() override
	{
		RkISP1FrameInfo *info = data_->frameInfo_.find(frame());
		if (!info)
			LOG(RkISP1, Fatal) << "Frame not known";

		/*
		 * \todo If parameters are not filled, a better method to
		 * handle the situation than queuing a buffer with unknown
		 * content should be used.
		 *
		 * It seems excessive to keep an internal zeroed scratch
		 * parameters buffer around as this should not happen unless
		 * the device is under too much load. Perhaps failing the
		 * request and returning it to the application with an error
		 * code is better than queuing it to the hardware?
		 */
		if (!info->paramFilled)
			LOG(RkISP1, Error)
				<< "Parameters not ready on time for frame "
				<< frame();

		pipe_->param_->queueBuffer(info->paramBuffer);
		pipe_->stat_->queueBuffer(info->statBuffer);

		if (info->mainPathBuffer)
			pipe_->mainPath_.queueBuffer(info->mainPathBuffer);

		if (info->selfPathBuffer)
			pipe_->selfPath_.queueBuffer(info->selfPathBuffer);
	}

private:
	RkISP1CameraData *data_;
	PipelineHandlerRkISP1 *pipe_;
};

int RkISP1CameraData::loadIPA()
{
	ipa_ = IPAManager::createIPA(pipe_, 1, 1);
	if (!ipa_)
		return -ENOENT;

	ipa_->queueFrameAction.connect(this,
				       &RkISP1CameraData::queueFrameAction);

	ipa_->init(IPASettings{});

	return 0;
}

void RkISP1CameraData::queueFrameAction(unsigned int frame,
					const IPAOperationData &action)
{
	switch (action.operation) {
	case RKISP1_IPA_ACTION_V4L2_SET: {
		const ControlList &controls = action.controls[0];
		timeline_.scheduleAction(std::make_unique<RkISP1ActionSetSensor>(frame,
										 sensor_,
										 controls));
		break;
	}
	case RKISP1_IPA_ACTION_PARAM_FILLED: {
		RkISP1FrameInfo *info = frameInfo_.find(frame);
		if (info)
			info->paramFilled = true;
		break;
	}
	case RKISP1_IPA_ACTION_METADATA:
		metadataReady(frame, action.controls[0]);
		break;
	default:
		LOG(RkISP1, Error) << "Unknown action " << action.operation;
		break;
	}
}

void RkISP1CameraData::metadataReady(unsigned int frame, const ControlList &metadata)
{
	PipelineHandlerRkISP1 *pipe =
		static_cast<PipelineHandlerRkISP1 *>(pipe_);

	RkISP1FrameInfo *info = frameInfo_.find(frame);
	if (!info)
		return;

	info->request->metadata() = metadata;
	info->metadataProcessed = true;

	pipe->tryCompleteRequest(info->request);
}

RkISP1CameraConfiguration::RkISP1CameraConfiguration(Camera *camera,
						     RkISP1CameraData *data)
	: CameraConfiguration()
{
	camera_ = camera->shared_from_this();
	data_ = data;
}

bool RkISP1CameraConfiguration::fitsAllPaths(const StreamConfiguration &cfg)
{
	StreamConfiguration config;

	config = cfg;
	if (data_->mainPath_->validate(&config) != Valid)
		return false;

	config = cfg;
	if (data_->selfPath_->validate(&config) != Valid)
		return false;

	return true;
}

CameraConfiguration::Status RkISP1CameraConfiguration::validate()
{
	const CameraSensor *sensor = data_->sensor_;
	Status status = Valid;

	if (config_.empty())
		return Invalid;

	if (transform != Transform::Identity) {
		transform = Transform::Identity;
		status = Adjusted;
	}

	/* Cap the number of entries to the available streams. */
	if (config_.size() > 2) {
		config_.resize(2);
		status = Adjusted;
	}

	/*
	 * If there is more than one stream in the configuration, figure out
	 * the order to evaluate the streams. The first stream has the highest
	 * priority, but if both the main path and the self path can satisfy
	 * it, evaluate the second stream first, as the first stream is
	 * guaranteed to work with whichever path is not used by the second
	 * one.
	 */
	std::vector<unsigned int> order(config_.size());
	std::iota(order.begin(), order.end(), 0);
	if (config_.size() == 2 && fitsAllPaths(config_[0]))
		std::reverse(order.begin(), order.end());

	bool mainPathAvailable = true;
	bool selfPathAvailable = true;
	for (unsigned int index : order) {
		StreamConfiguration &cfg = config_[index];

		/* Try to match the stream without adjusting the configuration. */
		if (mainPathAvailable) {
			StreamConfiguration tryCfg = cfg;
			if (data_->mainPath_->validate(&tryCfg) == Valid) {
				mainPathAvailable = false;
				cfg = tryCfg;
				cfg.setStream(const_cast<Stream *>(&data_->mainPathStream_));
				continue;
			}
		}

		if (selfPathAvailable) {
			StreamConfiguration tryCfg = cfg;
			if (data_->selfPath_->validate(&tryCfg) == Valid) {
				selfPathAvailable = false;
				cfg = tryCfg;
				cfg.setStream(const_cast<Stream *>(&data_->selfPathStream_));
				continue;
			}
		}

		/* Try to match the stream, allowing the configuration to be adjusted. */
		if (mainPathAvailable) {
			StreamConfiguration tryCfg = cfg;
			if (data_->mainPath_->validate(&tryCfg) == Adjusted) {
				mainPathAvailable = false;
				cfg = tryCfg;
				cfg.setStream(const_cast<Stream *>(&data_->mainPathStream_));
				status = Adjusted;
				continue;
			}
		}

		if (selfPathAvailable) {
			StreamConfiguration tryCfg = cfg;
			if (data_->selfPath_->validate(&tryCfg) == Adjusted) {
				selfPathAvailable = false;
				cfg = tryCfg;
				cfg.setStream(const_cast<Stream *>(&data_->selfPathStream_));
				status = Adjusted;
				continue;
			}
		}

		/* All paths rejected the configuration. */
		LOG(RkISP1, Debug) << "Camera configuration not supported "
				   << cfg.toString();
		return Invalid;
	}

	/* Select the sensor format. */
	Size maxSize;
	for (const StreamConfiguration &cfg : config_)
		maxSize = std::max(maxSize, cfg.size);

	sensorFormat_ = sensor->getFormat({ MEDIA_BUS_FMT_SBGGR12_1X12,
					    MEDIA_BUS_FMT_SGBRG12_1X12,
					    MEDIA_BUS_FMT_SGRBG12_1X12,
					    MEDIA_BUS_FMT_SRGGB12_1X12,
					    MEDIA_BUS_FMT_SBGGR10_1X10,
					    MEDIA_BUS_FMT_SGBRG10_1X10,
					    MEDIA_BUS_FMT_SGRBG10_1X10,
					    MEDIA_BUS_FMT_SRGGB10_1X10,
					    MEDIA_BUS_FMT_SBGGR8_1X8,
					    MEDIA_BUS_FMT_SGBRG8_1X8,
					    MEDIA_BUS_FMT_SGRBG8_1X8,
					    MEDIA_BUS_FMT_SRGGB8_1X8 },
					  maxSize);
	if (sensorFormat_.size.isNull())
		sensorFormat_.size = sensor->resolution();

	return status;
}

PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
	: PipelineHandler(manager), isp_(nullptr), param_(nullptr),
	  stat_(nullptr)
{
}

PipelineHandlerRkISP1::~PipelineHandlerRkISP1()
{
	delete param_;
	delete stat_;
	delete isp_;
}

/* -----------------------------------------------------------------------------
 * Pipeline Operations
 */

CameraConfiguration *PipelineHandlerRkISP1::generateConfiguration(Camera *camera,
	const StreamRoles &roles)
{
	RkISP1CameraData *data = cameraData(camera);
	CameraConfiguration *config = new RkISP1CameraConfiguration(camera, data);

	if (roles.empty())
		return config;

	bool mainPathAvailable = true;
	bool selfPathAvailable = true;
	for (const StreamRole role : roles) {
		bool useMainPath;

		switch (role) {
		case StreamRole::StillCapture: {
			useMainPath = mainPathAvailable;
			break;
		}
		case StreamRole::Viewfinder:
		case StreamRole::VideoRecording: {
			useMainPath = !selfPathAvailable;
			break;
		}
		default:
			LOG(RkISP1, Warning)
				<< "Requested stream role not supported: " << role;
			delete config;
			return nullptr;
		}

		StreamConfiguration cfg;
		if (useMainPath) {
			cfg = data->mainPath_->generateConfiguration(
				data->sensor_->resolution());
			mainPathAvailable = false;
		} else {
			cfg = data->selfPath_->generateConfiguration(
				data->sensor_->resolution());
			selfPathAvailable = false;
		}

		config->addConfiguration(cfg);
	}

	config->validate();

	return config;
}

int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
{
	RkISP1CameraConfiguration *config =
		static_cast<RkISP1CameraConfiguration *>(c);
	RkISP1CameraData *data = cameraData(camera);
	CameraSensor *sensor = data->sensor_;
	int ret;

	ret = initLinks(camera, sensor, *config);
	if (ret)
		return ret;

	/*
	 * Configure the format on the sensor output and propagate it through
	 * the pipeline.
	 */
	V4L2SubdeviceFormat format = config->sensorFormat();
	LOG(RkISP1, Debug) << "Configuring sensor with " << format.toString();

	ret = sensor->setFormat(&format);
	if (ret < 0)
		return ret;

	LOG(RkISP1, Debug) << "Sensor configured with " << format.toString();

	ret = isp_->setFormat(0, &format);
	if (ret < 0)
		return ret;

	Rectangle rect(0, 0, format.size);
	ret = isp_->setSelection(0, V4L2_SEL_TGT_CROP, &rect);
	if (ret < 0)
		return ret;

	LOG(RkISP1, Debug)
		<< "ISP input pad configured with " << format.toString();

	/* YUYV8_2X8 is required on the ISP source path pad for YUV output. */
	format.mbus_code = MEDIA_BUS_FMT_YUYV8_2X8;
	LOG(RkISP1, Debug)
		<< "Configuring ISP output pad with " << format.toString();

	ret = isp_->setFormat(2, &format);
	if (ret < 0)
		return ret;

	LOG(RkISP1, Debug)
		<< "ISP output pad configured with " << format.toString();

	for (const StreamConfiguration &cfg : *config) {
		if (cfg.stream() == &data->mainPathStream_)
			ret = mainPath_.configure(cfg, format);
		else
			ret = selfPath_.configure(cfg, format);

		if (ret)
			return ret;
	}

	V4L2DeviceFormat paramFormat = {};
	paramFormat.fourcc = V4L2PixelFormat(V4L2_META_FMT_RK_ISP1_PARAMS);
	ret = param_->setFormat(&paramFormat);
	if (ret)
		return ret;

	V4L2DeviceFormat statFormat = {};
	statFormat.fourcc = V4L2PixelFormat(V4L2_META_FMT_RK_ISP1_STAT_3A);
	ret = stat_->setFormat(&statFormat);
	if (ret)
		return ret;

	return 0;
}

int PipelineHandlerRkISP1::exportFrameBuffers(Camera *camera, Stream *stream,
					      std::vector<std::unique_ptr<FrameBuffer>> *buffers)
{
	RkISP1CameraData *data = cameraData(camera);
	unsigned int count = stream->configuration().bufferCount;

	if (stream == &data->mainPathStream_)
		return mainPath_.exportBuffers(count, buffers);
	else if (stream == &data->selfPathStream_)
		return selfPath_.exportBuffers(count, buffers);

	return -EINVAL;
}

int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
{
	RkISP1CameraData *data = cameraData(camera);
	unsigned int ipaBufferId = 1;
	int ret;

	unsigned int maxCount = std::max({
		data->mainPathStream_.configuration().bufferCount,
		data->selfPathStream_.configuration().bufferCount,
	});

	ret = param_->allocateBuffers(maxCount, &paramBuffers_);
	if (ret < 0)
		goto error;

	ret = stat_->allocateBuffers(maxCount, &statBuffers_);
	if (ret < 0)
		goto error;

	for (std::unique_ptr<FrameBuffer> &buffer : paramBuffers_) {
		buffer->setCookie(ipaBufferId++);
		data->ipaBuffers_.push_back({ .id = buffer->cookie(),
					      .planes = buffer->planes() });
		availableParamBuffers_.push(buffer.get());
	}

	for (std::unique_ptr<FrameBuffer> &buffer : statBuffers_) {
		buffer->setCookie(ipaBufferId++);
		data->ipaBuffers_.push_back({ .id = buffer->cookie(),
					      .planes = buffer->planes() });
		availableStatBuffers_.push(buffer.get());
	}

	data->ipa_->mapBuffers(data->ipaBuffers_);

	return 0;

error:
	paramBuffers_.clear();
	statBuffers_.clear();

	return ret;
}

int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
{
	RkISP1CameraData *data = cameraData(camera);

	while (!availableStatBuffers_.empty())
		availableStatBuffers_.pop();

	while (!availableParamBuffers_.empty())
		availableParamBuffers_.pop();

	paramBuffers_.clear();
	statBuffers_.clear();

	std::vector<unsigned int> ids;
	for (IPABuffer &ipabuf : data->ipaBuffers_)
		ids.push_back(ipabuf.id);

	data->ipa_->unmapBuffers(ids);
	data->ipaBuffers_.clear();

	if (param_->releaseBuffers())
		LOG(RkISP1, Error) << "Failed to release parameters buffers";

	if (stat_->releaseBuffers())
		LOG(RkISP1, Error) << "Failed to release stat buffers";

	return 0;
}

int PipelineHandlerRkISP1::start(Camera *camera)
{
	RkISP1CameraData *data = cameraData(camera);
	int ret;

	/* Allocate buffers for internal pipeline usage. */
	ret = allocateBuffers(camera);
	if (ret)
		return ret;

	ret = data->ipa_->start();
	if (ret) {
		freeBuffers(camera);
		LOG(RkISP1, Error)
			<< "Failed to start IPA " << camera->id();
		return ret;
	}

	data->frame_ = 0;

	ret = param_->streamOn();
	if (ret) {
		data->ipa_->stop();
		freeBuffers(camera);
		LOG(RkISP1, Error)
			<< "Failed to start parameters " << camera->id();
		return ret;
	}

	ret = stat_->streamOn();
	if (ret) {
		param_->streamOff();
		data->ipa_->stop();
		freeBuffers(camera);
		LOG(RkISP1, Error)
			<< "Failed to start statistics " << camera->id();
		return ret;
	}

	std::map<unsigned int, IPAStream> streamConfig;

	if (data->mainPath_->isEnabled()) {
		ret = mainPath_.start();
		if (ret) {
			param_->streamOff();
			stat_->streamOff();
			data->ipa_->stop();
			freeBuffers(camera);
			return ret;
		}

		streamConfig[0] = {
			.pixelFormat = data->mainPathStream_.configuration().pixelFormat,
			.size = data->mainPathStream_.configuration().size,
		};
	}

	if (data->selfPath_->isEnabled()) {
		ret = selfPath_.start();
		if (ret) {
			mainPath_.stop();
			param_->streamOff();
			stat_->streamOff();
			data->ipa_->stop();
			freeBuffers(camera);
			return ret;
		}

		streamConfig[1] = {
			.pixelFormat = data->selfPathStream_.configuration().pixelFormat,
			.size = data->selfPathStream_.configuration().size,
		};
	}

	activeCamera_ = camera;

	/* Inform the IPA of the stream configuration and sensor controls. */
	CameraSensorInfo sensorInfo = {};
	ret = data->sensor_->sensorInfo(&sensorInfo);
	if (ret) {
		/* \todo Turn this into a hard failure. */
		LOG(RkISP1, Warning) << "Camera sensor information not available";
		sensorInfo = {};
		ret = 0;
	}

	std::map<unsigned int, const ControlInfoMap &> entityControls;
	entityControls.emplace(0, data->sensor_->controls());

	IPAOperationData ipaConfig;
	data->ipa_->configure(sensorInfo, streamConfig, entityControls,
			      ipaConfig, nullptr);

	return ret;
}

void PipelineHandlerRkISP1::stop(Camera *camera)
{
	RkISP1CameraData *data = cameraData(camera);
	int ret;

	selfPath_.stop();
	mainPath_.stop();

	ret = stat_->streamOff();
	if (ret)
		LOG(RkISP1, Warning)
			<< "Failed to stop statistics for " << camera->id();

	ret = param_->streamOff();
	if (ret)
		LOG(RkISP1, Warning)
			<< "Failed to stop parameters for " << camera->id();

	data->ipa_->stop();

	data->timeline_.reset();

	data->frameInfo_.clear();

	freeBuffers(camera);

	activeCamera_ = nullptr;
}

int PipelineHandlerRkISP1::queueRequestDevice(Camera *camera, Request *request)
{
	RkISP1CameraData *data = cameraData(camera);

	RkISP1FrameInfo *info = data->frameInfo_.create(data, request);
	if (!info)
		return -ENOENT;

	IPAOperationData op;
	op.operation = RKISP1_IPA_EVENT_QUEUE_REQUEST;
	op.data = { data->frame_, info->paramBuffer->cookie() };
	op.controls = { request->controls() };
	data->ipa_->processEvent(op);

	data->timeline_.scheduleAction(std::make_unique<RkISP1ActionQueueBuffers>(data->frame_,
										  data,
										  this));

	data->frame_++;

	return 0;
}

/* -----------------------------------------------------------------------------
 * Match and Setup
 */

int PipelineHandlerRkISP1::initLinks(const Camera *camera,
				     const CameraSensor *sensor,
				     const RkISP1CameraConfiguration &config)
{
	RkISP1CameraData *data = cameraData(camera);
	int ret;

	ret = media_->disableLinks();
	if (ret < 0)
		return ret;

	/*
	 * Configure the sensor links: enable the link corresponding to this
	 * camera.
	 */
	const MediaPad *pad = isp_->entity()->getPadByIndex(0);
	for (MediaLink *link : pad->links()) {
		if (link->source()->entity() != sensor->entity())
			continue;

		LOG(RkISP1, Debug)
			<< "Enabling link from sensor '"
			<< link->source()->entity()->name()
			<< "' to ISP";

		ret = link->setEnabled(true);
		if (ret < 0)
			return ret;
	}

	for (const StreamConfiguration &cfg : config) {
		if (cfg.stream() == &data->mainPathStream_)
			ret = data->mainPath_->setEnabled(true);
		else if (cfg.stream() == &data->selfPathStream_)
			ret = data->selfPath_->setEnabled(true);
		else
			return -EINVAL;

		if (ret < 0)
			return ret;
	}

	return 0;
}

int PipelineHandlerRkISP1::createCamera(MediaEntity *sensor)
{
	int ret;

	std::unique_ptr<RkISP1CameraData> data =
		std::make_unique<RkISP1CameraData>(this, &mainPath_, &selfPath_);

	ControlInfoMap::Map ctrls;
	ctrls.emplace(std::piecewise_construct,
		      std::forward_as_tuple(&controls::AeEnable),
		      std::forward_as_tuple(false, true));

	data->controlInfo_ = std::move(ctrls);

	data->sensor_ = new CameraSensor(sensor);
	ret = data->sensor_->init();
	if (ret)
		return ret;

	/* Initialize the camera properties. */
	data->properties_ = data->sensor_->properties();

	ret = data->loadIPA();
	if (ret)
		return ret;

	std::set<Stream *> streams{
		&data->mainPathStream_,
		&data->selfPathStream_,
	};
	std::shared_ptr<Camera> camera =
		Camera::create(this, data->sensor_->id(), streams);
	registerCamera(std::move(camera), std::move(data));

	return 0;
}

bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
{
	const MediaPad *pad;

	DeviceMatch dm("rkisp1");
	dm.add("rkisp1_isp");
	dm.add("rkisp1_resizer_selfpath");
	dm.add("rkisp1_resizer_mainpath");
	dm.add("rkisp1_selfpath");
	dm.add("rkisp1_mainpath");
	dm.add("rkisp1_stats");
	dm.add("rkisp1_params");

	media_ = acquireMediaDevice(enumerator, dm);
	if (!media_)
		return false;

	/* Create the V4L2 subdevices we will need. */
	isp_ = V4L2Subdevice::fromEntityName(media_, "rkisp1_isp");
	if (isp_->open() < 0)
		return false;

	/* Locate and open the stats and params video nodes. */
	stat_ = V4L2VideoDevice::fromEntityName(media_, "rkisp1_stats");
	if (stat_->open() < 0)
		return false;

	param_ = V4L2VideoDevice::fromEntityName(media_, "rkisp1_params");
	if (param_->open() < 0)
		return false;

	/* Locate and open the ISP main and self paths. */
	if (!mainPath_.init(media_))
		return false;

	if (!selfPath_.init(media_))
		return false;

	mainPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::bufferReady);
	selfPath_.bufferReady().connect(this, &PipelineHandlerRkISP1::bufferReady);
	stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statReady);
	param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramReady);

	/*
	 * Enumerate all sensors connected to the ISP and create one
	 * camera instance for each of them.
	 */
	pad = isp_->entity()->getPadByIndex(0);
	if (!pad)
		return false;

	bool registered = false;
	for (MediaLink *link : pad->links()) {
		if (!createCamera(link->source()->entity()))
			registered = true;
	}

	return registered;
}

/* -----------------------------------------------------------------------------
 * Buffer Handling
 */

void PipelineHandlerRkISP1::tryCompleteRequest(Request *request)
{
	RkISP1CameraData *data = cameraData(activeCamera_);
	RkISP1FrameInfo *info = data->frameInfo_.find(request);
	if (!info)
		return;

	if (request->hasPendingBuffers())
		return;

	if (!info->metadataProcessed)
		return;

	if (!info->paramDequeued)
		return;

	data->frameInfo_.destroy(info->frame);

	completeRequest(activeCamera_, request);
}

void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
{
	ASSERT(activeCamera_);

	Request *request = buffer->request();
	completeBuffer(activeCamera_, request, buffer);
	tryCompleteRequest(request);
}

void PipelineHandlerRkISP1::paramReady(FrameBuffer *buffer)
{
	if (buffer->metadata().status == FrameMetadata::FrameCancelled)
		return;

	ASSERT(activeCamera_);
	RkISP1CameraData *data = cameraData(activeCamera_);

	RkISP1FrameInfo *info = data->frameInfo_.find(buffer);
	if (!info)
		return;

	info->paramDequeued = true;
	tryCompleteRequest(info->request);
}

void PipelineHandlerRkISP1::statReady(FrameBuffer *buffer)
{
	if (buffer->metadata().status == FrameMetadata::FrameCancelled)
		return;

	ASSERT(activeCamera_);
	RkISP1CameraData *data = cameraData(activeCamera_);

	RkISP1FrameInfo *info = data->frameInfo_.find(buffer);
	if (!info)
		return;

	data->timeline_.bufferReady(buffer);

	if (data->frame_ <= buffer->metadata().sequence)
		data->frame_ = buffer->metadata().sequence + 1;

	IPAOperationData op;
	op.operation = RKISP1_IPA_EVENT_SIGNAL_STAT_BUFFER;
	op.data = { info->frame, info->statBuffer->cookie() };
	data->ipa_->processEvent(op);
}

REGISTER_PIPELINE_HANDLER(PipelineHandlerRkISP1)

} /* namespace libcamera */