Diffstat (limited to 'Documentation')
32 files changed, 1031 insertions, 782 deletions
diff --git a/Documentation/Doxyfile.in b/Documentation/Doxyfile-common.in index a86ea6c1..a70aee43 100644 --- a/Documentation/Doxyfile.in +++ b/Documentation/Doxyfile-common.in @@ -20,32 +20,15 @@ TOC_INCLUDE_HEADINGS = 0 CASE_SENSE_NAMES = YES QUIET = YES - -INPUT = "@TOP_SRCDIR@/include/libcamera" \ - "@TOP_SRCDIR@/src/ipa/ipu3" \ - "@TOP_SRCDIR@/src/ipa/libipa" \ - "@TOP_SRCDIR@/src/libcamera" \ - "@TOP_BUILDDIR@/include/libcamera" \ - "@TOP_BUILDDIR@/src/libcamera" +WARN_AS_ERROR = @WARN_AS_ERROR@ FILE_PATTERNS = *.c \ *.cpp \ + *.dox \ *.h RECURSIVE = YES -EXCLUDE = @TOP_SRCDIR@/include/libcamera/base/span.h \ - @TOP_SRCDIR@/include/libcamera/internal/device_enumerator_sysfs.h \ - @TOP_SRCDIR@/include/libcamera/internal/device_enumerator_udev.h \ - @TOP_SRCDIR@/include/libcamera/internal/ipc_pipe_unixsocket.h \ - @TOP_SRCDIR@/src/libcamera/device_enumerator_sysfs.cpp \ - @TOP_SRCDIR@/src/libcamera/device_enumerator_udev.cpp \ - @TOP_SRCDIR@/src/libcamera/ipc_pipe_unixsocket.cpp \ - @TOP_SRCDIR@/src/libcamera/pipeline/ \ - @TOP_SRCDIR@/src/libcamera/tracepoints.cpp \ - @TOP_BUILDDIR@/include/libcamera/internal/tracepoints.h \ - @TOP_BUILDDIR@/src/libcamera/proxy/ - EXCLUDE_PATTERNS = @TOP_BUILDDIR@/include/libcamera/ipa/*_serializer.h \ @TOP_BUILDDIR@/include/libcamera/ipa/*_proxy.h \ @TOP_BUILDDIR@/include/libcamera/ipa/ipu3_*.h \ @@ -68,8 +51,6 @@ EXCLUDE_SYMBOLS = libcamera::BoundMethodArgs \ EXCLUDE_SYMLINKS = YES -HTML_OUTPUT = api-html - GENERATE_LATEX = NO MACRO_EXPANSION = YES diff --git a/Documentation/Doxyfile-internal.in b/Documentation/Doxyfile-internal.in new file mode 100644 index 00000000..cf982553 --- /dev/null +++ b/Documentation/Doxyfile-internal.in @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: CC-BY-SA-4.0 + +@INCLUDE_PATH = @TOP_BUILDDIR@/Documentation +@INCLUDE = Doxyfile-common + +HIDE_UNDOC_CLASSES = NO +HIDE_UNDOC_MEMBERS = NO +HTML_OUTPUT = internal-api-html +INTERNAL_DOCS = YES +ENABLED_SECTIONS = internal + +INPUT = "@TOP_SRCDIR@/Documentation" \ + "@TOP_SRCDIR@/include/libcamera" \ + "@TOP_SRCDIR@/src/ipa/ipu3" \ + "@TOP_SRCDIR@/src/ipa/libipa" \ + "@TOP_SRCDIR@/src/libcamera" \ + "@TOP_BUILDDIR@/include/libcamera" \ + "@TOP_BUILDDIR@/src/libcamera" + +EXCLUDE = @TOP_SRCDIR@/include/libcamera/base/span.h \ + @TOP_SRCDIR@/include/libcamera/internal/device_enumerator_sysfs.h \ + @TOP_SRCDIR@/include/libcamera/internal/device_enumerator_udev.h \ + @TOP_SRCDIR@/include/libcamera/internal/ipc_pipe_unixsocket.h \ + @TOP_SRCDIR@/src/libcamera/device_enumerator_sysfs.cpp \ + @TOP_SRCDIR@/src/libcamera/device_enumerator_udev.cpp \ + @TOP_SRCDIR@/src/libcamera/ipc_pipe_unixsocket.cpp \ + @TOP_SRCDIR@/src/libcamera/pipeline/ \ + @TOP_SRCDIR@/src/libcamera/tracepoints.cpp \ + @TOP_BUILDDIR@/include/libcamera/internal/tracepoints.h \ + @TOP_BUILDDIR@/include/libcamera/ipa/soft_ipa_interface.h \ + @TOP_BUILDDIR@/src/libcamera/proxy/ diff --git a/Documentation/Doxyfile-public.in b/Documentation/Doxyfile-public.in new file mode 100644 index 00000000..36bb5758 --- /dev/null +++ b/Documentation/Doxyfile-public.in @@ -0,0 +1,20 @@ +# SPDX-License-Identifier: CC-BY-SA-4.0 + +@INCLUDE_PATH = @TOP_BUILDDIR@/Documentation +@INCLUDE = Doxyfile-common + +HIDE_UNDOC_CLASSES = YES +HIDE_UNDOC_MEMBERS = YES +HTML_OUTPUT = api-html +INTERNAL_DOCS = NO + +INPUT = "@TOP_SRCDIR@/Documentation" \ + ${inputs} + +EXCLUDE = @TOP_SRCDIR@/include/libcamera/base/class.h \ + @TOP_SRCDIR@/include/libcamera/base/object.h \ + @TOP_SRCDIR@/include/libcamera/base/span.h \ + 
@TOP_SRCDIR@/src/libcamera/base/class.cpp \ + @TOP_SRCDIR@/src/libcamera/base/object.cpp + +PREDEFINED += __DOXYGEN_PUBLIC__ diff --git a/Documentation/api-html/index.rst b/Documentation/api-html/index.rst index 9e630fc0..2f09833d 100644 --- a/Documentation/api-html/index.rst +++ b/Documentation/api-html/index.rst @@ -2,7 +2,7 @@ .. _api: -API -=== +API Reference +============= :: Placeholder for Doxygen documentation diff --git a/Documentation/camera-sensor-model.rst b/Documentation/camera-sensor-model.rst index b66c880a..87a25bf4 100644 --- a/Documentation/camera-sensor-model.rst +++ b/Documentation/camera-sensor-model.rst @@ -1,5 +1,7 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 +.. include:: documentation-contents.rst + .. _camera-sensor-model: .. todo: Move to Doxygen-generated documentation diff --git a/Documentation/code-of-conduct.rst b/Documentation/code-of-conduct.rst index 38b7d7ad..0edd1e99 100644 --- a/Documentation/code-of-conduct.rst +++ b/Documentation/code-of-conduct.rst @@ -1,5 +1,7 @@ .. SPDX-License-Identifier: CC-BY-4.0 +.. include:: documentation-contents.rst + .. _code-of-conduct: Contributor Covenant Code of Conduct diff --git a/Documentation/coding-style.rst b/Documentation/coding-style.rst index 053fdd99..6ac3a4a0 100644 --- a/Documentation/coding-style.rst +++ b/Documentation/coding-style.rst @@ -1,5 +1,7 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 +.. include:: documentation-contents.rst + .. _coding-style-guidelines: Coding Style Guidelines @@ -59,7 +61,7 @@ document: underscores in between * All formatting rules specified in the selected sections of the Linux kernel Code Style for indentation, braces, spacing, etc -* Header guards are formatted as '__LIBCAMERA_FILE_NAME_H__' +* Headers are guarded by the use of '#pragma once' Order of Includes ~~~~~~~~~~~~~~~~~ @@ -215,7 +217,7 @@ shall be avoided when possible, but are allowed when required (for instance to implement factories with auto-registration). They shall not depend on any other global variable, should run a minimal amount of code in the constructor and destructor, and code that contains dependencies should be moved to a later -point in time. +point in time. Error Handling ~~~~~~~~~~~~~~ diff --git a/Documentation/conf.py b/Documentation/conf.py index 7eeea7f3..089f114c 100644 --- a/Documentation/conf.py +++ b/Documentation/conf.py @@ -37,8 +37,11 @@ author = u'Kieran Bingham, Jacopo Mondi, Laurent Pinchart, Niklas Söderlund' # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ + 'sphinx.ext.graphviz' ] +graphviz_output_format = 'svg' + # Add any paths that contain templates here, relative to this directory. templates_path = [] @@ -61,7 +64,12 @@ language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = [ + '_build', + 'Thumbs.db', + '.DS_Store', + 'documentation-contents.rst', +] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None diff --git a/Documentation/docs.rst b/Documentation/docs.rst deleted file mode 100644 index a6e8a59a..00000000 --- a/Documentation/docs.rst +++ /dev/null @@ -1,400 +0,0 @@ -.. SPDX-License-Identifier: CC-BY-SA-4.0 - -.. contents:: - :local: - -************* -Documentation -************* - -.. 
toctree:: - :hidden: - - API <api-html/index> - -API -=== - -The libcamera API is extensively documented using Doxygen. The :ref:`API -nightly build <api>` contains the most up-to-date API documentation, built from -the latest master branch. - -Feature Requirements -==================== - -Device enumeration ------------------- - -The library shall support enumerating all camera devices available in the -system, including both fixed cameras and hotpluggable cameras. It shall -support cameras plugged and unplugged after the initialization of the -library, and shall offer a mechanism to notify applications of camera plug -and unplug. - -The following types of cameras shall be supported: - -* Internal cameras designed for point-and-shoot still image and video - capture usage, either controlled directly by the CPU, or exposed through - an internal USB bus as a UVC device. - -* External UVC cameras designed for video conferencing usage. - -Other types of camera, including analog cameras, depth cameras, thermal -cameras, external digital picture or movie cameras, are out of scope for -this project. - -A hardware device that includes independent camera sensors, such as front -and back sensors in a phone, shall be considered as multiple camera devices -for the purpose of this library. - -Independent Camera Devices --------------------------- - -When multiple cameras are present in the system and are able to operate -independently from each other, the library shall expose them as multiple -camera devices and support parallel operation without any additional usage -restriction apart from the limitations inherent to the hardware (such as -memory bandwidth, CPU usage or number of CSI-2 receivers for instance). - -Independent processes shall be able to use independent cameras devices -without interfering with each other. A single camera device shall be -usable by a single process at a time. - -Multiple streams support ------------------------- - -The library shall support multiple video streams running in parallel -for each camera device, within the limits imposed by the system. - -Per frame controls ------------------- - -The library shall support controlling capture parameters for each stream -on a per-frame basis, on a best effort basis based on the capabilities of the -hardware and underlying software stack (including kernel drivers and -firmware). It shall apply capture parameters to the frame they target, and -report the value of the parameters that have effectively been used for each -captured frame. - -When a camera device supports multiple streams, the library shall allow both -control of each stream independently, and control of multiple streams -together. Streams that are controlled together shall be synchronized. No -synchronization is required for streams controlled independently. - -Capability Enumeration ----------------------- - -The library shall expose capabilities of each camera device in a way that -allows applications to discover those capabilities dynamically. Applications -shall be allowed to cache capabilities for as long as they are using the -library. If capabilities can change at runtime, the library shall offer a -mechanism to notify applications of such changes. Applications shall not -cache capabilities in long term storage between runs. - -Capabilities shall be discovered dynamically at runtime from the device when -possible, and may come, in part or in full, from platform configuration -data. 
- -Device Profiles ---------------- - -The library may define different camera device profiles, each with a minimum -set of required capabilities. Applications may use those profiles to quickly -determine the level of features exposed by a device without parsing the full -list of capabilities. Camera devices may implement additional capabilities -on top of the minimum required set for the profile they expose. - -3A and Image Enhancement Algorithms ------------------------------------ - -The camera devices shall implement auto exposure, auto gain and auto white -balance. Camera devices that include a focus lens shall implement auto -focus. Additional image enhancement algorithms, such as noise reduction or -video stabilization, may be implemented. - -All algorithms may be implemented in hardware or firmware outside of the -library, or in software in the library. They shall all be controllable by -applications. - -The library shall be architectured to isolate the 3A and image enhancement -algorithms in a component with a documented API, respectively called the 3A -component and the 3A API. The 3A API shall be stable, and shall allow both -open-source and closed-source implementations of the 3A component. - -The library may include statically-linked open-source 3A components, and -shall support dynamically-linked open-source and closed-source 3A -components. - -Closed-source 3A Component Sandboxing -------------------------------------- - -For security purposes, it may be desired to run closed-source 3A components -in a separate process. The 3A API would in such a case be transported over -IPC. The 3A API shall make it possible to use any IPC mechanism that -supports passing file descriptors. - -The library may implement an IPC mechanism, and shall support third-party -platform-specific IPC mechanisms through the implementation of a -platform-specific 3A API wrapper. No modification to the library shall be -needed to use such third-party IPC mechanisms. - -The 3A component shall not directly access any device node on the system. -Such accesses shall instead be performed through the 3A API. The library -shall validate all accesses and restrict them to what is absolutely required -by 3A components. - -V4L2 Compatibility Layer ------------------------- - -The project shall support traditional V4L2 application through an additional -libcamera wrapper library. The wrapper library shall trap all accesses to -camera devices through `LD_PRELOAD`, and route them through libcamera to -emulate a high-level V4L2 camera device. It shall expose camera device -features on a best-effort basis, and aim for the level of features -traditionally available from a UVC camera designed for video conferencing. - -Android Camera HAL v3 Compatibility ------------------------------------ - -The library API shall expose all the features required to implement an -Android Camera HAL v3 on top of libcamera. Some features of the HAL may be -omitted as long as they can be implemented separately in the HAL, such as -JPEG encoding, or YUV reprocessing. 
- - -Camera Stack -============ - -:: - - a c / +-------------+ +-------------+ +-------------+ +-------------+ - p a | | Native | | Framework | | Native | | Android | - p t | | V4L2 | | Application | | libcamera | | Camera | - l i | | Application | | (gstreamer) | | Application | | Framework | - i o \ +-------------+ +-------------+ +-------------+ +-------------+ - n ^ ^ ^ ^ - | | | | - l a | | | | - i d v v | v - b a / +-------------+ +-------------+ | +-------------+ - c p | | V4L2 | | Camera | | | Android | - a t | | Compat. | | Framework | | | Camera | - m a | | | | (gstreamer) | | | HAL | - e t \ +-------------+ +-------------+ | +-------------+ - r i ^ ^ | ^ - a o | | | | - n | | | | - / | ,................................................ - | | ! : Language : ! - l f | | ! : Bindings : ! - i r | | ! : (optional) : ! - b a | | \...............................................' - c m | | | | | - a e | | | | | - m w | v v v v - e o | +----------------------------------------------------------------+ - r r | | | - a k | | libcamera | - | | | - \ +----------------------------------------------------------------+ - ^ ^ ^ - Userspace | | | - ------------------------ | ---------------- | ---------------- | --------------- - Kernel | | | - v v v - +-----------+ +-----------+ +-----------+ - | Media | <--> | Video | <--> | V4L2 | - | Device | | Device | | Subdev | - +-----------+ +-----------+ +-----------+ - -The camera stack comprises four software layers. From bottom to top: - -* The kernel drivers control the camera hardware and expose a - low-level interface to userspace through the Linux kernel V4L2 - family of APIs (Media Controller API, V4L2 Video Device API and - V4L2 Subdev API). - -* The libcamera framework is the core part of the stack. It - handles all control of the camera devices in its core component, - libcamera, and exposes a native C++ API to upper layers. Optional - language bindings allow interfacing to libcamera from other - programming languages. - - Those components live in the same source code repository and - all together constitute the libcamera framework. - -* The libcamera adaptation is an umbrella term designating the - components that interface to libcamera in other frameworks. - Notable examples are a V4L2 compatibility layer, a gstreamer - libcamera element, and an Android camera HAL implementation based - on libcamera. - - Those components can live in the libcamera project source code - in separate repositories, or move to their respective project's - repository (for instance the gstreamer libcamera element). - -* The applications and upper level frameworks are based on the - libcamera framework or libcamera adaptation, and are outside of - the scope of the libcamera project. 
- - -libcamera Architecture -====================== - -:: - - ---------------------------< libcamera Public API >--------------------------- - ^ ^ - | | - v v - +-------------+ +-------------------------------------------------+ - | Camera | | Camera Device | - | Devices | | +---------------------------------------------+ | - | Manager | | | Device-Agnostic | | - +-------------+ | | | | - ^ | | +------------------------+ | - | | | | ~~~~~~~~~~~~~~~~~~~~~ | - | | | | { +---------------+ } | - | | | | } | ////Image//// | { | - | | | | <-> | /Processing// | } | - | | | | } | /Algorithms// | { | - | | | | { +---------------+ } | - | | | | ~~~~~~~~~~~~~~~~~~~~~ | - | | | | ======================== | - | | | | +---------------+ | - | | | | | //Pipeline/// | | - | | | | <-> | ///Handler/// | | - | | | | | ///////////// | | - | | +--------------------+ +---------------+ | - | | Device-Specific | - | +-------------------------------------------------+ - | ^ ^ - | | | - v v v - +--------------------------------------------------------------------+ - | Helpers and Support Classes | - | +-------------+ +-------------+ +-------------+ +-------------+ | - | | MC & V4L2 | | Buffers | | Sandboxing | | Plugins | | - | | Support | | Allocator | | IPC | | Manager | | - | +-------------+ +-------------+ +-------------+ +-------------+ | - | +-------------+ +-------------+ | - | | Pipeline | | ... | | - | | Runner | | | | - | +-------------+ +-------------+ | - +--------------------------------------------------------------------+ - - /// Device-Specific Components - ~~~ Sandboxing - -While offering a unified API towards upper layers, and presenting -itself as a single library, libcamera isn't monolithic. It exposes -multiple components through its public API, is built around a set of -separate helpers internally, uses device-specific components and can -load dynamic plugins. - -Camera Devices Manager - The Camera Devices Manager provides a view of available cameras - in the system. It performs cold enumeration and runtime camera - management, and supports a hotplug notification mechanism in its - public API. - - To avoid the cost associated with cold enumeration of all devices - at application start, and to arbitrate concurrent access to camera - devices, the Camera Devices Manager could later be split to a - separate service, possibly with integration in platform-specific - device management. - -Camera Device - The Camera Device represents a camera device to upper layers. It - exposes full control of the device through the public API, and is - thus the highest level object exposed by libcamera. - - Camera Device instances are created by the Camera Devices - Manager. An optional function to create new instances could be exposed - through the public API to speed up initialization when the upper - layer knows how to directly address camera devices present in the - system. - -Pipeline Handler - The Pipeline Handler manages complex pipelines exposed by the kernel drivers - through the Media Controller and V4L2 APIs. It abstracts pipeline handling to - hide device-specific details to the rest of the library, and implements both - pipeline configuration based on stream configuration, and pipeline runtime - execution and scheduling when needed by the device. - - This component is device-specific and is part of the libcamera code base. As - such it is covered by the same free software license as the rest of libcamera - and needs to be contributed upstream by device vendors. 
The Pipeline Handler - lives in the same process as the rest of the library, and has access to all - helpers and kernel camera-related devices. - -Image Processing Algorithms - Together with the hardware image processing and hardware statistics - collection, the Image Processing Algorithms implement 3A (Auto-Exposure, - Auto-White Balance and Auto-Focus) and other algorithms. They run on the CPU - and interact with the kernel camera devices to control hardware image - processing based on the parameters supplied by upper layers, closing the - control loop of the ISP. - - This component is device-specific and is loaded as an external plugin. It can - be part of the libcamera code base, in which case it is covered by the same - license, or provided externally as an open-source or closed-source component. - - The component is sandboxed and can only interact with libcamera through - internal APIs specifically marked as such. In particular it will have no - direct access to kernel camera devices, and all its accesses to image and - metadata will be mediated by dmabuf instances explicitly passed to the - component. The component must be prepared to run in a process separate from - the main libcamera process, and to have a very restricted view of the system, - including no access to networking APIs and limited access to file systems. - - The sandboxing mechanism isn't defined by libcamera. One example - implementation will be provided as part of the project, and platforms vendors - will be able to provide their own sandboxing mechanism as a plugin. - - libcamera should provide a basic implementation of Image Processing - Algorithms, to serve as a reference for the internal API. Device vendors are - expected to provide a full-fledged implementation compatible with their - Pipeline Handler. One goal of the libcamera project is to create an - environment in which the community will be able to compete with the - closed-source vendor binaries and develop a high quality open source - implementation. - -Helpers and Support Classes - While Pipeline Handlers are device-specific, implementations are expected to - share code due to usage of identical APIs towards the kernel camera drivers - and the Image Processing Algorithms. This includes without limitation handling - of the MC and V4L2 APIs, buffer management through dmabuf, and pipeline - discovery, configuration and scheduling. Such code will be factored out to - helpers when applicable. - - Other parts of libcamera will also benefit from factoring code out to - self-contained support classes, even if such code is present only once in the - code base, in order to keep the source code clean and easy to read. This - should be the case for instance for plugin management. - - -V4L2 Compatibility Layer ------------------------- - -V4L2 compatibility is achieved through a shared library that traps all -accesses to camera devices and routes them to libcamera to emulate high-level -V4L2 camera devices. It is injected in a process address space through -`LD_PRELOAD` and is completely transparent for applications. - -The compatibility layer exposes camera device features on a best-effort basis, -and aims for the level of features traditionally available from a UVC camera -designed for video conferencing. - - -Android Camera HAL ------------------- - -Camera support for Android is achieved through a generic Android -camera HAL implementation on top of libcamera. 
The HAL will implement internally -features required by Android and missing from libcamera, such as JPEG encoding -support. - -The Android camera HAL implementation will initially target the -LIMITED hardware level, with support for the FULL level then being gradually -implemented. diff --git a/Documentation/documentation-contents.rst b/Documentation/documentation-contents.rst new file mode 100644 index 00000000..5c111849 --- /dev/null +++ b/Documentation/documentation-contents.rst @@ -0,0 +1,35 @@ +.. SPDX-License-Identifier: CC-BY-SA-4.0 + +.. container:: documentation-nav + + * **Documentation for Users** + * :doc:`Introduction </introduction>` + * :doc:`/feature_requirements` + * :doc:`/guides/application-developer` + * :doc:`/python-bindings` + * :doc:`/environment_variables` + * :doc:`/api-html/index` + * :doc:`/code-of-conduct` + * | + * **Documentation for Developers** + * :doc:`/libcamera_architecture` + * :doc:`/guides/pipeline-handler` + * :doc:`/guides/ipa` + * :doc:`/camera-sensor-model` + * :doc:`/guides/tracing` + * :doc:`/software-isp-benchmarking` + * :doc:`/coding-style` + * :doc:`/internal-api-html/index` + * | + * **Documentation for System Integrators** + * :doc:`/lens_driver_requirements` + * :doc:`/sensor_driver_requirements` + +.. + The following directive adds the "documentation" class to all of the pages + generated by sphinx. This is not relevant in libcamera nor addressed in the + theme's CSS, since all of the pages here are documentation. It **is** used + to properly format the documentation pages on libcamera.org and so should not + be removed. + +.. rst-class:: documentation diff --git a/Documentation/environment_variables.rst b/Documentation/environment_variables.rst index a9b230bc..7da9883a 100644 --- a/Documentation/environment_variables.rst +++ b/Documentation/environment_variables.rst @@ -1,5 +1,7 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 +.. include:: documentation-contents.rst + Environment variables ===================== @@ -37,11 +39,29 @@ LIBCAMERA_IPA_MODULE_PATH Example value: ``${HOME}/.libcamera/lib:/opt/libcamera/vendor/lib`` +LIBCAMERA_IPA_PROXY_PATH + Define custom full path for a proxy worker for a given executable name. + + Example value: ``${HOME}/.libcamera/proxy/worker:/opt/libcamera/vendor/proxy/worker`` + +LIBCAMERA_PIPELINES_MATCH_LIST + Define an ordered list of pipeline names to be used to match the media + devices in the system. The pipeline handler names used to populate the + variable are the ones passed to the REGISTER_PIPELINE_HANDLER() macro in the + source code. + + Example value: ``rkisp1,simple`` + LIBCAMERA_RPI_CONFIG_FILE Define a custom configuration file to use in the Raspberry Pi pipeline handler. Example value: ``/usr/local/share/libcamera/pipeline/rpi/vc4/minimal_mem.yaml`` +LIBCAMERA_RPI_TUNING_FILE + Define a custom JSON tuning file to use in the Raspberry Pi. + + Example value: ``/usr/local/share/libcamera/ipa/rpi/vc4/custom_sensor.json`` + Further details --------------- diff --git a/Documentation/feature_requirements.rst b/Documentation/feature_requirements.rst new file mode 100644 index 00000000..e6b74a62 --- /dev/null +++ b/Documentation/feature_requirements.rst @@ -0,0 +1,150 @@ +.. SPDX-License-Identifier: CC-BY-SA-4.0 + +.. include:: documentation-contents.rst + +Feature Requirements +==================== + +Device enumeration +------------------ + +The library shall support enumerating all camera devices available in the +system, including both fixed cameras and hotpluggable cameras. 
It shall +support cameras plugged and unplugged after the initialization of the +library, and shall offer a mechanism to notify applications of camera plug +and unplug. + +The following types of cameras shall be supported: + +* Internal cameras designed for point-and-shoot still image and video + capture usage, either controlled directly by the CPU, or exposed through + an internal USB bus as a UVC device. + +* External UVC cameras designed for video conferencing usage. + +Other types of camera, including analog cameras, depth cameras, thermal +cameras, external digital picture or movie cameras, are out of scope for +this project. + +A hardware device that includes independent camera sensors, such as front +and back sensors in a phone, shall be considered as multiple camera devices +for the purpose of this library. + +Independent Camera Devices +-------------------------- + +When multiple cameras are present in the system and are able to operate +independently from each other, the library shall expose them as multiple +camera devices and support parallel operation without any additional usage +restriction apart from the limitations inherent to the hardware (such as +memory bandwidth, CPU usage or number of CSI-2 receivers for instance). + +Independent processes shall be able to use independent cameras devices +without interfering with each other. A single camera device shall be +usable by a single process at a time. + +Multiple streams support +------------------------ + +The library shall support multiple video streams running in parallel +for each camera device, within the limits imposed by the system. + +Per frame controls +------------------ + +The library shall support controlling capture parameters for each stream +on a per-frame basis, on a best effort basis based on the capabilities of the +hardware and underlying software stack (including kernel drivers and +firmware). It shall apply capture parameters to the frame they target, and +report the value of the parameters that have effectively been used for each +captured frame. + +When a camera device supports multiple streams, the library shall allow both +control of each stream independently, and control of multiple streams +together. Streams that are controlled together shall be synchronized. No +synchronization is required for streams controlled independently. + +Capability Enumeration +---------------------- + +The library shall expose capabilities of each camera device in a way that +allows applications to discover those capabilities dynamically. Applications +shall be allowed to cache capabilities for as long as they are using the +library. If capabilities can change at runtime, the library shall offer a +mechanism to notify applications of such changes. Applications shall not +cache capabilities in long term storage between runs. + +Capabilities shall be discovered dynamically at runtime from the device when +possible, and may come, in part or in full, from platform configuration +data. + +Device Profiles +--------------- + +The library may define different camera device profiles, each with a minimum +set of required capabilities. Applications may use those profiles to quickly +determine the level of features exposed by a device without parsing the full +list of capabilities. Camera devices may implement additional capabilities +on top of the minimum required set for the profile they expose. 
+ +3A and Image Enhancement Algorithms +----------------------------------- + +The library shall provide a basic implementation of Image Processing Algorithms +to serve as a reference for the internal API. This shall include auto exposure +and gain, and auto white balance. Camera devices that include a focus lens shall +implement auto focus. Additional image enhancement algorithms, such as noise +reduction or video stabilization, may be implemented. Device vendors are +expected to provide a fully-fledged implementation compatible with their +Pipeline Handler. One goal of the libcamera project is to create an environment +in which the community will be able to compete with the closed-source vendor +binaries and develop a high quality open source implementation. + +All algorithms may be implemented in hardware or firmware outside of the +library, or in software in the library. They shall all be controllable by +applications. + +The library shall be architectured to isolate the 3A and image enhancement +algorithms in a component with a documented API, respectively called the 3A +component and the 3A API. The 3A API shall be stable, and shall allow both +open-source and closed-source implementations of the 3A component. + +The library may include statically-linked open-source 3A components, and +shall support dynamically-linked open-source and closed-source 3A +components. + +Closed-source 3A Component Sandboxing +------------------------------------- + +For security purposes, it may be desired to run closed-source 3A components +in a separate process. The 3A API would in such a case be transported over +IPC. The 3A API shall make it possible to use any IPC mechanism that +supports passing file descriptors. + +The library may implement an IPC mechanism, and shall support third-party +platform-specific IPC mechanisms through the implementation of a +platform-specific 3A API wrapper. No modification to the library shall be +needed to use such third-party IPC mechanisms. + +The 3A component shall not directly access any device node on the system. +Such accesses shall instead be performed through the 3A API. The library +shall validate all accesses and restrict them to what is absolutely required +by 3A components. + +V4L2 Compatibility Layer +------------------------ + +The project shall support traditional V4L2 applications through an additional +libcamera wrapper library. The wrapper library shall trap all accesses to +camera devices through `LD_PRELOAD`, and route them through libcamera to +emulate a high-level V4L2 camera device. It shall expose camera device +features on a best-effort basis, and aim for the level of features +traditionally available from a UVC camera designed for video conferencing. + +Android Camera HAL v3 Compatibility +----------------------------------- + +The library API shall expose all the features required to implement an +Android Camera HAL v3 on top of libcamera. Some features of the HAL may be +omitted as long as they can be implemented separately in the HAL, such as +JPEG encoding, or YUV reprocessing. diff --git a/Documentation/gen-doxyfile.py b/Documentation/gen-doxyfile.py new file mode 100755 index 00000000..c265bc2f --- /dev/null +++ b/Documentation/gen-doxyfile.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: GPL-2.0-or-later +# Copyright (C) 2024, Google Inc.
+# +# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com> +# +# Generate Doxyfile from a template + +import argparse +import os +import string +import sys + + +def fill_template(template, data): + + template = open(template, 'rb').read() + template = template.decode('utf-8') + template = string.Template(template) + + return template.substitute(data) + + +def main(argv): + + parser = argparse.ArgumentParser() + parser.add_argument('-o', dest='output', metavar='file', + type=argparse.FileType('w', encoding='utf-8'), + default=sys.stdout, + help='Output file name (default: standard output)') + parser.add_argument('template', metavar='doxyfile.tmpl', type=str, + help='Doxyfile template') + parser.add_argument('inputs', type=str, nargs='*', + help='Input files') + + args = parser.parse_args(argv[1:]) + + inputs = [f'"{os.path.realpath(input)}"' for input in args.inputs] + data = fill_template(args.template, {'inputs': (' \\\n' + ' ' * 25).join(inputs)}) + args.output.write(data) + + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv)) diff --git a/Documentation/getting-started.rst b/Documentation/getting-started.rst index 987f43f7..63b050eb 100644 --- a/Documentation/getting-started.rst +++ b/Documentation/getting-started.rst @@ -1,4 +1,5 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 + .. Getting started information is defined in the project README file. .. include:: ../README.rst :start-after: .. section-begin-getting-started diff --git a/Documentation/guides/application-developer.rst b/Documentation/guides/application-developer.rst index 9a9905b1..25beb55d 100644 --- a/Documentation/guides/application-developer.rst +++ b/Documentation/guides/application-developer.rst @@ -1,5 +1,7 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 +.. include:: ../documentation-contents.rst + Using libcamera in a C++ application ==================================== @@ -116,19 +118,21 @@ available. .. code:: cpp - if (cm->cameras().empty()) { + auto cameras = cm->cameras(); + if (cameras.empty()) { std::cout << "No cameras were identified on the system." << std::endl; cm->stop(); return EXIT_FAILURE; } - std::string cameraId = cm->cameras()[0]->id(); - camera = cm->get(cameraId); + std::string cameraId = cameras[0]->id(); + auto camera = cm->get(cameraId); /* - * Note that is equivalent to: - * camera = cm->cameras()[0]; + * Note that `camera` may not compare equal to `cameras[0]`. + * In fact, it might simply be a `nullptr`, as the particular + * device might have disappeared (and reappeared) in the meantime. */ Once a camera has been selected an application needs to acquire an exclusive @@ -479,7 +483,7 @@ instance. An example of how to write image data to disk is available in the `FileSink class`_ which is a part of the ``cam`` utility application in the libcamera repository. -.. _FileSink class: https://git.libcamera.org/libcamera/libcamera.git/tree/src/cam/file_sink.cpp +.. _FileSink class: https://git.libcamera.org/libcamera/libcamera.git/tree/src/apps/cam/file_sink.cpp With the handling of this request completed, it is possible to re-use the request and the associated buffers and re-queue it to the camera diff --git a/Documentation/guides/introduction.rst b/Documentation/guides/introduction.rst deleted file mode 100644 index 700ec2d3..00000000 --- a/Documentation/guides/introduction.rst +++ /dev/null @@ -1,319 +0,0 @@ -.. 
SPDX-License-Identifier: CC-BY-SA-4.0 - -Developers guide to libcamera -============================= - -The Linux kernel handles multimedia devices through the 'Linux media' subsystem -and provides a set of APIs (application programming interfaces) known -collectively as V4L2 (`Video for Linux 2`_) and the `Media Controller`_ API -which provide an interface to interact and control media devices. - -Included in this subsystem are drivers for camera sensors, CSI2 (Camera -Serial Interface) receivers, and ISPs (Image Signal Processors) - -The usage of these drivers to provide a functioning camera stack is a -responsibility that lies in userspace which is commonly implemented separately -by vendors without a common architecture or API for application developers. - -libcamera provides a complete camera stack for Linux based systems to abstract -functionality desired by camera application developers and process the -configuration of hardware and image control algorithms required to obtain -desirable results from the camera. - -.. _Video for Linux 2: https://www.linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/v4l2.html -.. _Media Controller: https://www.linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/mediactl/media-controller.html - - -In this developers guide, we will explore the `Camera Stack`_ and how it is -can be visualised at a high level, and explore the internal `Architecture`_ of -the libcamera library with its components. The current `Platform Support`_ is -detailed, as well as an overview of the `Licensing`_ requirements of the -project. - -This introduction is followed by a walkthrough tutorial to newcomers wishing to -support a new platform with the `Pipeline Handler Writers Guide`_ and for those -looking to make use of the libcamera native API an `Application Writers Guide`_ -provides a tutorial of the key APIs exposed by libcamera. - -.. _Pipeline Handler Writers Guide: pipeline-handler.html -.. _Application Writers Guide: application-developer.html - -.. TODO: Correctly link to the other articles of the guide - -Camera Stack ------------- - -The libcamera library is implemented in userspace, and makes use of underlying -kernel drivers that directly interact with hardware. - -Applications can make use of libcamera through the native `libcamera API`_'s or -through an adaptation layer integrating libcamera into a larger framework. - -.. 
_libcamera API: https://www.libcamera.org/api-html/index.html - -:: - - Application Layer - / +--------------+ +--------------+ +--------------+ +--------------+ - | | Native | | Framework | | Native | | Android | - | | V4L2 | | Application | | libcamera | | Camera | - | | Application | | (gstreamer) | | Application | | Framework | - \ +--------------+ +--------------+ +--------------+ +--------------+ - - ^ ^ ^ ^ - | | | | - | | | | - v v | v - Adaptation Layer | - / +--------------+ +--------------+ | +--------------+ - | | V4L2 | | gstreamer | | | Android | - | | Compatibility| | element | | | Camera | - | | (preload) | |(libcamerasrc)| | | HAL | - \ +--------------+ +--------------+ | +--------------+ - | - ^ ^ | ^ - | | | | - | | | | - v v v v - libcamera Framework - / +--------------------------------------------------------------------+ - | | | - | | libcamera | - | | | - \ +--------------------------------------------------------------------+ - - ^ ^ ^ - Userspace | | | - --------------------- | ---------------- | ---------------- | --------------- - Kernel | | | - v v v - - +-----------+ +-----------+ +-----------+ - | Media | <--> | Video | <--> | V4L2 | - | Device | | Device | | Subdev | - +-----------+ +-----------+ +-----------+ - -The camera stack comprises of four software layers. From bottom to top: - -* The kernel drivers control the camera hardware and expose a low-level - interface to userspace through the Linux kernel V4L2 family of APIs - (Media Controller API, V4L2 Video Device API and V4L2 Subdev API). - -* The libcamera framework is the core part of the stack. It handles all control - of the camera devices in its core component, libcamera, and exposes a native - C++ API to upper layers. - -* The libcamera adaptation layer is an umbrella term designating the components - that interface to libcamera in other frameworks. Notable examples are the V4L2 - compatibility layer, the gstreamer libcamera element, and the Android camera - HAL implementation based on libcamera which are provided as a part of the - libcamera project. - -* The applications and upper level frameworks are based on the libcamera - framework or libcamera adaptation, and are outside of the scope of the - libcamera project, however example native applications (cam, qcam) are - provided for testing. - - -V4L2 Compatibility Layer - V4L2 compatibility is achieved through a shared library that traps all - accesses to camera devices and routes them to libcamera to emulate high-level - V4L2 camera devices. It is injected in a process address space through - ``LD_PRELOAD`` and is completely transparent for applications. - - The compatibility layer exposes camera device features on a best-effort basis, - and aims for the level of features traditionally available from a UVC camera - designed for video conferencing. - -Android Camera HAL - Camera support for Android is achieved through a generic Android camera HAL - implementation on top of libcamera. The HAL implements features required by - Android and out of scope from libcamera, such as JPEG encoding support. - - This component is used to provide support for ChromeOS platforms - -GStreamer element (gstlibcamerasrc) - A `GStreamer element`_ is provided to allow capture from libcamera supported - devices through GStreamer pipelines, and connect to other elements for further - processing. - - Development of this element is ongoing and is limited to a single stream. 
- -Native libcamera API - Applications can make use of the libcamera API directly using the C++ - API. An example application and walkthrough using the libcamera API can be - followed in the `Application Writers Guide`_ - -.. _GStreamer element: https://gstreamer.freedesktop.org/documentation/application-development/basics/elements.html - -Architecture ------------- - -While offering a unified API towards upper layers, and presenting itself as a -single library, libcamera isn't monolithic. It exposes multiple components -through its public API and is built around a set of separate helpers internally. -Hardware abstractions are handled through the use of device-specific components -where required and dynamically loadable plugins are used to separate image -processing algorithms from the core libcamera codebase. - -:: - - --------------------------< libcamera Public API >--------------------------- - ^ ^ - | | - v v - +-------------+ +---------------------------------------------------+ - | Camera | | Camera Device | - | Manager | | +-----------------------------------------------+ | - +-------------+ | | Device-Agnostic | | - ^ | | | | - | | | +--------------------------+ | - | | | | ~~~~~~~~~~~~~~~~~~~~~~~ | - | | | | { +-----------------+ } | - | | | | } | //// Image //// | { | - | | | | <-> | / Processing // | } | - | | | | } | / Algorithms // | { | - | | | | { +-----------------+ } | - | | | | ~~~~~~~~~~~~~~~~~~~~~~~ | - | | | | ========================== | - | | | | +-----------------+ | - | | | | | // Pipeline /// | | - | | | | <-> | /// Handler /// | | - | | | | | /////////////// | | - | | +--------------------+ +-----------------+ | - | | Device-Specific | - | +---------------------------------------------------+ - | ^ ^ - | | | - v v v - +--------------------------------------------------------------------+ - | Helpers and Support Classes | - | +-------------+ +-------------+ +-------------+ +-------------+ | - | | MC & V4L2 | | Buffers | | Sandboxing | | Plugins | | - | | Support | | Allocator | | IPC | | Manager | | - | +-------------+ +-------------+ +-------------+ +-------------+ | - | +-------------+ +-------------+ | - | | Pipeline | | ... | | - | | Runner | | | | - | +-------------+ +-------------+ | - +--------------------------------------------------------------------+ - - /// Device-Specific Components - ~~~ Sandboxing - - -Camera Manager - The Camera Manager enumerates cameras and instantiates Pipeline Handlers to - manage each Camera that libcamera supports. The Camera Manager supports - hotplug detection and notification events when supported by the underlying - kernel devices. - - There is only ever one instance of the Camera Manager running per application. - Each application's instance of the Camera Manager ensures that only a single - application can take control of a camera device at once. - - Read the `Camera Manager API`_ documentation for more details. - -.. _Camera Manager API: https://libcamera.org/api-html/classlibcamera_1_1CameraManager.html - -Camera Device - The Camera class represents a single item of camera hardware that is capable - of producing one or more image streams, and provides the API to interact with - the underlying device. - - If a system has multiple instances of the same hardware attached, each has its - own instance of the camera class. 
- - The API exposes full control of the device to upper layers of libcamera through - the public API, making it the highest level object libcamera exposes, and the - object that all other API operations interact with from configuration to - capture. - - Read the `Camera API`_ documentation for more details. - -.. _Camera API: https://libcamera.org/api-html/classlibcamera_1_1Camera.html - -Pipeline Handler - The Pipeline Handler manages the complex pipelines exposed by the kernel - drivers through the Media Controller and V4L2 APIs. It abstracts pipeline - handling to hide device-specific details from the rest of the library, and - implements both pipeline configuration based on stream configuration, and - pipeline runtime execution and scheduling when needed by the device. - - The Pipeline Handler lives in the same process as the rest of the library, and - has access to all helpers and kernel camera-related devices. - - Hardware abstraction is handled by device specific Pipeline Handlers which are - derived from the Pipeline Handler base class allowing commonality to be shared - among the implementations. - - Derived pipeline handlers create Camera device instances based on the devices - they detect and support on the running system, and are responsible for - managing the interactions with a camera device. - - More details can be found in the `PipelineHandler API`_ documentation, and the - `Pipeline Handler Writers Guide`_. - -.. _PipelineHandler API: https://libcamera.org/api-html/classlibcamera_1_1PipelineHandler.html - -Image Processing Algorithms - An image processing algorithm (IPA) component is a loadable plugin that - implements 3A (Auto-Exposure, Auto-White Balance, and Auto-Focus) and other - algorithms. - - The algorithms run on the CPU and interact with the camera devices through the - Pipeline Handler to control hardware image processing based on the parameters - supplied by upper layers, maintaining state and closing the control loop - of the ISP. - - The component is sandboxed and can only interact with libcamera through the - API provided by the Pipeline Handler and an IPA has no direct access to kernel - camera devices. - - Open source IPA modules built with libcamera can be run in the same process - space as libcamera, however external IPA modules are run in a separate process - from the main libcamera process. IPA modules have a restricted view of the - system, including no access to networking APIs and limited access to file - systems. - - IPA modules are only required for platforms and devices with an ISP controlled - by the host CPU. Camera sensors which have an integrated ISP are not - controlled through the IPA module. - -Platform Support ----------------- - -The library currently supports the following hardware platforms specifically -with dedicated pipeline handlers: - - - Intel IPU3 (ipu3) - - Rockchip RK3399 (rkisp1) - - RaspberryPi 3 and 4 (rpi/vc4) - -Furthermore, generic platform support is provided for the following: - - - USB video device class cameras (uvcvideo) - - iMX7, Allwinner Sun6i (simple) - - Virtual media controller driver for test use cases (vimc) - -Licensing ---------- - -The libcamera core, is covered by the `LGPL-2.1-or-later`_ license. Pipeline -Handlers are a part of the libcamera code base and need to be contributed -upstream by device vendors. 
IPA modules included in libcamera are covered by a -free software license, however third-parties may develop IPA modules outside of -libcamera and distribute them under a closed-source license, provided they do -not include source code from the libcamera project. - -The libcamera project itself contains multiple libraries, applications and -utilities. Licenses are expressed through SPDX tags in text-based files that -support comments, and through the .reuse/dep5 file otherwise. A copy of all -licenses are stored in the LICENSES directory, and a full summary of the -licensing used throughout the project can be found in the COPYING.rst document. - -Applications which link dynamically against libcamera and use only the public -API are an independent work of the authors and have no license restrictions -imposed upon them from libcamera. - -.. _LGPL-2.1-or-later: https://spdx.org/licenses/LGPL-2.1-or-later.html diff --git a/Documentation/guides/ipa.rst b/Documentation/guides/ipa.rst index 25deadef..cd640563 100644 --- a/Documentation/guides/ipa.rst +++ b/Documentation/guides/ipa.rst @@ -1,5 +1,7 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 +.. include:: ../documentation-contents.rst + IPA Writer's Guide ================== diff --git a/Documentation/guides/pipeline-handler.rst b/Documentation/guides/pipeline-handler.rst index 728e9676..69e832a5 100644 --- a/Documentation/guides/pipeline-handler.rst +++ b/Documentation/guides/pipeline-handler.rst @@ -1,5 +1,7 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 +.. include:: ../documentation-contents.rst + Pipeline Handler Writers Guide ============================== @@ -151,13 +153,14 @@ integrates with the libcamera build system, and a *vivid.cpp* file that matches the name of the pipeline. In the *meson.build* file, add the *vivid.cpp* file as a build source for -libcamera by adding it to the global meson ``libcamera_sources`` variable: +libcamera by adding it to the global meson ``libcamera_internal_sources`` +variable: .. code-block:: none # SPDX-License-Identifier: CC0-1.0 - libcamera_sources += files([ + libcamera_internal_sources += files([ 'vivid.cpp', ]) @@ -258,7 +261,7 @@ implementations for the overridden class members. return false; } - REGISTER_PIPELINE_HANDLER(PipelineHandlerVivid) + REGISTER_PIPELINE_HANDLER(PipelineHandlerVivid, "vivid") } /* namespace libcamera */ @@ -266,6 +269,8 @@ Note that you must register the ``PipelineHandler`` subclass with the pipeline handler factory using the `REGISTER_PIPELINE_HANDLER`_ macro which registers it and creates a global symbol to reference the class and make it available to try and match devices. +String "vivid" is the name assigned to the pipeline, matching the pipeline +subdirectory name in the source tree. .. _REGISTER_PIPELINE_HANDLER: https://libcamera.org/api-html/pipeline__handler_8h.html @@ -1345,7 +1350,7 @@ before being set. continue; } - int32_t value = lroundf(it.second.get<float>() * 128 + offset); + int32_t value = std::lround(it.second.get<float>() * 128 + offset); controls.set(cid, std::clamp(value, 0, 255)); } @@ -1409,7 +1414,7 @@ value translation operations: .. code-block:: cpp - #include <math.h> + #include <cmath> Frame completion and event handling ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/Documentation/guides/tracing.rst b/Documentation/guides/tracing.rst index ae960d85..537dce50 100644 --- a/Documentation/guides/tracing.rst +++ b/Documentation/guides/tracing.rst @@ -1,5 +1,7 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 +.. 
include:: ../documentation-contents.rst + Tracing Guide ============= diff --git a/Documentation/index.rst b/Documentation/index.rst index 63fac72d..bea40660 100644 --- a/Documentation/index.rst +++ b/Documentation/index.rst @@ -1,26 +1,29 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 -.. Front page matter is defined in the project README file. -.. include:: ../README.rst - :start-after: .. section-begin-libcamera - :end-before: .. section-end-libcamera +.. include:: introduction.rst .. toctree:: :maxdepth: 1 :caption: Contents: Home <self> - Docs <docs> Contribute <contributing> Getting Started <getting-started> - Developer Guide <guides/introduction> Application Writer's Guide <guides/application-developer> - Pipeline Handler Writer's Guide <guides/pipeline-handler> - IPA Writer's guide <guides/ipa> - Tracing guide <guides/tracing> + Camera Sensor Model <camera-sensor-model> Environment variables <environment_variables> - Sensor driver requirements <sensor_driver_requirements> + Feature Requirements <feature_requirements> + IPA Writer's guide <guides/ipa> Lens driver requirements <lens_driver_requirements> + libcamera Architecture <libcamera_architecture> + Pipeline Handler Writer's Guide <guides/pipeline-handler> Python Bindings <python-bindings> - Camera Sensor Model <camera-sensor-model> + Sensor driver requirements <sensor_driver_requirements> + SoftwareISP Benchmarking <software-isp-benchmarking> + Tracing guide <guides/tracing> + +.. toctree:: + :hidden: + + introduction
\ No newline at end of file diff --git a/Documentation/internal-api-html/index.rst b/Documentation/internal-api-html/index.rst new file mode 100644 index 00000000..43768648 --- /dev/null +++ b/Documentation/internal-api-html/index.rst @@ -0,0 +1,8 @@ +.. SPDX-License-Identifier: CC-BY-SA-4.0 + +.. _internal-api: + +Internal API Reference +====================== + +:: Placeholder for Doxygen documentation diff --git a/Documentation/introduction.rst b/Documentation/introduction.rst new file mode 100644 index 00000000..82aa11a3 --- /dev/null +++ b/Documentation/introduction.rst @@ -0,0 +1,224 @@ +.. SPDX-License-Identifier: CC-BY-SA-4.0 + +.. include:: documentation-contents.rst + +************ +Introduction +************ + +.. toctree:: + :hidden: + + API <api-html/index> + Internal API <internal-api-html/index> + +What is libcamera? +================== + +libcamera is an open source complex camera support library for Linux, Android +and ChromeOS. The library interfaces with Linux kernel device drivers and +provides an intuitive API to developers in order to simplify the complexity +involved in capturing images from complex cameras on Linux systems. + +What is a "complex camera"? +=========================== + +A modern "camera" tends in fact to be several different pieces of hardware which +must all be controlled together in order to produce and capture images of +appropriate quality. A hardware pipeline typically consists of a camera sensor +that captures raw frames and transmits them on a bus, a receiver that decodes +the bus signals, and an image signal processor that processes raw frames to +produce usable images in a standard format. The Linux kernel handles these +multimedia devices through the 'Linux media' subsystem and provides a set of +application programming interfaces known collectively as the +V4L2 (`Video for Linux 2`_) and the `Media Controller`_ APIs, which provide an +interface to interact and control media devices. + +.. _Video for Linux 2: https://www.linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/v4l2.html +.. _Media Controller: https://www.linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/mediactl/media-controller.html + +Included in this subsystem are drivers for camera sensors, CSI2 (Camera +Serial Interface) receivers, and ISPs (Image Signal Processors). + +The usage of these drivers to provide a functioning camera stack is a +responsibility that lies in userspace, and is commonly implemented separately +by vendors without a common architecture or API for application developers. This +adds a lot of complexity to the task, particularly when considering that the +differences in hardware pipelines and their representation in the kernel's APIs +often necessitate bespoke handling. + +What is libcamera for? +====================== + +libcamera provides a complete camera stack for Linux-based systems to abstract +the configuration of hardware and image control algorithms required to obtain +desirable results from the camera through the kernel's APIs, reducing those +operations to a simple and consistent method for developers. In short, instead of +having to deal with this: + +.. graphviz:: mali-c55.dot + +you can instead simply deal with: + +.. code-block:: python + + >>> import libcamera as lc + >>> camera_manager = lc.CameraManager.singleton() + [0:15:59.582029920] [504] INFO Camera camera_manager.cpp:313 libcamera v0.3.0+182-01e57380 + >>> for camera in camera_manager.cameras: + ... print(f' - {camera.id}') + ...
+ - mali-c55 tpg + - imx415 1-001a + +The library handles the rest for you. These documentary pages give more +information on the internal workings of libcamera (and the kernel camera stack +that lies behind it) as well as guidance on using libcamera in an application or +extending the library with support for your hardware (through the pipeline +handler and IPA module writer's guides). + +How should I use it? +==================== + +There are a few ways you might want to use libcamera, depending on your +application. It's always possible to use the library directly, and you can find +detailed information on how to do so in the +:doc:`application writer's guide <guides/application-developer>`. + +It is often more appropriate to use one of the frameworks with libcamera +support. For example an application powering an embedded media device +incorporating capture, encoding and streaming of both video and audio would +benefit from using `GStreamer`_, for which libcamera provides a plugin. +Similarly an application for user-facing devices like a laptop would likely +benefit accessing cameras through the XDG camera portal and `pipewire`_, which +brings the advantages of resource sharing (multiple applications accessing the +stream at the same time) and access control. + +.. _GStreamer: https://gstreamer.freedesktop.org/ +.. _pipewire: https://pipewire.org/ + +Camera Stack +============ + +:: + + a c / +-------------+ +-------------+ +-------------+ +-------------+ + p a | | Native | | Framework | | Native | | Android | + p t | | V4L2 | | Application | | libcamera | | Camera | + l i | | Application | | (gstreamer) | | Application | | Framework | + i o \ +-------------+ +-------------+ +-------------+ +-------------+ + n ^ ^ ^ ^ + | | | | + l a | | | | + i d v v | v + b a / +-------------+ +-------------+ | +-------------+ + c p | | V4L2 | | Camera | | | Android | + a t | | Compat. | | Framework | | | Camera | + m a | | | | (gstreamer) | | | HAL | + e t \ +-------------+ +-------------+ | +-------------+ + r i ^ ^ | ^ + a o | | | | + n | | | | + / | ,................................................ + | | ! : Language : ! + l f | | ! : Bindings : ! + i r | | ! : (optional) : ! + b a | | \...............................................' + c m | | | | | + a e | | | | | + m w | v v v v + e o | +----------------------------------------------------------------+ + r r | | | + a k | | libcamera | + | | | + \ +----------------------------------------------------------------+ + ^ ^ ^ + Userspace | | | + ------------------------ | ---------------- | ---------------- | --------------- + Kernel | | | + v v v + +-----------+ +-----------+ +-----------+ + | Media | <--> | Video | <--> | V4L2 | + | Device | | Device | | Subdev | + +-----------+ +-----------+ +-----------+ + +The camera stack comprises four software layers. From bottom to top: + +* The kernel drivers control the camera hardware and expose a + low-level interface to userspace through the Linux kernel V4L2 + family of APIs (Media Controller API, V4L2 Video Device API and + V4L2 Subdev API). + +* The libcamera framework is the core part of the stack. It + handles all control of the camera devices in its core component, + libcamera, and exposes a native C++ API to upper layers. Optional + language bindings allow interfacing to libcamera from other + programming languages. + + Those components live in the same source code repository and + all together constitute the libcamera framework. 
+
+* The libcamera adaptation is an umbrella term designating the
+  components that interface to libcamera in other frameworks.
+  Notable examples are a V4L2 compatibility layer, a gstreamer
+  libcamera element, and an Android camera HAL implementation based
+  on libcamera.
+
+  Those components can live in the libcamera project source code
+  in separate repositories, or move to their respective project's
+  repository (for instance the gstreamer libcamera element).
+
+* The applications and upper level frameworks are based on the
+  libcamera framework or libcamera adaptation, and are outside of
+  the scope of the libcamera project.
+
+V4L2 Compatibility Layer
+  V4L2 compatibility is achieved through a shared library that traps all
+  accesses to camera devices and routes them to libcamera to emulate high-level
+  V4L2 camera devices. It is injected into a process address space through
+  ``LD_PRELOAD`` and is completely transparent to applications.
+
+  The compatibility layer exposes camera device features on a best-effort
+  basis, and aims for the level of features traditionally available from a UVC
+  camera designed for video conferencing.
+
+Android Camera HAL
+  Camera support for Android is achieved through a generic Android camera HAL
+  implementation on top of libcamera. The HAL implements features required by
+  Android that are out of scope for libcamera, such as JPEG encoding support.
+
+  This component is used to provide support for ChromeOS platforms.
+
+GStreamer element (gstlibcamerasrc)
+  A `GStreamer element`_ is provided to allow capture from libcamera supported
+  devices through GStreamer pipelines, and to connect to other elements for
+  further processing.
+
+Native libcamera API
+  Applications can use the libcamera C++ API directly. An example application
+  and walkthrough using the libcamera API can be followed in the
+  :doc:`Application writer's guide </guides/application-developer>`.
+
+.. _GStreamer element: https://gstreamer.freedesktop.org/documentation/application-development/basics/elements.html
+
+Licensing
+=========
+
+The libcamera core is covered by the `LGPL-2.1-or-later`_ license. Pipeline
+Handlers are a part of the libcamera code base and need to be contributed
+upstream by device vendors. IPA modules included in libcamera are covered by a
+free software license; however, third parties may develop IPA modules outside
+of libcamera and distribute them under a closed-source license, provided they
+do not include source code from the libcamera project.
+
+The libcamera project itself contains multiple libraries, applications and
+utilities. Licenses are expressed through SPDX tags in text-based files that
+support comments, and through the .reuse/dep5 file otherwise. Copies of all
+licenses are stored in the LICENSES directory, and a full summary of the
+licensing used throughout the project can be found in the COPYING.rst document.
+
+Applications which link dynamically against libcamera and use only the public
+API are an independent work of the authors and have no license restrictions
+imposed upon them by libcamera.
+
+.. _LGPL-2.1-or-later: https://spdx.org/licenses/LGPL-2.1-or-later.html
diff --git a/Documentation/lens_driver_requirements.rst b/Documentation/lens_driver_requirements.rst
index b96e502d..85fef76f 100644
--- a/Documentation/lens_driver_requirements.rst
+++ b/Documentation/lens_driver_requirements.rst
@@ -1,5 +1,7 @@
 .. SPDX-License-Identifier: CC-BY-SA-4.0
 
+.. include:: documentation-contents.rst
+
 ..
_lens-driver-requirements: Lens Driver Requirements diff --git a/Documentation/libcamera_architecture.rst b/Documentation/libcamera_architecture.rst new file mode 100644 index 00000000..abbb0d17 --- /dev/null +++ b/Documentation/libcamera_architecture.rst @@ -0,0 +1,168 @@ +.. SPDX-License-Identifier: CC-BY-SA-4.0 + +.. include:: documentation-contents.rst + +libcamera Architecture +====================== + +While offering a unified API towards upper layers, and presenting itself as a +single library, libcamera isn't monolithic. It exposes multiple components +through its public API and is built around a set of separate helpers internally. +Hardware abstractions are handled through the use of device-specific components +where required and dynamically loadable plugins are used to separate image +processing algorithms from the core libcamera codebase. + +:: + + --------------------------< libcamera Public API >--------------------------- + ^ ^ + | | + v v + +-------------+ +---------------------------------------------------+ + | Camera | | Camera Device | + | Manager | | +-----------------------------------------------+ | + +-------------+ | | Device-Agnostic | | + ^ | | | | + | | | +--------------------------+ | + | | | | ~~~~~~~~~~~~~~~~~~~~~~~ | + | | | | { +-----------------+ } | + | | | | } | //// Image //// | { | + | | | | <-> | / Processing // | } | + | | | | } | / Algorithms // | { | + | | | | { +-----------------+ } | + | | | | ~~~~~~~~~~~~~~~~~~~~~~~ | + | | | | ========================== | + | | | | +-----------------+ | + | | | | | // Pipeline /// | | + | | | | <-> | /// Handler /// | | + | | | | | /////////////// | | + | | +--------------------+ +-----------------+ | + | | Device-Specific | + | +---------------------------------------------------+ + | ^ ^ + | | | + v v v + +--------------------------------------------------------------------+ + | Helpers and Support Classes | + | +-------------+ +-------------+ +-------------+ +-------------+ | + | | MC & V4L2 | | Buffers | | Sandboxing | | Plugins | | + | | Support | | Allocator | | IPC | | Manager | | + | +-------------+ +-------------+ +-------------+ +-------------+ | + | +-------------+ +-------------+ | + | | Pipeline | | ... | | + | | Runner | | | | + | +-------------+ +-------------+ | + +--------------------------------------------------------------------+ + + /// Device-Specific Components + ~~~ Sandboxing + + +Camera Manager + The Camera Manager enumerates cameras and instantiates Pipeline Handlers to + manage each Camera that libcamera supports. The Camera Manager supports + hotplug detection and notification events when supported by the underlying + kernel devices. + + There is only ever one instance of the Camera Manager running per application. + Each application's instance of the Camera Manager ensures that only a single + application can take control of a camera device at once. + + Read the `Camera Manager API`_ documentation for more details. + +.. _Camera Manager API: https://libcamera.org/api-html/classlibcamera_1_1CameraManager.html + +Camera Device + The Camera class represents a single item of camera hardware that is capable + of producing one or more image streams, and provides the API to interact with + the underlying device. + + If a system has multiple instances of the same hardware attached, each has its + own instance of the camera class. 
+
+  The API exposes full control of the device to upper layers of libcamera
+  through the public API, making it the highest-level object libcamera
+  exposes, and the object that all other API operations interact with from
+  configuration to capture.
+
+  Read the `Camera API`_ documentation for more details.
+
+.. _Camera API: https://libcamera.org/api-html/classlibcamera_1_1Camera.html
+
+Pipeline Handler
+  The Pipeline Handler manages the complex pipelines exposed by the kernel
+  drivers through the Media Controller and V4L2 APIs. It abstracts pipeline
+  handling to hide device-specific details from the rest of the library, and
+  implements both pipeline configuration based on stream configuration, and
+  pipeline runtime execution and scheduling when needed by the device.
+
+  The Pipeline Handler lives in the same process as the rest of the library,
+  and has access to all helpers and kernel camera-related devices.
+
+  Hardware abstraction is handled by device-specific Pipeline Handlers which
+  are derived from the Pipeline Handler base class, allowing commonality to be
+  shared among the implementations.
+
+  Derived pipeline handlers create Camera device instances based on the
+  devices they detect and support on the running system, and are responsible
+  for managing the interactions with a camera device.
+
+  More details can be found in the `PipelineHandler API`_ documentation, and
+  the :doc:`Pipeline Handler Writers Guide <guides/pipeline-handler>`.
+
+.. _PipelineHandler API: https://libcamera.org/api-html/classlibcamera_1_1PipelineHandler.html
+
+Image Processing Algorithms
+  Together with the hardware image processing and hardware statistics
+  collection, the Image Processing Algorithms (IPA) implement 3A
+  (Auto-Exposure, Auto-White Balance and Auto-Focus) and other algorithms.
+  They run on the CPU and control hardware image processing based on the
+  parameters supplied by upper layers, closing the control loop of the ISP.
+
+  IPAs are loaded as external plugins named IPA Modules. IPA Modules can be
+  part of the libcamera code base or provided externally by camera vendors as
+  open-source or closed-source components.
+
+  Open source IPA Modules built with libcamera are run in the same process
+  space as libcamera. External IPA Modules are run in a separate sandboxed
+  process. In either case, they can only interact with libcamera through the
+  API provided by the Pipeline Handler. They have a restricted view of the
+  system, with no direct access to kernel camera devices, no access to
+  networking APIs, and limited access to file systems. All their accesses to
+  images and metadata are mediated by dmabuf instances explicitly passed by
+  the Pipeline Handler to the IPA Module.
+
+  IPA Modules are only required for platforms and devices with an ISP
+  controlled by the host CPU. Camera sensors which have an integrated ISP are
+  not controlled through the IPA Module.
+
+Helpers and Support Classes
+  While Pipeline Handlers are device-specific, implementations are expected to
+  share code due to their use of identical APIs towards the kernel camera
+  drivers and the Image Processing Algorithms. This includes, without
+  limitation, handling of the MC and V4L2 APIs, buffer management through
+  dmabuf, and pipeline discovery, configuration and scheduling. Such code will
+  be factored out to helpers when applicable.
+ + Other parts of libcamera will also benefit from factoring code out to + self-contained support classes, even if such code is present only once in the + code base, in order to keep the source code clean and easy to read. This + should be the case for instance for plugin management. + +Platform Support +---------------- + +The library currently supports the following hardware platforms specifically +with dedicated pipeline handlers: + + - Arm Mali-C55 + - Intel IPU3 (ipu3) + - NXP i.MX8MP (imx8-isi and rkisp1) + - RaspberryPi 3, 4 and zero (rpi/vc4) + - Rockchip RK3399 (rkisp1) + +Furthermore, generic platform support is provided for the following: + + - USB video device class cameras (uvcvideo) + - iMX7, IPU6, Allwinner Sun6i (simple) + - Virtual media controller driver for test use cases (vimc) diff --git a/Documentation/mainpage.dox b/Documentation/mainpage.dox new file mode 100644 index 00000000..cbee9bab --- /dev/null +++ b/Documentation/mainpage.dox @@ -0,0 +1,33 @@ +/** +\mainpage libcamera API reference + +Welcome to the API reference for <a href="https://libcamera.org/">libcamera</a>, +a complex camera support library for Linux, Android and ChromeOS. These pages +are automatically generated from the libcamera source code and describe the API +in detail - if this is your first interaction with libcamera then you may find +it useful to visit the [documentation](../introduction.html) in +the first instance, which can provide a more generic introduction to the +library's concepts. + +\if internal + +As a follow-on to the developer's guide, to assist you in adding support for +your platform the [pipeline handler writer's guide](../guides/pipeline-handler.html) +and the [ipa module writer's guide](../guides/ipa.html) should be helpful. + +The full libcamera API is documented here. If you wish to see only the public +part of the API you can use [these pages](../api-html/index.html) instead. + +\else + +As a follow-on to the developer's guide, to assist you in using libcamera within +your project the [application developer's guide](../guides/application-developer.html) +gives an overview on how to achieve that. + +Only the public part of the libcamera API is documented here; if you are a +developer seeking to add support for your hardware to the library or make other +improvements, you should switch to the internal API +[reference pages](../internal-api-html/index.html) instead. 
+ +\endif +*/ diff --git a/Documentation/mali-c55.dot b/Documentation/mali-c55.dot new file mode 100644 index 00000000..7bfc44c0 --- /dev/null +++ b/Documentation/mali-c55.dot @@ -0,0 +1,25 @@ +/* SPDX-License-Identifier: CC-BY-SA-4.0 */ + +digraph board { + rankdir=TB + n00000001 [label="{{} | mali-c55 tpg\n/dev/v4l-subdev0 | {<port0> 0}}", shape=Mrecord, style=filled, fillcolor=green] + n00000001:port0 -> n00000003:port0 [style=dashed] + n00000003 [label="{{<port0> 0 | <port4> 4} | mali-c55 isp\n/dev/v4l-subdev1 | {<port1> 1 | <port2> 2 | <port3> 3}}", shape=Mrecord, style=filled, fillcolor=green] + n00000003:port1 -> n00000009:port0 [style=bold] + n00000003:port2 -> n00000009:port2 [style=bold] + n00000003:port1 -> n0000000d:port0 [style=bold] + n00000003:port3 -> n0000001c + n00000009 [label="{{<port0> 0 | <port2> 2} | mali-c55 resizer fr\n/dev/v4l-subdev2 | {<port1> 1}}", shape=Mrecord, style=filled, fillcolor=green] + n00000009:port1 -> n00000010 + n0000000d [label="{{<port0> 0} | mali-c55 resizer ds\n/dev/v4l-subdev3 | {<port1> 1}}", shape=Mrecord, style=filled, fillcolor=green] + n0000000d:port1 -> n00000014 + n00000010 [label="mali-c55 fr\n/dev/video0", shape=box, style=filled, fillcolor=yellow] + n00000014 [label="mali-c55 ds\n/dev/video1", shape=box, style=filled, fillcolor=yellow] + n00000018 [label="mali-c55 3a params\n/dev/video2", shape=box, style=filled, fillcolor=yellow] + n00000018 -> n00000003:port4 + n0000001c [label="mali-c55 3a stats\n/dev/video3", shape=box, style=filled, fillcolor=yellow] + n00000030 [label="{{<port0> 0} | lte-csi2-rx\n/dev/v4l-subdev4 | {<port1> 1}}", shape=Mrecord, style=filled, fillcolor=green] + n00000030:port1 -> n00000003:port0 + n00000035 [label="{{} | imx415 1-001a\n/dev/v4l-subdev5 | {<port0> 0}}", shape=Mrecord, style=filled, fillcolor=green] + n00000035:port0 -> n00000030:port0 [style=bold] +} diff --git a/Documentation/meson.build b/Documentation/meson.build index 7a58fec8..36ffae23 100644 --- a/Documentation/meson.build +++ b/Documentation/meson.build @@ -15,6 +15,7 @@ if doxygen.found() and dot.found() cdata.set('TOP_SRCDIR', meson.project_source_root()) cdata.set('TOP_BUILDDIR', meson.project_build_root()) cdata.set('OUTPUT_DIR', meson.current_build_dir()) + cdata.set('WARN_AS_ERROR', get_option('doc_werror') ? 'YES' : 'NO') doxygen_predefined = [] foreach key : config_h.keys() @@ -23,34 +24,92 @@ if doxygen.found() and dot.found() cdata.set('PREDEFINED', ' \\\n\t\t\t '.join(doxygen_predefined)) - doxyfile = configure_file(input : 'Doxyfile.in', - output : 'Doxyfile', - configuration : cdata) + doxyfile_common = configure_file(input : 'Doxyfile-common.in', + output : 'Doxyfile-common', + configuration : cdata) + + doxygen_public_input = [ + libcamera_base_public_headers, + libcamera_base_public_sources, + libcamera_public_headers, + libcamera_public_sources, + ] - doxygen_input = [ - doxyfile, - libcamera_base_headers, - libcamera_base_sources, + doxygen_internal_input = [ + libcamera_base_private_headers, + libcamera_base_internal_sources, libcamera_internal_headers, + libcamera_internal_sources, libcamera_ipa_headers, libcamera_ipa_interfaces, - libcamera_public_headers, - libcamera_sources, libipa_headers, libipa_sources, ] if is_variable('ipu3_ipa_sources') - doxygen_input += [ipu3_ipa_sources] + doxygen_internal_input += [ipu3_ipa_sources] endif - custom_target('doxygen', - input : doxygen_input, + # We run doxygen twice - the first run excludes internal API objects as it + # is intended to document the public API only. 
A second run covers all of + # the library's objects for libcamera developers. Common configuration is + # set in an initially generated Doxyfile, which is then included by the two + # final Doxyfiles. + + # This is the "public" run of doxygen generating an abridged version of the + # API's documentation. + + doxyfile_tmpl = configure_file(input : 'Doxyfile-public.in', + output : 'Doxyfile-public.tmpl', + configuration : cdata) + + # The set of public input files stored in the doxygen_public_input array + # needs to be set in Doxyfile public. We can't pass them through cdata + # cdata, as some of the array members are custom_tgt instances, which + # configuration_data.set() doesn't support. Using a separate script invoked + # through custom_target(), which supports custom_tgt instances as inputs. + + doxyfile = custom_target('doxyfile-public', + input : [ + doxygen_public_input, + ], + output : 'Doxyfile-public', + command : [ + 'gen-doxyfile.py', + '-o', '@OUTPUT@', + doxyfile_tmpl, + '@INPUT@', + ]) + + custom_target('doxygen-public', + input : [ + doxyfile, + doxyfile_common, + ], output : 'api-html', command : [doxygen, doxyfile], install : true, install_dir : doc_install_dir, install_tag : 'doc') + + # This is the internal documentation, which hard-codes a list of directories + # to parse in its doxyfile. + + doxyfile = configure_file(input : 'Doxyfile-internal.in', + output : 'Doxyfile-internal', + configuration : cdata) + + custom_target('doxygen-internal', + input : [ + doxyfile, + doxyfile_common, + doxygen_internal_input, + ], + output : 'internal-api-html', + command : [doxygen, doxyfile], + install : true, + install_dir : doc_install_dir, + install_tag : 'doc-internal') endif # @@ -69,17 +128,21 @@ if sphinx.found() 'coding-style.rst', 'conf.py', 'contributing.rst', - 'docs.rst', + 'documentation-contents.rst', 'environment_variables.rst', + 'feature_requirements.rst', 'guides/application-developer.rst', - 'guides/introduction.rst', 'guides/ipa.rst', 'guides/pipeline-handler.rst', 'guides/tracing.rst', 'index.rst', + 'introduction.rst', 'lens_driver_requirements.rst', + 'libcamera_architecture.rst', + 'mali-c55.dot', 'python-bindings.rst', 'sensor_driver_requirements.rst', + 'software-isp-benchmarking.rst', '../README.rst', ] diff --git a/Documentation/python-bindings.rst b/Documentation/python-bindings.rst index ed9f686b..94712238 100644 --- a/Documentation/python-bindings.rst +++ b/Documentation/python-bindings.rst @@ -1,5 +1,7 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 +.. include:: documentation-contents.rst + .. _python-bindings: Python Bindings for libcamera diff --git a/Documentation/sensor_driver_requirements.rst b/Documentation/sensor_driver_requirements.rst index 0e516b34..fb4269d0 100644 --- a/Documentation/sensor_driver_requirements.rst +++ b/Documentation/sensor_driver_requirements.rst @@ -1,5 +1,7 @@ .. SPDX-License-Identifier: CC-BY-SA-4.0 +.. include:: documentation-contents.rst + .. _sensor-driver-requirements: Sensor Driver Requirements diff --git a/Documentation/software-isp-benchmarking.rst b/Documentation/software-isp-benchmarking.rst new file mode 100644 index 00000000..9c2a409b --- /dev/null +++ b/Documentation/software-isp-benchmarking.rst @@ -0,0 +1,79 @@ +.. SPDX-License-Identifier: CC-BY-SA-4.0 + +.. include:: documentation-contents.rst + +.. 
_software-isp-benchmarking:
+
+Software ISP benchmarking
+=========================
+
+The Software ISP is particularly sensitive to performance regressions; it is
+therefore a good idea to always benchmark the Software ISP before and after
+making changes to it, to ensure that no performance regressions are
+introduced.
+
+DebayerCpu class builtin benchmark
+----------------------------------
+
+The DebayerCpu class has a builtin benchmark. This benchmark measures the time
+spent on processing (collecting statistics and debayering) only; it does not
+measure the time spent on capturing or outputting the frames.
+
+The builtin benchmark always runs, so it can be used by simply running "cam"
+or "qcam" with a pipeline using the Software ISP.
+
+When it runs, it skips measuring the first 30 frames to allow the caches and
+the CPU clocks (turbo behavior) to warm up, and then measures 30 frames and
+shows the total and per-frame processing time using an info level log message:
+
+.. code-block:: text
+
+   INFO Debayer debayer_cpu.cpp:907 Processed 30 frames in 244317us, 8143 us/frame
+
+To get stable measurements it is advised to disable any other processes which
+may cause significant CPU usage (e.g. disable Wi-Fi, Bluetooth and browsers).
+When possible it is also advisable to disable CPU turbo and frequency scaling.
+
+For example, when benchmarking on a Lenovo ThinkPad X1 Yoga Gen 8 with the
+charger plugged in, the CPU can be fixed to run at 2 GHz using:
+
+.. code-block:: shell
+
+   sudo x86_energy_perf_policy --turbo-enable 0
+   sudo cpupower frequency-set -d 2GHz -u 2GHz
+
+With these settings the builtin benchmark reports a processing time of
+~7.8 ms/frame on this laptop for FHD SGRBG10 (unpacked) Bayer data.
+
+Measuring power consumption
+---------------------------
+
+Since the Software ISP is often used on mobile devices, it is also important
+to measure power consumption and ensure that it does not regress.
+
+For example, to measure power consumption on a Lenovo ThinkPad X1 Yoga Gen 8,
+the laptop needs to be running on battery and should be configured with its
+platform-profile (/sys/firmware/acpi/platform_profile) set to balanced, and
+with its default turbo and frequency-scaling behavior, to match real-world
+usage.
+
+Then start qcam to capture an FHD picture at 30 fps and position the qcam
+window so that it is fully visible. After this, run the following command to
+monitor the power consumption:
+
+.. code-block:: shell
+
+   watch -n 10 cat /sys/class/power_supply/BAT0/power_now /sys/class/hwmon/hwmon6/fan?_input
+
+Note that this not only measures the power consumption in µW, it also monitors
+the speed of this laptop's two fans. This is important because, depending on
+the ambient temperature, the fans may spin up while testing, causing an
+additional power consumption of approx. 0.5 W and skewing the measurement.
+
+After starting qcam and the watch command, let the laptop sit without using it
+for 2 minutes to let the readings stabilize. Then check that the fans have not
+turned on, manually take a couple of consecutive power readings, and average
+them.
+
+On the example Lenovo ThinkPad X1 Yoga Gen 8 laptop this results in a measured
+power consumption of approx. 13 W while running qcam, versus approx. 4-5 W
+while sitting idle with its OLED panel on.
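The measurement logic behind such a builtin benchmark can be pictured with a short C++ sketch. The class below is purely illustrative; the names and the warm-up and measurement windows are assumptions made for this example and are not the actual DebayerCpu members. It shows the approach of skipping warm-up frames and then averaging the per-frame processing time to produce a log line like the one quoted above:

.. code-block:: cpp

   /* Illustrative sketch only; not the real DebayerCpu benchmark code. */
   #include <chrono>
   #include <iostream>

   class FrameTimeBenchmark
   {
   public:
           void startFrame()
           {
                   frameStart_ = std::chrono::steady_clock::now();
           }

           void finishFrame()
           {
                   auto frameEnd = std::chrono::steady_clock::now();

                   /* Skip the first frames so caches and CPU clocks can warm up. */
                   if (framesSeen_++ < kWarmupFrames)
                           return;

                   total_ += std::chrono::duration_cast<std::chrono::microseconds>(
                           frameEnd - frameStart_);

                   if (++framesMeasured_ == kMeasuredFrames) {
                           std::cout << "Processed " << framesMeasured_
                                     << " frames in " << total_.count() << "us, "
                                     << total_.count() / framesMeasured_
                                     << " us/frame" << std::endl;
                           total_ = std::chrono::microseconds(0);
                           framesMeasured_ = 0;
                   }
           }

   private:
           static constexpr unsigned int kWarmupFrames = 30;
           static constexpr unsigned int kMeasuredFrames = 30;

           std::chrono::steady_clock::time_point frameStart_;
           std::chrono::microseconds total_{ 0 };
           unsigned int framesSeen_ = 0;
           unsigned int framesMeasured_ = 0;
   };

In a real pipeline, startFrame() and finishFrame() would wrap only the statistics collection and debayering steps, mirroring the scope of the builtin benchmark described above.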
diff --git a/Documentation/theme/static/css/theme.css b/Documentation/theme/static/css/theme.css
index d4274ea6..a6d43195 100644
--- a/Documentation/theme/static/css/theme.css
+++ b/Documentation/theme/static/css/theme.css
@@ -283,9 +283,13 @@ div#signature {
     font-size: 12px;
 }
 
-#libcamera div.toctree-wrapper {
+#licensing div.toctree-wrapper {
     height: 0px;
     margin: 0px;
     padding: 0px;
     visibility: hidden;
 }
+
+.documentation-nav {
+    display: none;
+}
diff --git a/Documentation/thread-safety.dox b/Documentation/thread-safety.dox
new file mode 100644
index 00000000..df4c457c
--- /dev/null
+++ b/Documentation/thread-safety.dox
@@ -0,0 +1,44 @@
+/**
+ * \page thread-safety Reentrancy and Thread-Safety
+ *
+ * Throughout the documentation, several terms are used to define how classes
+ * and their member functions can be used from multiple threads.
+ *
+ * - A **reentrant** function may be called simultaneously from multiple
+ *   threads if and only if each invocation uses a different instance of the
+ *   class. This is the default for all member functions not explicitly marked
+ *   otherwise.
+ *
+ * - \anchor thread-safe A **thread-safe** function may be called
+ *   simultaneously from multiple threads on the same instance of a class. A
+ *   thread-safe function is thus reentrant. Thread-safe functions may also be
+ *   called simultaneously with any other reentrant function of the same class
+ *   on the same instance.
+ *
+ * \internal
+ * - \anchor thread-bound A **thread-bound** function may be called only from
+ *   the thread that the class instance lives in (see section \ref
+ *   thread-objects). For instances of classes that do not derive from the
+ *   Object class, this is the thread in which the instance was created. A
+ *   thread-bound function is not thread-safe, and may or may not be reentrant.
+ * \endinternal
+ *
+ * Neither reentrancy nor thread-safety, in this context, means that a function
+ * may be called simultaneously from the same thread, for instance from a
+ * callback invoked by the function. This may deadlock and isn't allowed unless
+ * separately documented.
+ *
+ * \if internal
+ * A class is defined as reentrant, thread-safe or thread-bound if all its
+ * member functions are reentrant, thread-safe or thread-bound respectively.
+ * \else
+ * A class is defined as reentrant or thread-safe if all its member functions
+ * are reentrant or thread-safe respectively.
+ * \endif
+ * Some member functions may also be documented as having additional
+ * thread-related attributes.
+ *
+ * Most classes are reentrant but not thread-safe, as making them fully
+ * thread-safe would incur locking costs considered prohibitive for the
+ * expected use cases.
+ */
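To make the terminology above concrete, the following hypothetical class (written for this guide, not taken from the libcamera API) contrasts a reentrant member function with a thread-safe one:

.. code-block:: cpp

   #include <mutex>
   #include <string>

   /* Hypothetical example class, not part of the libcamera API. */
   class Recorder
   {
   public:
           /*
            * Reentrant, but not thread-safe: concurrent calls are only valid
            * if each thread uses a different Recorder instance, as name_ is
            * accessed without any locking.
            */
           void setName(const std::string &name)
           {
                   name_ = name;
           }

           /*
            * Thread-safe: concurrent calls on the same instance are valid,
            * as the mutex serialises access to the shared counter.
            */
           void record()
           {
                   std::lock_guard<std::mutex> lock(mutex_);
                   count_++;
           }

           unsigned int count() const
           {
                   std::lock_guard<std::mutex> lock(mutex_);
                   return count_;
           }

   private:
           std::string name_;

           mutable std::mutex mutex_;
           unsigned int count_ = 0;
   };

As noted above, most classes follow the reentrant-but-not-thread-safe model of setName(), since taking a lock on every call would be too costly for the expected use cases.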