summaryrefslogtreecommitdiff
path: root/src/android
diff options
context:
space:
mode:
Diffstat (limited to 'src/android')
-rw-r--r--src/android/camera3_hal.cpp17
-rw-r--r--src/android/camera_buffer.h61
-rw-r--r--src/android/camera_capabilities.cpp1165
-rw-r--r--src/android/camera_capabilities.h66
-rw-r--r--src/android/camera_device.cpp2074
-rw-r--r--src/android/camera_device.h126
-rw-r--r--src/android/camera_hal_config.cpp407
-rw-r--r--src/android/camera_hal_config.h39
-rw-r--r--src/android/camera_hal_manager.cpp84
-rw-r--r--src/android/camera_hal_manager.h20
-rw-r--r--src/android/camera_metadata.cpp147
-rw-r--r--src/android/camera_metadata.h70
-rw-r--r--src/android/camera_ops.cpp8
-rw-r--r--src/android/camera_stream.cpp146
-rw-r--r--src/android/camera_stream.h147
-rw-r--r--src/android/camera_worker.cpp129
-rw-r--r--src/android/camera_worker.h71
-rw-r--r--src/android/cros/camera3_hal.cpp24
-rw-r--r--src/android/cros/meson.build13
-rw-r--r--src/android/data/soraka/camera_hal.yaml8
-rw-r--r--src/android/jpeg/encoder.h16
-rw-r--r--src/android/jpeg/encoder_libjpeg.cpp36
-rw-r--r--src/android/jpeg/encoder_libjpeg.h21
-rw-r--r--src/android/jpeg/exif.cpp315
-rw-r--r--src/android/jpeg/exif.h65
-rw-r--r--src/android/jpeg/post_processor_jpeg.cpp196
-rw-r--r--src/android/jpeg/post_processor_jpeg.h45
-rw-r--r--src/android/jpeg/thumbnailer.cpp93
-rw-r--r--src/android/jpeg/thumbnailer.h34
-rw-r--r--src/android/meson.build47
-rw-r--r--src/android/mm/cros_camera_buffer.cpp134
-rw-r--r--src/android/mm/generic_camera_buffer.cpp91
-rw-r--r--src/android/mm/meson.build9
-rw-r--r--src/android/post_processor.h32
-rw-r--r--src/android/yuv/post_processor_yuv.cpp143
-rw-r--r--src/android/yuv/post_processor_yuv.h42
36 files changed, 4742 insertions, 1399 deletions
diff --git a/src/android/camera3_hal.cpp b/src/android/camera3_hal.cpp
index d6e04af2..da836bae 100644
--- a/src/android/camera3_hal.cpp
+++ b/src/android/camera3_hal.cpp
@@ -7,7 +7,7 @@
#include <hardware/camera_common.h>
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
#include "camera_device.h"
#include "camera_hal_manager.h"
@@ -16,25 +16,23 @@ using namespace libcamera;
LOG_DEFINE_CATEGORY(HAL)
-static CameraHalManager cameraManager;
-
/*------------------------------------------------------------------------------
* Android Camera HAL callbacks
*/
static int hal_get_number_of_cameras()
{
- return cameraManager.numCameras();
+ return CameraHalManager::instance()->numCameras();
}
static int hal_get_camera_info(int id, struct camera_info *info)
{
- return cameraManager.getCameraInfo(id, info);
+ return CameraHalManager::instance()->getCameraInfo(id, info);
}
static int hal_set_callbacks(const camera_module_callbacks_t *callbacks)
{
- cameraManager.setCallbacks(callbacks);
+ CameraHalManager::instance()->setCallbacks(callbacks);
return 0;
}
@@ -62,7 +60,7 @@ static int hal_init()
{
LOG(HAL, Info) << "Initialising Android camera HAL";
- cameraManager.init();
+ CameraHalManager::instance()->init();
return 0;
}
@@ -77,11 +75,12 @@ static int hal_dev_open(const hw_module_t *module, const char *name,
LOG(HAL, Debug) << "Open camera " << name;
int id = atoi(name);
- CameraDevice *camera = cameraManager.open(id, module);
+
+ auto [camera, ret] = CameraHalManager::instance()->open(id, module);
if (!camera) {
LOG(HAL, Error)
<< "Failed to open camera module '" << id << "'";
- return -ENODEV;
+ return ret == -EBUSY ? -EUSERS : ret;
}
*device = &camera->camera3Device()->common;
diff --git a/src/android/camera_buffer.h b/src/android/camera_buffer.h
new file mode 100644
index 00000000..21373fa2
--- /dev/null
+++ b/src/android/camera_buffer.h
@@ -0,0 +1,61 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_buffer.h - Frame buffer handling interface definition
+ */
+#ifndef __ANDROID_CAMERA_BUFFER_H__
+#define __ANDROID_CAMERA_BUFFER_H__
+
+#include <hardware/camera3.h>
+
+#include <libcamera/base/class.h>
+#include <libcamera/base/span.h>
+
+class CameraBuffer final : public libcamera::Extensible
+{
+ LIBCAMERA_DECLARE_PRIVATE()
+
+public:
+ CameraBuffer(buffer_handle_t camera3Buffer, int flags);
+ ~CameraBuffer();
+
+ bool isValid() const;
+
+ unsigned int numPlanes() const;
+
+ libcamera::Span<const uint8_t> plane(unsigned int plane) const;
+ libcamera::Span<uint8_t> plane(unsigned int plane);
+
+ size_t jpegBufferSize(size_t maxJpegBufferSize) const;
+};
+
+#define PUBLIC_CAMERA_BUFFER_IMPLEMENTATION \
+CameraBuffer::CameraBuffer(buffer_handle_t camera3Buffer, int flags) \
+ : Extensible(new Private(this, camera3Buffer, flags)) \
+{ \
+} \
+CameraBuffer::~CameraBuffer() \
+{ \
+} \
+bool CameraBuffer::isValid() const \
+{ \
+ return _d()->isValid(); \
+} \
+unsigned int CameraBuffer::numPlanes() const \
+{ \
+ return _d()->numPlanes(); \
+} \
+Span<const uint8_t> CameraBuffer::plane(unsigned int plane) const \
+{ \
+ return const_cast<Private *>(_d())->plane(plane); \
+} \
+Span<uint8_t> CameraBuffer::plane(unsigned int plane) \
+{ \
+ return _d()->plane(plane); \
+} \
+size_t CameraBuffer::jpegBufferSize(size_t maxJpegBufferSize) const \
+{ \
+ return _d()->jpegBufferSize(maxJpegBufferSize); \
+}
+#endif /* __ANDROID_CAMERA_BUFFER_H__ */
diff --git a/src/android/camera_capabilities.cpp b/src/android/camera_capabilities.cpp
new file mode 100644
index 00000000..6b5edb66
--- /dev/null
+++ b/src/android/camera_capabilities.cpp
@@ -0,0 +1,1165 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_capabilities.cpp - Camera static properties manager
+ */
+
+#include "camera_capabilities.h"
+
+#include <array>
+#include <cmath>
+
+#include <hardware/camera3.h>
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/control_ids.h>
+#include <libcamera/controls.h>
+#include <libcamera/property_ids.h>
+
+#include "libcamera/internal/formats.h"
+
+using namespace libcamera;
+
+LOG_DECLARE_CATEGORY(HAL)
+
+namespace {
+
+/*
+ * \var camera3Resolutions
+ * \brief The list of image resolutions defined as mandatory to be supported by
+ * the Android Camera3 specification
+ */
+const std::vector<Size> camera3Resolutions = {
+ { 320, 240 },
+ { 640, 480 },
+ { 1280, 720 },
+ { 1920, 1080 }
+};
+
+/*
+ * \struct Camera3Format
+ * \brief Data associated with an Android format identifier
+ * \var libcameraFormats List of libcamera pixel formats compatible with the
+ * Android format
+ * \var name The human-readable representation of the Android format code
+ */
+struct Camera3Format {
+ std::vector<PixelFormat> libcameraFormats;
+ bool mandatory;
+ const char *name;
+};
+
+/*
+ * \var camera3FormatsMap
+ * \brief Associate Android format code with ancillary data
+ */
+const std::map<int, const Camera3Format> camera3FormatsMap = {
+ {
+ HAL_PIXEL_FORMAT_BLOB, {
+ { formats::MJPEG },
+ true,
+ "BLOB"
+ }
+ }, {
+ HAL_PIXEL_FORMAT_YCbCr_420_888, {
+ { formats::NV12, formats::NV21 },
+ true,
+ "YCbCr_420_888"
+ }
+ }, {
+ /*
+ * \todo Translate IMPLEMENTATION_DEFINED inspecting the gralloc
+ * usage flag. For now, copy the YCbCr_420 configuration.
+ */
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, {
+ { formats::NV12, formats::NV21 },
+ true,
+ "IMPLEMENTATION_DEFINED"
+ }
+ }, {
+ HAL_PIXEL_FORMAT_RAW10, {
+ {
+ formats::SBGGR10_CSI2P,
+ formats::SGBRG10_CSI2P,
+ formats::SGRBG10_CSI2P,
+ formats::SRGGB10_CSI2P
+ },
+ false,
+ "RAW10"
+ }
+ }, {
+ HAL_PIXEL_FORMAT_RAW12, {
+ {
+ formats::SBGGR12_CSI2P,
+ formats::SGBRG12_CSI2P,
+ formats::SGRBG12_CSI2P,
+ formats::SRGGB12_CSI2P
+ },
+ false,
+ "RAW12"
+ }
+ }, {
+ HAL_PIXEL_FORMAT_RAW16, {
+ {
+ formats::SBGGR16,
+ formats::SGBRG16,
+ formats::SGRBG16,
+ formats::SRGGB16
+ },
+ false,
+ "RAW16"
+ }
+ },
+};
+
+} /* namespace */
+
+int CameraCapabilities::initialize(std::shared_ptr<libcamera::Camera> camera,
+ int orientation, int facing)
+{
+ camera_ = camera;
+ orientation_ = orientation;
+ facing_ = facing;
+
+ /* Acquire the camera and initialize available stream configurations. */
+ int ret = camera_->acquire();
+ if (ret) {
+ LOG(HAL, Error) << "Failed to temporarily acquire the camera";
+ return ret;
+ }
+
+ ret = initializeStreamConfigurations();
+ camera_->release();
+ if (ret)
+ return ret;
+
+ return initializeStaticMetadata();
+}
+
+std::vector<Size> CameraCapabilities::getYUVResolutions(CameraConfiguration *cameraConfig,
+ const PixelFormat &pixelFormat,
+ const std::vector<Size> &resolutions)
+{
+ std::vector<Size> supportedResolutions;
+
+ StreamConfiguration &cfg = cameraConfig->at(0);
+ for (const Size &res : resolutions) {
+ cfg.pixelFormat = pixelFormat;
+ cfg.size = res;
+
+ CameraConfiguration::Status status = cameraConfig->validate();
+ if (status != CameraConfiguration::Valid) {
+ LOG(HAL, Debug) << cfg.toString() << " not supported";
+ continue;
+ }
+
+ LOG(HAL, Debug) << cfg.toString() << " supported";
+
+ supportedResolutions.push_back(res);
+ }
+
+ return supportedResolutions;
+}
+
+std::vector<Size> CameraCapabilities::getRawResolutions(const libcamera::PixelFormat &pixelFormat)
+{
+ std::unique_ptr<CameraConfiguration> cameraConfig =
+ camera_->generateConfiguration({ StreamRole::Raw });
+ StreamConfiguration &cfg = cameraConfig->at(0);
+ const StreamFormats &formats = cfg.formats();
+ std::vector<Size> supportedResolutions = formats.sizes(pixelFormat);
+
+ return supportedResolutions;
+}
+
+/*
+ * Initialize the format conversion map to translate from Android format
+ * identifier to libcamera pixel formats and fill in the list of supported
+ * stream configurations to be reported to the Android camera framework through
+ * the camera static metadata.
+ */
+int CameraCapabilities::initializeStreamConfigurations()
+{
+ /*
+ * Get the maximum output resolutions
+ * \todo Get this from the camera properties once defined
+ */
+ std::unique_ptr<CameraConfiguration> cameraConfig =
+ camera_->generateConfiguration({ StillCapture });
+ if (!cameraConfig) {
+ LOG(HAL, Error) << "Failed to get maximum resolution";
+ return -EINVAL;
+ }
+ StreamConfiguration &cfg = cameraConfig->at(0);
+
+ /*
+ * \todo JPEG - Adjust the maximum available resolution by taking the
+ * JPEG encoder requirements into account (alignment and aspect ratio).
+ */
+ const Size maxRes = cfg.size;
+ LOG(HAL, Debug) << "Maximum supported resolution: " << maxRes.toString();
+
+ /*
+ * Build the list of supported image resolutions.
+ *
+ * The resolutions listed in camera3Resolution are mandatory to be
+ * supported, up to the camera maximum resolution.
+ *
+ * Augment the list by adding resolutions calculated from the camera
+ * maximum one.
+ */
+ std::vector<Size> cameraResolutions;
+ std::copy_if(camera3Resolutions.begin(), camera3Resolutions.end(),
+ std::back_inserter(cameraResolutions),
+ [&](const Size &res) { return res < maxRes; });
+
+ /*
+ * The Camera3 specification suggests adding 1/2 and 1/4 of the maximum
+ * resolution.
+ */
+ for (unsigned int divider = 2;; divider <<= 1) {
+ Size derivedSize{
+ maxRes.width / divider,
+ maxRes.height / divider,
+ };
+
+ if (derivedSize.width < 320 ||
+ derivedSize.height < 240)
+ break;
+
+ cameraResolutions.push_back(derivedSize);
+ }
+ cameraResolutions.push_back(maxRes);
+
+ /* Remove duplicated entries from the list of supported resolutions. */
+ std::sort(cameraResolutions.begin(), cameraResolutions.end());
+ auto last = std::unique(cameraResolutions.begin(), cameraResolutions.end());
+ cameraResolutions.erase(last, cameraResolutions.end());
+
+ /*
+ * Build the list of supported camera formats.
+ *
+ * To each Android format a list of compatible libcamera formats is
+ * associated. The first libcamera format that tests successful is added
+ * to the format translation map used when configuring the streams.
+ * It is then tested against the list of supported camera resolutions to
+ * build the stream configuration map reported through the camera static
+ * metadata.
+ */
+ Size maxJpegSize;
+ for (const auto &format : camera3FormatsMap) {
+ int androidFormat = format.first;
+ const Camera3Format &camera3Format = format.second;
+ const std::vector<PixelFormat> &libcameraFormats =
+ camera3Format.libcameraFormats;
+
+ LOG(HAL, Debug) << "Trying to map Android format "
+ << camera3Format.name;
+
+ /*
+ * JPEG is always supported, either produced directly by the
+ * camera, or encoded in the HAL.
+ */
+ if (androidFormat == HAL_PIXEL_FORMAT_BLOB) {
+ formatsMap_[androidFormat] = formats::MJPEG;
+ LOG(HAL, Debug) << "Mapped Android format "
+ << camera3Format.name << " to "
+ << formats::MJPEG.toString()
+ << " (fixed mapping)";
+ continue;
+ }
+
+ /*
+ * Test the libcamera formats that can produce images
+ * compatible with the format defined by Android.
+ */
+ PixelFormat mappedFormat;
+ for (const PixelFormat &pixelFormat : libcameraFormats) {
+
+ LOG(HAL, Debug) << "Testing " << pixelFormat.toString();
+
+ /*
+ * The stream configuration size can be adjusted,
+ * not the pixel format.
+ *
+ * \todo This could be simplified once all pipeline
+ * handlers will report the StreamFormats list of
+ * supported formats.
+ */
+ cfg.pixelFormat = pixelFormat;
+
+ CameraConfiguration::Status status = cameraConfig->validate();
+ if (status != CameraConfiguration::Invalid &&
+ cfg.pixelFormat == pixelFormat) {
+ mappedFormat = pixelFormat;
+ break;
+ }
+ }
+
+ if (!mappedFormat.isValid()) {
+ /* If the format is not mandatory, skip it. */
+ if (!camera3Format.mandatory)
+ continue;
+
+ LOG(HAL, Error)
+ << "Failed to map mandatory Android format "
+ << camera3Format.name << " ("
+ << utils::hex(androidFormat) << "): aborting";
+ return -EINVAL;
+ }
+
+ /*
+ * Record the mapping and then proceed to generate the
+ * stream configurations map, by testing the image resolutions.
+ */
+ formatsMap_[androidFormat] = mappedFormat;
+ LOG(HAL, Debug) << "Mapped Android format "
+ << camera3Format.name << " to "
+ << mappedFormat.toString();
+
+ std::vector<Size> resolutions;
+ const PixelFormatInfo &info = PixelFormatInfo::info(mappedFormat);
+ if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)
+ resolutions = getRawResolutions(mappedFormat);
+ else
+ resolutions = getYUVResolutions(cameraConfig.get(),
+ mappedFormat,
+ cameraResolutions);
+
+ for (const Size &res : resolutions) {
+ streamConfigurations_.push_back({ res, androidFormat });
+
+ /*
+ * If the format is HAL_PIXEL_FORMAT_YCbCr_420_888
+ * from which JPEG is produced, add an entry for
+ * the JPEG stream.
+ *
+ * \todo Wire the JPEG encoder to query the supported
+ * sizes provided a list of formats it can encode.
+ *
+ * \todo Support JPEG streams produced by the camera
+ * natively.
+ */
+ if (androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ streamConfigurations_.push_back(
+ { res, HAL_PIXEL_FORMAT_BLOB });
+ maxJpegSize = std::max(maxJpegSize, res);
+ }
+ }
+
+ /*
+ * \todo Calculate the maximum JPEG buffer size by asking the
+ * encoder giving the maximum frame size required.
+ */
+ maxJpegBufferSize_ = maxJpegSize.width * maxJpegSize.height * 1.5;
+ }
+
+ LOG(HAL, Debug) << "Collected stream configuration map: ";
+ for (const auto &entry : streamConfigurations_)
+ LOG(HAL, Debug) << "{ " << entry.resolution.toString() << " - "
+ << utils::hex(entry.androidFormat) << " }";
+
+ return 0;
+}
+
+int CameraCapabilities::initializeStaticMetadata()
+{
+ staticMetadata_ = std::make_unique<CameraMetadata>(64, 1024);
+ if (!staticMetadata_->isValid()) {
+ LOG(HAL, Error) << "Failed to allocate static metadata";
+ staticMetadata_.reset();
+ return -EINVAL;
+ }
+
+ const ControlInfoMap &controlsInfo = camera_->controls();
+ const ControlList &properties = camera_->properties();
+
+ /* Color correction static metadata. */
+ {
+ std::vector<uint8_t> data;
+ data.reserve(3);
+ const auto &infoMap = controlsInfo.find(&controls::draft::ColorCorrectionAberrationMode);
+ if (infoMap != controlsInfo.end()) {
+ for (const auto &value : infoMap->second.values())
+ data.push_back(value.get<int32_t>());
+ } else {
+ data.push_back(ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF);
+ }
+ staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+ data);
+ }
+
+ /* Control static metadata. */
+ std::vector<uint8_t> aeAvailableAntiBandingModes = {
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+ aeAvailableAntiBandingModes);
+
+ std::vector<uint8_t> aeAvailableModes = {
+ ANDROID_CONTROL_AE_MODE_ON,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,
+ aeAvailableModes);
+
+ int64_t minFrameDurationNsec = -1;
+ int64_t maxFrameDurationNsec = -1;
+ const auto frameDurationsInfo = controlsInfo.find(&controls::FrameDurationLimits);
+ if (frameDurationsInfo != controlsInfo.end()) {
+ minFrameDurationNsec = frameDurationsInfo->second.min().get<int64_t>() * 1000;
+ maxFrameDurationNsec = frameDurationsInfo->second.max().get<int64_t>() * 1000;
+
+ /*
+ * Adjust the minimum frame duration to comply with Android
+ * requirements. The camera service mandates all preview/record
+ * streams to have a minimum frame duration < 33,366 milliseconds
+ * (see MAX_PREVIEW_RECORD_DURATION_NS in the camera service
+ * implementation).
+ *
+ * If we're close enough (+ 500 useconds) to that value, round
+ * the minimum frame duration of the camera to an accepted
+ * value.
+ */
+ static constexpr int64_t MAX_PREVIEW_RECORD_DURATION_NS = 1e9 / 29.97;
+ if (minFrameDurationNsec > MAX_PREVIEW_RECORD_DURATION_NS &&
+ minFrameDurationNsec < MAX_PREVIEW_RECORD_DURATION_NS + 500000)
+ minFrameDurationNsec = MAX_PREVIEW_RECORD_DURATION_NS - 1000;
+
+ /*
+ * The AE routine frame rate limits are computed using the frame
+ * duration limits, as libcamera clips the AE routine to the
+ * frame durations.
+ */
+ int32_t maxFps = std::round(1e9 / minFrameDurationNsec);
+ int32_t minFps = std::round(1e9 / maxFrameDurationNsec);
+ minFps = std::max(1, minFps);
+
+ /*
+ * Force rounding errors so that we have the proper frame
+ * durations for when we reuse these variables later
+ */
+ minFrameDurationNsec = 1e9 / maxFps;
+ maxFrameDurationNsec = 1e9 / minFps;
+
+ /*
+ * Register to the camera service {min, max} and {max, max}
+ * intervals as requested by the metadata documentation.
+ */
+ int32_t availableAeFpsTarget[] = {
+ minFps, maxFps, maxFps, maxFps
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+ availableAeFpsTarget);
+ }
+
+ std::vector<int32_t> aeCompensationRange = {
+ 0, 0,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
+ aeCompensationRange);
+
+ const camera_metadata_rational_t aeCompensationStep[] = {
+ { 0, 1 }
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,
+ aeCompensationStep);
+
+ std::vector<uint8_t> availableAfModes = {
+ ANDROID_CONTROL_AF_MODE_OFF,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ availableAfModes);
+
+ std::vector<uint8_t> availableEffects = {
+ ANDROID_CONTROL_EFFECT_MODE_OFF,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,
+ availableEffects);
+
+ std::vector<uint8_t> availableSceneModes = {
+ ANDROID_CONTROL_SCENE_MODE_DISABLED,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+ availableSceneModes);
+
+ std::vector<uint8_t> availableStabilizationModes = {
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+ availableStabilizationModes);
+
+ /*
+ * \todo Inspect the camera capabilities to report the available
+ * AWB modes. Default to AUTO as CTS tests require it.
+ */
+ std::vector<uint8_t> availableAwbModes = {
+ ANDROID_CONTROL_AWB_MODE_AUTO,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+ availableAwbModes);
+
+ std::vector<int32_t> availableMaxRegions = {
+ 0, 0, 0,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,
+ availableMaxRegions);
+
+ std::vector<uint8_t> sceneModesOverride = {
+ ANDROID_CONTROL_AE_MODE_ON,
+ ANDROID_CONTROL_AWB_MODE_AUTO,
+ ANDROID_CONTROL_AF_MODE_OFF,
+ };
+ staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
+ sceneModesOverride);
+
+ uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
+ aeLockAvailable);
+
+ uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
+ staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
+ awbLockAvailable);
+
+ char availableControlModes = ANDROID_CONTROL_MODE_AUTO;
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,
+ availableControlModes);
+
+ /* JPEG static metadata. */
+
+ /*
+ * Create the list of supported thumbnail sizes by inspecting the
+ * available JPEG resolutions collected in streamConfigurations_ and
+ * generate one entry for each aspect ratio.
+ *
+ * The JPEG thumbnailer can freely scale, so pick an arbitrary
+ * (160, 160) size as the bounding rectangle, which is then cropped to
+ * the different supported aspect ratios.
+ */
+ constexpr Size maxJpegThumbnail(160, 160);
+ std::vector<Size> thumbnailSizes;
+ thumbnailSizes.push_back({ 0, 0 });
+ for (const auto &entry : streamConfigurations_) {
+ if (entry.androidFormat != HAL_PIXEL_FORMAT_BLOB)
+ continue;
+
+ Size thumbnailSize = maxJpegThumbnail
+ .boundedToAspectRatio({ entry.resolution.width,
+ entry.resolution.height });
+ thumbnailSizes.push_back(thumbnailSize);
+ }
+
+ std::sort(thumbnailSizes.begin(), thumbnailSizes.end());
+ auto last = std::unique(thumbnailSizes.begin(), thumbnailSizes.end());
+ thumbnailSizes.erase(last, thumbnailSizes.end());
+
+ /* Transform sizes in to a list of integers that can be consumed. */
+ std::vector<int32_t> thumbnailEntries;
+ thumbnailEntries.reserve(thumbnailSizes.size() * 2);
+ for (const auto &size : thumbnailSizes) {
+ thumbnailEntries.push_back(size.width);
+ thumbnailEntries.push_back(size.height);
+ }
+ staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+ thumbnailEntries);
+
+ staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, maxJpegBufferSize_);
+
+ /* Sensor static metadata. */
+ std::array<int32_t, 2> pixelArraySize;
+ {
+ const Size &size = properties.get(properties::PixelArraySize);
+ pixelArraySize[0] = size.width;
+ pixelArraySize[1] = size.height;
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+ pixelArraySize);
+ }
+
+ if (properties.contains(properties::UnitCellSize)) {
+ const Size &cellSize = properties.get<Size>(properties::UnitCellSize);
+ std::array<float, 2> physicalSize{
+ cellSize.width * pixelArraySize[0] / 1e6f,
+ cellSize.height * pixelArraySize[1] / 1e6f
+ };
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
+ physicalSize);
+ }
+
+ {
+ const Span<const Rectangle> &rects =
+ properties.get(properties::PixelArrayActiveAreas);
+ std::vector<int32_t> data{
+ static_cast<int32_t>(rects[0].x),
+ static_cast<int32_t>(rects[0].y),
+ static_cast<int32_t>(rects[0].width),
+ static_cast<int32_t>(rects[0].height),
+ };
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+ data);
+ }
+
+ int32_t sensitivityRange[] = {
+ 32, 2400,
+ };
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
+ sensitivityRange);
+
+ /* Report the color filter arrangement if the camera reports it. */
+ if (properties.contains(properties::draft::ColorFilterArrangement)) {
+ uint8_t filterArr = properties.get(properties::draft::ColorFilterArrangement);
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+ filterArr);
+ }
+
+ const auto &exposureInfo = controlsInfo.find(&controls::ExposureTime);
+ if (exposureInfo != controlsInfo.end()) {
+ int64_t exposureTimeRange[2] = {
+ exposureInfo->second.min().get<int32_t>() * 1000LL,
+ exposureInfo->second.max().get<int32_t>() * 1000LL,
+ };
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
+ exposureTimeRange, 2);
+ }
+
+ staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, orientation_);
+
+ std::vector<int32_t> testPatternModes = {
+ ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
+ };
+ const auto &testPatternsInfo =
+ controlsInfo.find(&controls::draft::TestPatternMode);
+ if (testPatternsInfo != controlsInfo.end()) {
+ const auto &values = testPatternsInfo->second.values();
+ ASSERT(!values.empty());
+ for (const auto &value : values) {
+ switch (value.get<int32_t>()) {
+ case controls::draft::TestPatternModeOff:
+ /*
+ * ANDROID_SENSOR_TEST_PATTERN_MODE_OFF is
+ * already in testPatternModes.
+ */
+ break;
+
+ case controls::draft::TestPatternModeSolidColor:
+ testPatternModes.push_back(
+ ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
+ break;
+
+ case controls::draft::TestPatternModeColorBars:
+ testPatternModes.push_back(
+ ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS);
+ break;
+
+ case controls::draft::TestPatternModeColorBarsFadeToGray:
+ testPatternModes.push_back(
+ ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY);
+ break;
+
+ case controls::draft::TestPatternModePn9:
+ testPatternModes.push_back(
+ ANDROID_SENSOR_TEST_PATTERN_MODE_PN9);
+ break;
+
+ case controls::draft::TestPatternModeCustom1:
+ /* We don't support this yet. */
+ break;
+
+ default:
+ LOG(HAL, Error) << "Unknown test pattern mode: "
+ << value.get<int32_t>();
+ continue;
+ }
+ }
+ }
+ staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+ testPatternModes);
+
+ uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
+ timestampSource);
+
+ if (maxFrameDurationNsec > 0)
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+ maxFrameDurationNsec);
+
+ /* Statistics static metadata. */
+ uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+ staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+ faceDetectMode);
+
+ int32_t maxFaceCount = 0;
+ staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
+ maxFaceCount);
+
+ {
+ std::vector<uint8_t> data;
+ data.reserve(2);
+ const auto &infoMap = controlsInfo.find(&controls::draft::LensShadingMapMode);
+ if (infoMap != controlsInfo.end()) {
+ for (const auto &value : infoMap->second.values())
+ data.push_back(value.get<int32_t>());
+ } else {
+ data.push_back(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF);
+ }
+ staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
+ data);
+ }
+
+ /* Sync static metadata. */
+ int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
+ staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, maxLatency);
+
+ /* Flash static metadata. */
+ char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
+ staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,
+ flashAvailable);
+
+ /* Lens static metadata. */
+ std::vector<float> lensApertures = {
+ 2.53 / 100,
+ };
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
+ lensApertures);
+
+ uint8_t lensFacing;
+ switch (facing_) {
+ default:
+ case CAMERA_FACING_FRONT:
+ lensFacing = ANDROID_LENS_FACING_FRONT;
+ break;
+ case CAMERA_FACING_BACK:
+ lensFacing = ANDROID_LENS_FACING_BACK;
+ break;
+ case CAMERA_FACING_EXTERNAL:
+ lensFacing = ANDROID_LENS_FACING_EXTERNAL;
+ break;
+ }
+ staticMetadata_->addEntry(ANDROID_LENS_FACING, lensFacing);
+
+ std::vector<float> lensFocalLengths = {
+ 1,
+ };
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+ lensFocalLengths);
+
+ std::vector<uint8_t> opticalStabilizations = {
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
+ };
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+ opticalStabilizations);
+
+ float hypeFocalDistance = 0;
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
+ hypeFocalDistance);
+
+ float minFocusDistance = 0;
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
+ minFocusDistance);
+
+ /* Noise reduction modes. */
+ {
+ std::vector<uint8_t> data;
+ data.reserve(5);
+ const auto &infoMap = controlsInfo.find(&controls::draft::NoiseReductionMode);
+ if (infoMap != controlsInfo.end()) {
+ for (const auto &value : infoMap->second.values())
+ data.push_back(value.get<int32_t>());
+ } else {
+ data.push_back(ANDROID_NOISE_REDUCTION_MODE_OFF);
+ }
+ staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+ data);
+ }
+
+ /* Scaler static metadata. */
+
+ /*
+ * \todo The digital zoom factor is a property that depends on the
+ * desired output configuration and the sensor frame size input to the
+ * ISP. This information is not available to the Android HAL, not at
+ * initialization time at least.
+ *
+ * As a workaround rely on pipeline handlers initializing the
+ * ScalerCrop control with the camera default configuration and use the
+ * maximum and minimum crop rectangles to calculate the digital zoom
+ * factor.
+ */
+ float maxZoom = 1.0f;
+ const auto scalerCrop = controlsInfo.find(&controls::ScalerCrop);
+ if (scalerCrop != controlsInfo.end()) {
+ Rectangle min = scalerCrop->second.min().get<Rectangle>();
+ Rectangle max = scalerCrop->second.max().get<Rectangle>();
+ maxZoom = std::min(1.0f * max.width / min.width,
+ 1.0f * max.height / min.height);
+ }
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+ maxZoom);
+
+ std::vector<uint32_t> availableStreamConfigurations;
+ availableStreamConfigurations.reserve(streamConfigurations_.size() * 4);
+ for (const auto &entry : streamConfigurations_) {
+ availableStreamConfigurations.push_back(entry.androidFormat);
+ availableStreamConfigurations.push_back(entry.resolution.width);
+ availableStreamConfigurations.push_back(entry.resolution.height);
+ availableStreamConfigurations.push_back(
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
+ }
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+ availableStreamConfigurations);
+
+ std::vector<int64_t> availableStallDurations = {
+ ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
+ };
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
+ availableStallDurations);
+
+ /* Use the minimum frame duration for all the YUV/RGB formats. */
+ if (minFrameDurationNsec > 0) {
+ std::vector<int64_t> minFrameDurations;
+ minFrameDurations.reserve(streamConfigurations_.size() * 4);
+ for (const auto &entry : streamConfigurations_) {
+ minFrameDurations.push_back(entry.androidFormat);
+ minFrameDurations.push_back(entry.resolution.width);
+ minFrameDurations.push_back(entry.resolution.height);
+ minFrameDurations.push_back(minFrameDurationNsec);
+ }
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+ minFrameDurations);
+ }
+
+ uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
+ staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, croppingType);
+
+ /* Info static metadata. */
+ uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
+ staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
+ supportedHWLevel);
+
+ /* Request static metadata. */
+ int32_t partialResultCount = 1;
+ staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+ partialResultCount);
+
+ {
+ /* Default the value to 2 if not reported by the camera. */
+ uint8_t maxPipelineDepth = 2;
+ const auto &infoMap = controlsInfo.find(&controls::draft::PipelineDepth);
+ if (infoMap != controlsInfo.end())
+ maxPipelineDepth = infoMap->second.max().get<int32_t>();
+ staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
+ maxPipelineDepth);
+ }
+
+ /* LIMITED does not support reprocessing. */
+ uint32_t maxNumInputStreams = 0;
+ staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
+ maxNumInputStreams);
+
+ std::vector<uint8_t> availableCapabilities = {
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
+ };
+
+ /* Report if camera supports RAW. */
+ bool rawStreamAvailable = false;
+ std::unique_ptr<CameraConfiguration> cameraConfig =
+ camera_->generateConfiguration({ StreamRole::Raw });
+ if (cameraConfig && !cameraConfig->empty()) {
+ const PixelFormatInfo &info =
+ PixelFormatInfo::info(cameraConfig->at(0).pixelFormat);
+ /* Only advertise RAW support if RAW16 is possible. */
+ if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW &&
+ info.bitsPerPixel == 16) {
+ rawStreamAvailable = true;
+ availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
+ }
+ }
+
+ /* Number of { RAW, YUV, JPEG } supported output streams */
+ int32_t numOutStreams[] = { rawStreamAvailable, 2, 1 };
+ staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
+ numOutStreams);
+
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+ availableCapabilities);
+
+ std::vector<int32_t> availableCharacteristicsKeys = {
+ ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+ ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+ ANDROID_CONTROL_AE_AVAILABLE_MODES,
+ ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+ ANDROID_CONTROL_AE_COMPENSATION_RANGE,
+ ANDROID_CONTROL_AE_COMPENSATION_STEP,
+ ANDROID_CONTROL_AE_LOCK_AVAILABLE,
+ ANDROID_CONTROL_AF_AVAILABLE_MODES,
+ ANDROID_CONTROL_AVAILABLE_EFFECTS,
+ ANDROID_CONTROL_AVAILABLE_MODES,
+ ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+ ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+ ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+ ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
+ ANDROID_CONTROL_MAX_REGIONS,
+ ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
+ ANDROID_FLASH_INFO_AVAILABLE,
+ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
+ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+ ANDROID_JPEG_MAX_SIZE,
+ ANDROID_LENS_FACING,
+ ANDROID_LENS_INFO_AVAILABLE_APERTURES,
+ ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+ ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+ ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
+ ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
+ ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+ ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
+ ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
+ ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+ ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
+ ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+ ANDROID_SCALER_CROPPING_TYPE,
+ ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+ ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+ ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
+ ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+ ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
+ ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+ ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
+ ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
+ ANDROID_SENSOR_ORIENTATION,
+ ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+ ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
+ ANDROID_SYNC_MAX_LATENCY,
+ };
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+ availableCharacteristicsKeys);
+
+ std::vector<int32_t> availableRequestKeys = {
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ ANDROID_CONTROL_AE_LOCK,
+ ANDROID_CONTROL_AE_MODE,
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ ANDROID_CONTROL_AF_MODE,
+ ANDROID_CONTROL_AF_TRIGGER,
+ ANDROID_CONTROL_AWB_LOCK,
+ ANDROID_CONTROL_AWB_MODE,
+ ANDROID_CONTROL_CAPTURE_INTENT,
+ ANDROID_CONTROL_EFFECT_MODE,
+ ANDROID_CONTROL_MODE,
+ ANDROID_CONTROL_SCENE_MODE,
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ ANDROID_FLASH_MODE,
+ ANDROID_JPEG_ORIENTATION,
+ ANDROID_JPEG_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_SIZE,
+ ANDROID_LENS_APERTURE,
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+ ANDROID_NOISE_REDUCTION_MODE,
+ ANDROID_SCALER_CROP_REGION,
+ ANDROID_STATISTICS_FACE_DETECT_MODE
+ };
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
+ availableRequestKeys);
+
+ std::vector<int32_t> availableResultKeys = {
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ ANDROID_CONTROL_AE_LOCK,
+ ANDROID_CONTROL_AE_MODE,
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+ ANDROID_CONTROL_AE_STATE,
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ ANDROID_CONTROL_AF_MODE,
+ ANDROID_CONTROL_AF_STATE,
+ ANDROID_CONTROL_AF_TRIGGER,
+ ANDROID_CONTROL_AWB_LOCK,
+ ANDROID_CONTROL_AWB_MODE,
+ ANDROID_CONTROL_AWB_STATE,
+ ANDROID_CONTROL_CAPTURE_INTENT,
+ ANDROID_CONTROL_EFFECT_MODE,
+ ANDROID_CONTROL_MODE,
+ ANDROID_CONTROL_SCENE_MODE,
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ ANDROID_FLASH_MODE,
+ ANDROID_FLASH_STATE,
+ ANDROID_JPEG_GPS_COORDINATES,
+ ANDROID_JPEG_GPS_PROCESSING_METHOD,
+ ANDROID_JPEG_GPS_TIMESTAMP,
+ ANDROID_JPEG_ORIENTATION,
+ ANDROID_JPEG_QUALITY,
+ ANDROID_JPEG_SIZE,
+ ANDROID_JPEG_THUMBNAIL_QUALITY,
+ ANDROID_JPEG_THUMBNAIL_SIZE,
+ ANDROID_LENS_APERTURE,
+ ANDROID_LENS_FOCAL_LENGTH,
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+ ANDROID_LENS_STATE,
+ ANDROID_NOISE_REDUCTION_MODE,
+ ANDROID_REQUEST_PIPELINE_DEPTH,
+ ANDROID_SCALER_CROP_REGION,
+ ANDROID_SENSOR_EXPOSURE_TIME,
+ ANDROID_SENSOR_FRAME_DURATION,
+ ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
+ ANDROID_SENSOR_TEST_PATTERN_MODE,
+ ANDROID_SENSOR_TIMESTAMP,
+ ANDROID_STATISTICS_FACE_DETECT_MODE,
+ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
+ ANDROID_STATISTICS_SCENE_FLICKER,
+ };
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
+ availableResultKeys);
+
+ if (!staticMetadata_->isValid()) {
+ LOG(HAL, Error) << "Failed to construct static metadata";
+ staticMetadata_.reset();
+ return -EINVAL;
+ }
+
+ if (staticMetadata_->resized()) {
+ auto [entryCount, dataCount] = staticMetadata_->usage();
+ LOG(HAL, Info)
+ << "Static metadata resized: " << entryCount
+ << " entries and " << dataCount << " bytes used";
+ }
+
+ return 0;
+}
+
+/*
+ * Translate an Android HAL pixel format code to a libcamera pixel format.
+ *
+ * The lookup uses the Android-to-libcamera translation map (formatsMap_)
+ * populated when the capabilities were initialized.
+ *
+ * Return the mapped libcamera::PixelFormat, or an invalid
+ * (default-constructed) PixelFormat if \a format has no mapping.
+ */
+PixelFormat CameraCapabilities::toPixelFormat(int format) const
+{
+	auto it = formatsMap_.find(format);
+	if (it == formatsMap_.end()) {
+		LOG(HAL, Error) << "Requested format " << utils::hex(format)
+				<< " not supported";
+		/* An invalid PixelFormat signals the failure to the caller. */
+		return PixelFormat();
+	}
+
+	return it->second;
+}
+
+/*
+ * Assemble the default capture request template for the PREVIEW use case.
+ *
+ * The template pre-populates every control listed in
+ * ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS with a conservative default
+ * (automatic 3A, all optional processing off).
+ *
+ * Return the populated CameraMetadata, or nullptr on allocation failure or
+ * if the static metadata lacks the mandatory FPS range entry.
+ */
+std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplatePreview() const
+{
+	/*
+	 * \todo Keep this in sync with the actual number of entries.
+	 * Currently: 20 entries, 35 bytes
+	 *
+	 * NOTE(review): the counts stated above (20 entries, 35 bytes) match
+	 * neither the allocation below (21, 36) nor the 18 addEntry() calls
+	 * in this function - verify and resync all three.
+	 */
+	auto requestTemplate = std::make_unique<CameraMetadata>(21, 36);
+	if (!requestTemplate->isValid()) {
+		return nullptr;
+	}
+
+	/* Get the FPS range registered in the static metadata. */
+	camera_metadata_ro_entry_t entry;
+	bool found = staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+					       &entry);
+	if (!found) {
+		LOG(HAL, Error) << "Cannot create capture template without FPS range";
+		return nullptr;
+	}
+
+	/*
+	 * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata
+	 * has been assembled as {{min, max} {max, max}}.
+	 *
+	 * Preview uses the first (wide) range: the leading two i32 values.
+	 */
+	requestTemplate->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+				  entry.data.i32, 2);
+
+	uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
+	requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE, aeMode);
+
+	int32_t aeExposureCompensation = 0;
+	requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+				  aeExposureCompensation);
+
+	uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
+	requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+				  aePrecaptureTrigger);
+
+	uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
+	requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK, aeLock);
+
+	uint8_t aeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
+	requestTemplate->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+				  aeAntibandingMode);
+
+	uint8_t afMode = ANDROID_CONTROL_AF_MODE_OFF;
+	requestTemplate->addEntry(ANDROID_CONTROL_AF_MODE, afMode);
+
+	uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
+	requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER, afTrigger);
+
+	uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
+	requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE, awbMode);
+
+	uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
+	requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK, awbLock);
+
+	uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
+	requestTemplate->addEntry(ANDROID_FLASH_MODE, flashMode);
+
+	uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+	requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,
+				  faceDetectMode);
+
+	uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF;
+	requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE,
+				  noiseReduction);
+
+	uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
+	requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+				  aberrationMode);
+
+	uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
+	requestTemplate->addEntry(ANDROID_CONTROL_MODE, controlMode);
+
+	/*
+	 * NOTE(review): 2.53 / 100 yields an f-number of ~0.025, which is
+	 * not a physically plausible aperture - confirm the intended value.
+	 */
+	float lensAperture = 2.53 / 100;
+	requestTemplate->addEntry(ANDROID_LENS_APERTURE, lensAperture);
+
+	uint8_t opticalStabilization = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+	requestTemplate->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+				  opticalStabilization);
+
+	uint8_t captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+	requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,
+				  captureIntent);
+
+	return requestTemplate;
+}
+
+/*
+ * Assemble the default capture request template for the VIDEO use case.
+ *
+ * The template is derived from the preview template, with only the target
+ * FPS range overridden to a fixed value.
+ *
+ * Return the populated CameraMetadata, or nullptr if the preview template
+ * could not be created.
+ */
+std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplateVideo() const
+{
+	std::unique_ptr<CameraMetadata> previewTemplate = requestTemplatePreview();
+	if (!previewTemplate)
+		return nullptr;
+
+	/*
+	 * The video template requires a fixed FPS range. Everything else
+	 * stays the same as the preview template.
+	 */
+	camera_metadata_ro_entry_t entry;
+	/*
+	 * The getEntry() result is deliberately unchecked here:
+	 * requestTemplatePreview() has already returned nullptr above if
+	 * this entry is missing from the static metadata.
+	 */
+	staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+				  &entry);
+
+	/*
+	 * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata
+	 * has been assembled as {{min, max} {max, max}}.
+	 *
+	 * Video uses the second (fixed) range: i32 values at offset 2.
+	 */
+	previewTemplate->updateEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+				     entry.data.i32 + 2, 2);
+
+	return previewTemplate;
+}
diff --git a/src/android/camera_capabilities.h b/src/android/camera_capabilities.h
new file mode 100644
index 00000000..4f5be825
--- /dev/null
+++ b/src/android/camera_capabilities.h
@@ -0,0 +1,66 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_capabilities.h - Camera static properties manager
+ */
+#ifndef __ANDROID_CAMERA_CAPABILITIES_H__
+#define __ANDROID_CAMERA_CAPABILITIES_H__
+
+#include <map>
+#include <memory>
+#include <vector>
+
+#include <libcamera/base/class.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/formats.h>
+#include <libcamera/geometry.h>
+
+#include "camera_metadata.h"
+
+/*
+ * The CameraCapabilities class holds the static properties of a camera as
+ * exposed to the Android camera framework: the static metadata pack, the
+ * Android-to-libcamera format translation map and the supported stream
+ * configurations.
+ */
+class CameraCapabilities
+{
+public:
+	CameraCapabilities() = default;
+
+	/*
+	 * Initialize the capabilities for \a camera, using the Android
+	 * \a orientation (clockwise degrees) and \a facing codes.
+	 * Returns 0 on success or a negative error code otherwise.
+	 */
+	int initialize(std::shared_ptr<libcamera::Camera> camera,
+		       int orientation, int facing);
+
+	/* Non-owning accessor; valid only after a successful initialize(). */
+	CameraMetadata *staticMetadata() const { return staticMetadata_.get(); }
+	/*
+	 * Translate an Android HAL format code; returns an invalid
+	 * PixelFormat if the code has no mapping.
+	 */
+	libcamera::PixelFormat toPixelFormat(int format) const;
+	/* Maximum JPEG output buffer size, in bytes. */
+	unsigned int maxJpegBufferSize() const { return maxJpegBufferSize_; }
+
+	/* Default capture request templates for the Android framework. */
+	std::unique_ptr<CameraMetadata> requestTemplatePreview() const;
+	std::unique_ptr<CameraMetadata> requestTemplateVideo() const;
+
+private:
+	LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraCapabilities)
+
+	/* One entry of the stream configuration map reported to Android. */
+	struct Camera3StreamConfiguration {
+		libcamera::Size resolution;
+		int androidFormat;
+	};
+
+	/*
+	 * Probe the resolutions supported for \a pixelFormat by validating
+	 * candidate configurations against the camera.
+	 */
+	std::vector<libcamera::Size>
+	getYUVResolutions(libcamera::CameraConfiguration *cameraConfig,
+			  const libcamera::PixelFormat &pixelFormat,
+			  const std::vector<libcamera::Size> &resolutions);
+	std::vector<libcamera::Size>
+	getRawResolutions(const libcamera::PixelFormat &pixelFormat);
+	/* Build formatsMap_ and streamConfigurations_. */
+	int initializeStreamConfigurations();
+
+	/* Build staticMetadata_. */
+	int initializeStaticMetadata();
+
+	std::shared_ptr<libcamera::Camera> camera_;
+
+	/* Android facing and orientation codes, set by initialize(). */
+	int facing_;
+	int orientation_;
+
+	std::vector<Camera3StreamConfiguration> streamConfigurations_;
+	/* Android HAL format code -> libcamera pixel format. */
+	std::map<int, libcamera::PixelFormat> formatsMap_;
+	std::unique_ptr<CameraMetadata> staticMetadata_;
+	unsigned int maxJpegBufferSize_;
+};
+
+#endif /* __ANDROID_CAMERA_CAPABILITIES_H__ */
diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
index 751699cd..678cde23 100644
--- a/src/android/camera_device.cpp
+++ b/src/android/camera_device.cpp
@@ -6,174 +6,212 @@
*/
#include "camera_device.h"
+#include "camera_hal_config.h"
#include "camera_ops.h"
+#include "post_processor.h"
+#include <algorithm>
+#include <fstream>
#include <sys/mman.h>
-#include <tuple>
+#include <unistd.h>
#include <vector>
+#include <libcamera/base/log.h>
+#include <libcamera/base/thread.h>
+#include <libcamera/base/utils.h>
+
+#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/formats.h>
#include <libcamera/property_ids.h>
-#include "libcamera/internal/formats.h"
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/utils.h"
-
-#include "camera_metadata.h"
#include "system/graphics.h"
-#include "jpeg/encoder_libjpeg.h"
-#include "jpeg/exif.h"
-
using namespace libcamera;
+LOG_DECLARE_CATEGORY(HAL)
+
namespace {
/*
- * \var camera3Resolutions
- * \brief The list of image resolutions defined as mandatory to be supported by
- * the Android Camera3 specification
+ * \struct Camera3StreamConfig
+ * \brief Data to store StreamConfiguration associated with camera3_stream(s)
+ * \var streams List of pairs, each associating a stream requested by the
+ * Android HAL client with the CameraStream::Type assigned to it
+ * \var config StreamConfiguration for streams
*/
-const std::vector<Size> camera3Resolutions = {
- { 320, 240 },
- { 640, 480 },
- { 1280, 720 },
- { 1920, 1080 }
-};
+struct Camera3StreamConfig {
+ struct Camera3Stream {
+ camera3_stream_t *stream;
+ CameraStream::Type type;
+ };
-/*
- * \struct Camera3Format
- * \brief Data associated with an Android format identifier
- * \var libcameraFormats List of libcamera pixel formats compatible with the
- * Android format
- * \var name The human-readable representation of the Android format code
- */
-struct Camera3Format {
- std::vector<PixelFormat> libcameraFormats;
- bool mandatory;
- const char *name;
+ std::vector<Camera3Stream> streams;
+ StreamConfiguration config;
};
/*
- * \var camera3FormatsMap
- * \brief Associate Android format code with ancillary data
+ * Reorder the configurations so that libcamera::Camera can accept them as much
+ * as possible. The sort rule is as follows.
+ * 1.) The configuration for NV12 request whose resolution is the largest.
+ * 2.) The configuration for JPEG request.
+ * 3.) Others. Larger resolutions and different formats are put earlier.
*/
-const std::map<int, const Camera3Format> camera3FormatsMap = {
- {
- HAL_PIXEL_FORMAT_BLOB, {
- { formats::MJPEG },
- true,
- "BLOB"
- }
- }, {
- HAL_PIXEL_FORMAT_YCbCr_420_888, {
- { formats::NV12, formats::NV21 },
- true,
- "YCbCr_420_888"
+void sortCamera3StreamConfigs(std::vector<Camera3StreamConfig> &unsortedConfigs,
+ const camera3_stream_t *jpegStream)
+{
+ const Camera3StreamConfig *jpegConfig = nullptr;
+
+ std::map<PixelFormat, std::vector<const Camera3StreamConfig *>> formatToConfigs;
+ for (const auto &streamConfig : unsortedConfigs) {
+ if (jpegStream && !jpegConfig) {
+ const auto &streams = streamConfig.streams;
+ if (std::find_if(streams.begin(), streams.end(),
+ [jpegStream](const auto &stream) {
+ return stream.stream == jpegStream;
+ }) != streams.end()) {
+ jpegConfig = &streamConfig;
+ continue;
+ }
}
- }, {
+ formatToConfigs[streamConfig.config.pixelFormat].push_back(&streamConfig);
+ }
+
+ if (jpegStream && !jpegConfig)
+ LOG(HAL, Fatal) << "No Camera3StreamConfig is found for JPEG";
+
+ for (auto &fmt : formatToConfigs) {
+ auto &streamConfigs = fmt.second;
+
+ /* Sorted by resolution. Smaller is put first. */
+ std::sort(streamConfigs.begin(), streamConfigs.end(),
+ [](const auto *streamConfigA, const auto *streamConfigB) {
+ const Size &sizeA = streamConfigA->config.size;
+ const Size &sizeB = streamConfigB->config.size;
+ return sizeA < sizeB;
+ });
+ }
+
+ std::vector<Camera3StreamConfig> sortedConfigs;
+ sortedConfigs.reserve(unsortedConfigs.size());
+
+ /*
+ * NV12 is the most prioritized format. Put the configuration with NV12
+ * and the largest resolution first.
+ */
+ const auto nv12It = formatToConfigs.find(formats::NV12);
+ if (nv12It != formatToConfigs.end()) {
+ auto &nv12Configs = nv12It->second;
+ const Camera3StreamConfig *nv12Largest = nv12Configs.back();
+
/*
- * \todo Translate IMPLEMENTATION_DEFINED inspecting the gralloc
- * usage flag. For now, copy the YCbCr_420 configuration.
+ * If JPEG will be created from NV12 and the size is larger than
+ * the largest NV12 configurations, then put the NV12
+ * configuration for JPEG first.
*/
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, {
- { formats::NV12, formats::NV21 },
- true,
- "IMPLEMENTATION_DEFINED"
- }
- }, {
- HAL_PIXEL_FORMAT_RAW10, {
- {
- formats::SBGGR10_CSI2P,
- formats::SGBRG10_CSI2P,
- formats::SGRBG10_CSI2P,
- formats::SRGGB10_CSI2P
- },
- false,
- "RAW10"
- }
- }, {
- HAL_PIXEL_FORMAT_RAW12, {
- {
- formats::SBGGR12_CSI2P,
- formats::SGBRG12_CSI2P,
- formats::SGRBG12_CSI2P,
- formats::SRGGB12_CSI2P
- },
- false,
- "RAW12"
- }
- }, {
- HAL_PIXEL_FORMAT_RAW16, {
- {
- formats::SBGGR16,
- formats::SGBRG16,
- formats::SGRBG16,
- formats::SRGGB16
- },
- false,
- "RAW16"
+ if (jpegConfig && jpegConfig->config.pixelFormat == formats::NV12) {
+ const Size &nv12SizeForJpeg = jpegConfig->config.size;
+ const Size &nv12LargestSize = nv12Largest->config.size;
+
+ if (nv12LargestSize < nv12SizeForJpeg) {
+ LOG(HAL, Debug) << "Insert " << jpegConfig->config.toString();
+ sortedConfigs.push_back(std::move(*jpegConfig));
+ jpegConfig = nullptr;
+ }
}
- }, {
- HAL_PIXEL_FORMAT_RAW_OPAQUE, {
- {
- formats::SBGGR10_IPU3,
- formats::SGBRG10_IPU3,
- formats::SGRBG10_IPU3,
- formats::SRGGB10_IPU3
- },
- false,
- "RAW_OPAQUE"
+
+ LOG(HAL, Debug) << "Insert " << nv12Largest->config.toString();
+ sortedConfigs.push_back(*nv12Largest);
+ nv12Configs.pop_back();
+
+ if (nv12Configs.empty())
+ formatToConfigs.erase(nv12It);
+ }
+
+ /* If the configuration for JPEG is there, then put it. */
+ if (jpegConfig) {
+ LOG(HAL, Debug) << "Insert " << jpegConfig->config.toString();
+ sortedConfigs.push_back(std::move(*jpegConfig));
+ jpegConfig = nullptr;
+ }
+
+ /*
+ * Put configurations with different formats and larger resolutions
+ * earlier.
+ */
+ while (!formatToConfigs.empty()) {
+ for (auto it = formatToConfigs.begin(); it != formatToConfigs.end();) {
+ auto &configs = it->second;
+ LOG(HAL, Debug) << "Insert " << configs.back()->config.toString();
+ sortedConfigs.push_back(*configs.back());
+ configs.pop_back();
+
+ if (configs.empty())
+ it = formatToConfigs.erase(it);
+ else
+ it++;
}
- },
-};
+ }
-} /* namespace */
+ ASSERT(sortedConfigs.size() == unsortedConfigs.size());
-LOG_DECLARE_CATEGORY(HAL);
+ unsortedConfigs = sortedConfigs;
+}
-class MappedCamera3Buffer : public MappedBuffer
+const char *rotationToString(int rotation)
{
-public:
- MappedCamera3Buffer(const buffer_handle_t camera3buffer, int flags);
-};
+ switch (rotation) {
+ case CAMERA3_STREAM_ROTATION_0:
+ return "0";
+ case CAMERA3_STREAM_ROTATION_90:
+ return "90";
+ case CAMERA3_STREAM_ROTATION_180:
+ return "180";
+ case CAMERA3_STREAM_ROTATION_270:
+ return "270";
+ }
+ return "INVALID";
+}
-MappedCamera3Buffer::MappedCamera3Buffer(const buffer_handle_t camera3buffer,
- int flags)
+#if defined(OS_CHROMEOS)
+/*
+ * Check whether the crop_rotate_scale_degrees values for all streams in
+ * the list are valid according to the Chrome OS camera HAL API.
+ */
+bool validateCropRotate(const camera3_stream_configuration_t &streamList)
{
- maps_.reserve(camera3buffer->numFds);
- error_ = 0;
-
- for (int i = 0; i < camera3buffer->numFds; i++) {
- if (camera3buffer->data[i] == -1)
- continue;
-
- off_t length = lseek(camera3buffer->data[i], 0, SEEK_END);
- if (length < 0) {
- error_ = -errno;
- LOG(HAL, Error) << "Failed to query plane length";
+ ASSERT(streamList.num_streams > 0);
+ const int cropRotateScaleDegrees =
+ streamList.streams[0]->crop_rotate_scale_degrees;
+ for (unsigned int i = 0; i < streamList.num_streams; ++i) {
+ const camera3_stream_t &stream = *streamList.streams[i];
+
+ switch (stream.crop_rotate_scale_degrees) {
+ case CAMERA3_STREAM_ROTATION_0:
+ case CAMERA3_STREAM_ROTATION_90:
+ case CAMERA3_STREAM_ROTATION_270:
break;
- }
- void *address = mmap(nullptr, length, flags, MAP_SHARED,
- camera3buffer->data[i], 0);
- if (address == MAP_FAILED) {
- error_ = -errno;
- LOG(HAL, Error) << "Failed to mmap plane";
- break;
+ /* 180° rotation is specified by Chrome OS as invalid. */
+ case CAMERA3_STREAM_ROTATION_180:
+ default:
+ LOG(HAL, Error) << "Invalid crop_rotate_scale_degrees: "
+ << stream.crop_rotate_scale_degrees;
+ return false;
}
- maps_.emplace_back(static_cast<uint8_t *>(address),
- static_cast<size_t>(length));
+ if (cropRotateScaleDegrees != stream.crop_rotate_scale_degrees) {
+ LOG(HAL, Error) << "crop_rotate_scale_degrees in all "
+ << "streams are not identical";
+ return false;
+ }
}
-}
-CameraStream::CameraStream(PixelFormat format, Size size,
- unsigned int index, Encoder *encoder)
- : format_(format), size_(size), index_(index), encoder_(encoder)
-{
+ return true;
}
+#endif
+
+} /* namespace */
/*
* \struct Camera3RequestDescriptor
@@ -183,16 +221,30 @@ CameraStream::CameraStream(PixelFormat format, Size size,
*/
CameraDevice::Camera3RequestDescriptor::Camera3RequestDescriptor(
- unsigned int frameNumber, unsigned int numBuffers)
- : frameNumber(frameNumber), numBuffers(numBuffers)
+ Camera *camera, const camera3_capture_request_t *camera3Request)
{
- buffers = new camera3_stream_buffer_t[numBuffers];
- frameBuffers.reserve(numBuffers);
-}
+ frameNumber_ = camera3Request->frame_number;
-CameraDevice::Camera3RequestDescriptor::~Camera3RequestDescriptor()
-{
- delete[] buffers;
+ /* Copy the camera3 request stream information for later access. */
+ const uint32_t numBuffers = camera3Request->num_output_buffers;
+ buffers_.resize(numBuffers);
+ for (uint32_t i = 0; i < numBuffers; i++)
+ buffers_[i] = camera3Request->output_buffers[i];
+
+ /*
+ * FrameBuffer instances created by wrapping a camera3 provided dmabuf
+ * are emplaced in this vector of unique_ptr<> for lifetime management.
+ */
+ frameBuffers_.reserve(numBuffers);
+
+ /* Clone the controls associated with the camera3 request. */
+ settings_ = CameraMetadata(camera3Request->settings);
+
+ /*
+ * Create the CaptureRequest, stored as a unique_ptr<> to tie its
+ * lifetime to the descriptor.
+ */
+ request_ = std::make_unique<CaptureRequest>(camera);
}
/*
@@ -208,42 +260,64 @@ CameraDevice::Camera3RequestDescriptor::~Camera3RequestDescriptor()
* back to the framework using the designated callbacks.
*/
-CameraDevice::CameraDevice(unsigned int id, const std::shared_ptr<Camera> &camera)
- : id_(id), running_(false), camera_(camera), staticMetadata_(nullptr),
+CameraDevice::CameraDevice(unsigned int id, std::shared_ptr<Camera> camera)
+ : id_(id), state_(State::Stopped), camera_(std::move(camera)),
facing_(CAMERA_FACING_FRONT), orientation_(0)
{
camera_->requestCompleted.connect(this, &CameraDevice::requestComplete);
- /*
- * \todo Determine a more accurate value for this during
- * streamConfiguration.
- */
- maxJpegBufferSize_ = 13 << 20; /* 13631488 from USB HAL */
-}
+ maker_ = "libcamera";
+ model_ = "cameraModel";
-CameraDevice::~CameraDevice()
-{
- if (staticMetadata_)
- delete staticMetadata_;
+ /* \todo Support getting properties on Android */
+ std::ifstream fstream("/var/cache/camera/camera.prop");
+ if (!fstream.is_open())
+ return;
+
+ std::string line;
+ while (std::getline(fstream, line)) {
+ std::string::size_type delimPos = line.find("=");
+ if (delimPos == std::string::npos)
+ continue;
+ std::string key = line.substr(0, delimPos);
+ std::string val = line.substr(delimPos + 1);
- for (auto &it : requestTemplates_)
- delete it.second;
+ if (!key.compare("ro.product.model"))
+ model_ = val;
+ else if (!key.compare("ro.product.manufacturer"))
+ maker_ = val;
+ }
}
-std::shared_ptr<CameraDevice> CameraDevice::create(unsigned int id,
- const std::shared_ptr<Camera> &cam)
+CameraDevice::~CameraDevice() = default;
+
+std::unique_ptr<CameraDevice> CameraDevice::create(unsigned int id,
+ std::shared_ptr<Camera> cam)
{
- CameraDevice *camera = new CameraDevice(id, cam);
- return std::shared_ptr<CameraDevice>(camera);
+ return std::unique_ptr<CameraDevice>(
+ new CameraDevice(id, std::move(cam)));
}
/*
- * Initialize the camera static information.
+ * Initialize the camera static information retrieved from the
+ * Camera::properties or from the cameraConfigData.
+ *
+ * cameraConfigData is optional for external camera devices and can be
+ * nullptr.
+ *
* This method is called before the camera device is opened.
*/
-int CameraDevice::initialize()
+int CameraDevice::initialize(const CameraConfigData *cameraConfigData)
{
- /* Initialize orientation and facing side of the camera. */
+ /*
+ * Initialize orientation and facing side of the camera.
+ *
+ * If the libcamera::Camera provides those information as retrieved
+ * from firmware use them, otherwise fallback to values parsed from
+ * the configuration file. If the configuration file is not available
+ * the camera is external so its location and rotation can be safely
+ * defaulted.
+ */
const ControlList &properties = camera_->properties();
if (properties.contains(properties::Location)) {
@@ -259,6 +333,22 @@ int CameraDevice::initialize()
facing_ = CAMERA_FACING_EXTERNAL;
break;
}
+
+ if (cameraConfigData && cameraConfigData->facing != -1 &&
+ facing_ != cameraConfigData->facing) {
+ LOG(HAL, Warning)
+ << "Camera location does not match"
+ << " configuration file. Using " << facing_;
+ }
+ } else if (cameraConfigData) {
+ if (cameraConfigData->facing == -1) {
+ LOG(HAL, Error)
+ << "Camera facing not in configuration file";
+ return -EINVAL;
+ }
+ facing_ = cameraConfigData->facing;
+ } else {
+ facing_ = CAMERA_FACING_EXTERNAL;
}
/*
@@ -272,234 +362,24 @@ int CameraDevice::initialize()
if (properties.contains(properties::Rotation)) {
int rotation = properties.get(properties::Rotation);
orientation_ = (360 - rotation) % 360;
- }
-
- int ret = camera_->acquire();
- if (ret) {
- LOG(HAL, Error) << "Failed to temporarily acquire the camera";
- return ret;
- }
-
- ret = initializeStreamConfigurations();
- camera_->release();
- return ret;
-}
-
-std::vector<Size> CameraDevice::getYUVResolutions(CameraConfiguration *cameraConfig,
- const PixelFormat &pixelFormat,
- const std::vector<Size> &resolutions)
-{
- std::vector<Size> supportedResolutions;
-
- StreamConfiguration &cfg = cameraConfig->at(0);
- for (const Size &res : resolutions) {
- cfg.pixelFormat = pixelFormat;
- cfg.size = res;
-
- CameraConfiguration::Status status = cameraConfig->validate();
- if (status != CameraConfiguration::Valid) {
- LOG(HAL, Debug) << cfg.toString() << " not supported";
- continue;
+ if (cameraConfigData && cameraConfigData->rotation != -1 &&
+ orientation_ != cameraConfigData->rotation) {
+ LOG(HAL, Warning)
+ << "Camera orientation does not match"
+ << " configuration file. Using " << orientation_;
}
-
- LOG(HAL, Debug) << cfg.toString() << " supported";
-
- supportedResolutions.push_back(res);
- }
-
- return supportedResolutions;
-}
-
-std::vector<Size> CameraDevice::getRawResolutions(const libcamera::PixelFormat &pixelFormat)
-{
- std::unique_ptr<CameraConfiguration> cameraConfig =
- camera_->generateConfiguration({ StreamRole::Raw });
- StreamConfiguration &cfg = cameraConfig->at(0);
- const StreamFormats &formats = cfg.formats();
- std::vector<Size> supportedResolutions = formats.sizes(pixelFormat);
-
- return supportedResolutions;
-}
-
-/*
- * Initialize the format conversion map to translate from Android format
- * identifier to libcamera pixel formats and fill in the list of supported
- * stream configurations to be reported to the Android camera framework through
- * the static stream configuration metadata.
- */
-int CameraDevice::initializeStreamConfigurations()
-{
- /*
- * Get the maximum output resolutions
- * \todo Get this from the camera properties once defined
- */
- std::unique_ptr<CameraConfiguration> cameraConfig =
- camera_->generateConfiguration({ StillCapture });
- if (!cameraConfig) {
- LOG(HAL, Error) << "Failed to get maximum resolution";
- return -EINVAL;
- }
- StreamConfiguration &cfg = cameraConfig->at(0);
-
- /*
- * \todo JPEG - Adjust the maximum available resolution by taking the
- * JPEG encoder requirements into account (alignment and aspect ratio).
- */
- const Size maxRes = cfg.size;
- LOG(HAL, Debug) << "Maximum supported resolution: " << maxRes.toString();
-
- /*
- * Build the list of supported image resolutions.
- *
- * The resolutions listed in camera3Resolution are mandatory to be
- * supported, up to the camera maximum resolution.
- *
- * Augment the list by adding resolutions calculated from the camera
- * maximum one.
- */
- std::vector<Size> cameraResolutions;
- std::copy_if(camera3Resolutions.begin(), camera3Resolutions.end(),
- std::back_inserter(cameraResolutions),
- [&](const Size &res) { return res < maxRes; });
-
- /*
- * The Camera3 specification suggests adding 1/2 and 1/4 of the maximum
- * resolution.
- */
- for (unsigned int divider = 2;; divider <<= 1) {
- Size derivedSize{
- maxRes.width / divider,
- maxRes.height / divider,
- };
-
- if (derivedSize.width < 320 ||
- derivedSize.height < 240)
- break;
-
- cameraResolutions.push_back(derivedSize);
- }
- cameraResolutions.push_back(maxRes);
-
- /* Remove duplicated entries from the list of supported resolutions. */
- std::sort(cameraResolutions.begin(), cameraResolutions.end());
- auto last = std::unique(cameraResolutions.begin(), cameraResolutions.end());
- cameraResolutions.erase(last, cameraResolutions.end());
-
- /*
- * Build the list of supported camera formats.
- *
- * To each Android format a list of compatible libcamera formats is
- * associated. The first libcamera format that tests successful is added
- * to the format translation map used when configuring the streams.
- * It is then tested against the list of supported camera resolutions to
- * build the stream configuration map reported through the camera static
- * metadata.
- */
- for (const auto &format : camera3FormatsMap) {
- int androidFormat = format.first;
- const Camera3Format &camera3Format = format.second;
- const std::vector<PixelFormat> &libcameraFormats =
- camera3Format.libcameraFormats;
-
- LOG(HAL, Debug) << "Trying to map Android format "
- << camera3Format.name;
-
- /*
- * JPEG is always supported, either produced directly by the
- * camera, or encoded in the HAL.
- */
- if (androidFormat == HAL_PIXEL_FORMAT_BLOB) {
- formatsMap_[androidFormat] = formats::MJPEG;
- LOG(HAL, Debug) << "Mapped Android format "
- << camera3Format.name << " to "
- << formats::MJPEG.toString()
- << " (fixed mapping)";
- continue;
- }
-
- /*
- * Test the libcamera formats that can produce images
- * compatible with the format defined by Android.
- */
- PixelFormat mappedFormat;
- for (const PixelFormat &pixelFormat : libcameraFormats) {
-
- LOG(HAL, Debug) << "Testing " << pixelFormat.toString();
-
- /*
- * The stream configuration size can be adjusted,
- * not the pixel format.
- *
- * \todo This could be simplified once all pipeline
- * handlers will report the StreamFormats list of
- * supported formats.
- */
- cfg.pixelFormat = pixelFormat;
-
- CameraConfiguration::Status status = cameraConfig->validate();
- if (status != CameraConfiguration::Invalid &&
- cfg.pixelFormat == pixelFormat) {
- mappedFormat = pixelFormat;
- break;
- }
- }
-
- if (!mappedFormat.isValid()) {
- /* If the format is not mandatory, skip it. */
- if (!camera3Format.mandatory)
- continue;
-
+ } else if (cameraConfigData) {
+ if (cameraConfigData->rotation == -1) {
LOG(HAL, Error)
- << "Failed to map mandatory Android format "
- << camera3Format.name << " ("
- << utils::hex(androidFormat) << "): aborting";
+ << "Camera rotation not in configuration file";
return -EINVAL;
}
-
- /*
- * Record the mapping and then proceed to generate the
- * stream configurations map, by testing the image resolutions.
- */
- formatsMap_[androidFormat] = mappedFormat;
- LOG(HAL, Debug) << "Mapped Android format "
- << camera3Format.name << " to "
- << mappedFormat.toString();
-
- std::vector<Size> resolutions;
- const PixelFormatInfo &info = PixelFormatInfo::info(mappedFormat);
- if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)
- resolutions = getRawResolutions(mappedFormat);
- else
- resolutions = getYUVResolutions(cameraConfig.get(),
- mappedFormat,
- cameraResolutions);
-
- for (const Size &res : resolutions) {
- streamConfigurations_.push_back({ res, androidFormat });
-
- /*
- * If the format is HAL_PIXEL_FORMAT_YCbCr_420_888
- * from which JPEG is produced, add an entry for
- * the JPEG stream.
- *
- * \todo Wire the JPEG encoder to query the supported
- * sizes provided a list of formats it can encode.
- *
- * \todo Support JPEG streams produced by the Camera
- * natively.
- */
- if (androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888)
- streamConfigurations_.push_back(
- { res, HAL_PIXEL_FORMAT_BLOB });
- }
+ orientation_ = cameraConfigData->rotation;
+ } else {
+ orientation_ = 0;
}
- LOG(HAL, Debug) << "Collected stream configuration map: ";
- for (const auto &entry : streamConfigurations_)
- LOG(HAL, Debug) << "{ " << entry.resolution.toString() << " - "
- << utils::hex(entry.androidFormat) << " }";
-
- return 0;
+ return capabilities_.initialize(camera_, orientation_, facing_);
}
/*
@@ -532,571 +412,56 @@ int CameraDevice::open(const hw_module_t *hardwareModule)
void CameraDevice::close()
{
- camera_->stop();
- camera_->release();
-
- running_ = false;
-}
-
-void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)
-{
- callbacks_ = callbacks;
-}
-
-std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize()
-{
- /*
- * \todo Keep this in sync with the actual number of entries.
- * Currently: 51 entries, 687 bytes of static metadata
- */
- uint32_t numEntries = 51;
- uint32_t byteSize = 687;
+ streams_.clear();
- /*
- * Calculate space occupation in bytes for dynamically built metadata
- * entries.
- *
- * Each stream configuration entry requires 52 bytes:
- * 4 32bits integers for ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
- * 4 64bits integers for ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS
- */
- byteSize += streamConfigurations_.size() * 48;
+ stop();
- return std::make_tuple(numEntries, byteSize);
+ camera_->release();
}
-/*
- * Return static information for the camera.
- */
-const camera_metadata_t *CameraDevice::getStaticMetadata()
+void CameraDevice::flush()
{
- if (staticMetadata_)
- return staticMetadata_->get();
-
- /*
- * The here reported metadata are enough to implement a basic capture
- * example application, but a real camera implementation will require
- * more.
- */
- uint32_t numEntries;
- uint32_t byteSize;
- std::tie(numEntries, byteSize) = calculateStaticMetadataSize();
- staticMetadata_ = new CameraMetadata(numEntries, byteSize);
- if (!staticMetadata_->isValid()) {
- LOG(HAL, Error) << "Failed to allocate static metadata";
- delete staticMetadata_;
- staticMetadata_ = nullptr;
- return nullptr;
- }
-
- /* Color correction static metadata. */
- std::vector<uint8_t> aberrationModes = {
- ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
- aberrationModes.data(),
- aberrationModes.size());
-
- /* Control static metadata. */
- std::vector<uint8_t> aeAvailableAntiBandingModes = {
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
- aeAvailableAntiBandingModes.data(),
- aeAvailableAntiBandingModes.size());
-
- std::vector<uint8_t> aeAvailableModes = {
- ANDROID_CONTROL_AE_MODE_ON,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,
- aeAvailableModes.data(),
- aeAvailableModes.size());
-
- std::vector<int32_t> availableAeFpsTarget = {
- 15, 30,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
- availableAeFpsTarget.data(),
- availableAeFpsTarget.size());
-
- std::vector<int32_t> aeCompensationRange = {
- 0, 0,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
- aeCompensationRange.data(),
- aeCompensationRange.size());
-
- const camera_metadata_rational_t aeCompensationStep[] = {
- { 0, 1 }
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,
- aeCompensationStep, 1);
-
- std::vector<uint8_t> availableAfModes = {
- ANDROID_CONTROL_AF_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,
- availableAfModes.data(),
- availableAfModes.size());
-
- std::vector<uint8_t> availableEffects = {
- ANDROID_CONTROL_EFFECT_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,
- availableEffects.data(),
- availableEffects.size());
-
- std::vector<uint8_t> availableSceneModes = {
- ANDROID_CONTROL_SCENE_MODE_DISABLED,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
- availableSceneModes.data(),
- availableSceneModes.size());
-
- std::vector<uint8_t> availableStabilizationModes = {
- ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
- availableStabilizationModes.data(),
- availableStabilizationModes.size());
-
- std::vector<uint8_t> availableAwbModes = {
- ANDROID_CONTROL_AWB_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
- availableAwbModes.data(),
- availableAwbModes.size());
-
- std::vector<int32_t> availableMaxRegions = {
- 0, 0, 0,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,
- availableMaxRegions.data(),
- availableMaxRegions.size());
-
- std::vector<uint8_t> sceneModesOverride = {
- ANDROID_CONTROL_AE_MODE_ON,
- ANDROID_CONTROL_AWB_MODE_AUTO,
- ANDROID_CONTROL_AF_MODE_AUTO,
- };
- staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
- sceneModesOverride.data(),
- sceneModesOverride.size());
-
- uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
- &aeLockAvailable, 1);
-
- uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
- staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
- &awbLockAvailable, 1);
-
- char availableControlModes = ANDROID_CONTROL_MODE_AUTO;
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,
- &availableControlModes, 1);
-
- /* JPEG static metadata. */
- std::vector<int32_t> availableThumbnailSizes = {
- 0, 0,
- };
- staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
- availableThumbnailSizes.data(),
- availableThumbnailSizes.size());
-
- /*
- * \todo Calculate the maximum JPEG buffer size by asking the encoder
- * giving the maximum frame size required.
- */
- staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, &maxJpegBufferSize_, 1);
-
- /* Sensor static metadata. */
- int32_t pixelArraySize[] = {
- 2592, 1944,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
- &pixelArraySize, 2);
-
- int32_t sensorSizes[] = {
- 0, 0, 2560, 1920,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
- &sensorSizes, 4);
-
- int32_t sensitivityRange[] = {
- 32, 2400,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
- &sensitivityRange, 2);
-
- uint16_t filterArr = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG;
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
- &filterArr, 1);
-
- int64_t exposureTimeRange[] = {
- 100000, 200000000,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
- &exposureTimeRange, 2);
-
- staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, &orientation_, 1);
-
- std::vector<int32_t> testPatterModes = {
- ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
- testPatterModes.data(),
- testPatterModes.size());
-
- std::vector<float> physicalSize = {
- 2592, 1944,
- };
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
- physicalSize.data(),
- physicalSize.size());
-
- uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
- &timestampSource, 1);
-
- /* Statistics static metadata. */
- uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
- staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
- &faceDetectMode, 1);
-
- int32_t maxFaceCount = 0;
- staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
- &maxFaceCount, 1);
-
- /* Sync static metadata. */
- int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
- staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
-
- /* Flash static metadata. */
- char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
- staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,
- &flashAvailable, 1);
-
- /* Lens static metadata. */
- std::vector<float> lensApertures = {
- 2.53 / 100,
- };
- staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
- lensApertures.data(),
- lensApertures.size());
-
- uint8_t lensFacing;
- switch (facing_) {
- default:
- case CAMERA_FACING_FRONT:
- lensFacing = ANDROID_LENS_FACING_FRONT;
- break;
- case CAMERA_FACING_BACK:
- lensFacing = ANDROID_LENS_FACING_BACK;
- break;
- case CAMERA_FACING_EXTERNAL:
- lensFacing = ANDROID_LENS_FACING_EXTERNAL;
- break;
- }
- staticMetadata_->addEntry(ANDROID_LENS_FACING, &lensFacing, 1);
-
- std::vector<float> lensFocalLenghts = {
- 1,
- };
- staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
- lensFocalLenghts.data(),
- lensFocalLenghts.size());
-
- std::vector<uint8_t> opticalStabilizations = {
- ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
- };
- staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
- opticalStabilizations.data(),
- opticalStabilizations.size());
-
- float hypeFocalDistance = 0;
- staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
- &hypeFocalDistance, 1);
-
- float minFocusDistance = 0;
- staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
- &minFocusDistance, 1);
-
- /* Noise reduction modes. */
- uint8_t noiseReductionModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
- staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
- &noiseReductionModes, 1);
-
- /* Scaler static metadata. */
- float maxDigitalZoom = 1;
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
- &maxDigitalZoom, 1);
-
- std::vector<uint32_t> availableStreamConfigurations;
- availableStreamConfigurations.reserve(streamConfigurations_.size() * 4);
- for (const auto &entry : streamConfigurations_) {
- availableStreamConfigurations.push_back(entry.androidFormat);
- availableStreamConfigurations.push_back(entry.resolution.width);
- availableStreamConfigurations.push_back(entry.resolution.height);
- availableStreamConfigurations.push_back(
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
- }
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
- availableStreamConfigurations.data(),
- availableStreamConfigurations.size());
-
- std::vector<int64_t> availableStallDurations = {
- ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
- };
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
- availableStallDurations.data(),
- availableStallDurations.size());
-
- /* \todo Collect the minimum frame duration from the camera. */
- std::vector<int64_t> minFrameDurations;
- minFrameDurations.reserve(streamConfigurations_.size() * 4);
- for (const auto &entry : streamConfigurations_) {
- minFrameDurations.push_back(entry.androidFormat);
- minFrameDurations.push_back(entry.resolution.width);
- minFrameDurations.push_back(entry.resolution.height);
- minFrameDurations.push_back(33333333);
- }
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
- minFrameDurations.data(),
- minFrameDurations.size());
-
- uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
- staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
-
- /* Info static metadata. */
- uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
- staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
- &supportedHWLevel, 1);
-
- /* Request static metadata. */
- int32_t partialResultCount = 1;
- staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
- &partialResultCount, 1);
-
- uint8_t maxPipelineDepth = 2;
- staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
- &maxPipelineDepth, 1);
-
- /* LIMITED does not support reprocessing. */
- uint32_t maxNumInputStreams = 0;
- staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
- &maxNumInputStreams, 1);
-
- std::vector<uint8_t> availableCapabilities = {
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
- };
+ {
+ MutexLocker stateLock(stateMutex_);
+ if (state_ != State::Running)
+ return;
- /* Report if camera supports RAW. */
- std::unique_ptr<CameraConfiguration> cameraConfig =
- camera_->generateConfiguration({ StreamRole::Raw });
- if (cameraConfig && !cameraConfig->empty()) {
- const PixelFormatInfo &info =
- PixelFormatInfo::info(cameraConfig->at(0).pixelFormat);
- if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)
- availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
+ state_ = State::Flushing;
}
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
- availableCapabilities.data(),
- availableCapabilities.size());
-
- std::vector<int32_t> availableCharacteristicsKeys = {
- ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
- ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
- ANDROID_CONTROL_AE_AVAILABLE_MODES,
- ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
- ANDROID_CONTROL_AE_COMPENSATION_RANGE,
- ANDROID_CONTROL_AE_COMPENSATION_STEP,
- ANDROID_CONTROL_AF_AVAILABLE_MODES,
- ANDROID_CONTROL_AVAILABLE_EFFECTS,
- ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
- ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
- ANDROID_CONTROL_AWB_AVAILABLE_MODES,
- ANDROID_CONTROL_MAX_REGIONS,
- ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
- ANDROID_CONTROL_AE_LOCK_AVAILABLE,
- ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
- ANDROID_CONTROL_AVAILABLE_MODES,
- ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
- ANDROID_JPEG_MAX_SIZE,
- ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
- ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
- ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
- ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
- ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
- ANDROID_SENSOR_ORIENTATION,
- ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
- ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
- ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
- ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
- ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
- ANDROID_SYNC_MAX_LATENCY,
- ANDROID_FLASH_INFO_AVAILABLE,
- ANDROID_LENS_INFO_AVAILABLE_APERTURES,
- ANDROID_LENS_FACING,
- ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
- ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
- ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
- ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
- ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
- ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
- ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
- ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
- ANDROID_SCALER_CROPPING_TYPE,
- ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
- ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
- ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
- ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
- };
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
- availableCharacteristicsKeys.data(),
- availableCharacteristicsKeys.size());
-
- std::vector<int32_t> availableRequestKeys = {
- ANDROID_CONTROL_AE_MODE,
- ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
- ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
- ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
- ANDROID_CONTROL_AE_ANTIBANDING_MODE,
- ANDROID_CONTROL_AE_LOCK,
- ANDROID_CONTROL_AF_TRIGGER,
- ANDROID_CONTROL_AWB_MODE,
- ANDROID_CONTROL_AWB_LOCK,
- ANDROID_FLASH_MODE,
- ANDROID_STATISTICS_FACE_DETECT_MODE,
- ANDROID_NOISE_REDUCTION_MODE,
- ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
- ANDROID_LENS_APERTURE,
- ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
- ANDROID_CONTROL_MODE,
- ANDROID_CONTROL_CAPTURE_INTENT,
- };
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
- availableRequestKeys.data(),
- availableRequestKeys.size());
-
- std::vector<int32_t> availableResultKeys = {
- ANDROID_CONTROL_AE_STATE,
- ANDROID_CONTROL_AE_LOCK,
- ANDROID_CONTROL_AF_STATE,
- ANDROID_CONTROL_AWB_STATE,
- ANDROID_CONTROL_AWB_LOCK,
- ANDROID_LENS_STATE,
- ANDROID_SCALER_CROP_REGION,
- ANDROID_SENSOR_TIMESTAMP,
- ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
- ANDROID_SENSOR_EXPOSURE_TIME,
- ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
- ANDROID_STATISTICS_SCENE_FLICKER,
- ANDROID_JPEG_SIZE,
- ANDROID_JPEG_QUALITY,
- ANDROID_JPEG_ORIENTATION,
- };
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
- availableResultKeys.data(),
- availableResultKeys.size());
-
- if (!staticMetadata_->isValid()) {
- LOG(HAL, Error) << "Failed to construct static metadata";
- delete staticMetadata_;
- staticMetadata_ = nullptr;
- return nullptr;
- }
+ worker_.stop();
+ camera_->stop();
- return staticMetadata_->get();
+ MutexLocker stateLock(stateMutex_);
+ state_ = State::Stopped;
}
-CameraMetadata *CameraDevice::requestTemplatePreview()
+void CameraDevice::stop()
{
- /*
- * \todo Keep this in sync with the actual number of entries.
- * Currently: 20 entries, 35 bytes
- */
- CameraMetadata *requestTemplate = new CameraMetadata(20, 35);
- if (!requestTemplate->isValid()) {
- delete requestTemplate;
- return nullptr;
- }
-
- uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
- requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE,
- &aeMode, 1);
-
- int32_t aeExposureCompensation = 0;
- requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
- &aeExposureCompensation, 1);
+ MutexLocker stateLock(stateMutex_);
+ if (state_ == State::Stopped)
+ return;
- uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
- requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
- &aePrecaptureTrigger, 1);
-
- uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
- requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK,
- &aeLock, 1);
-
- std::vector<int32_t> aeFpsTarget = {
- 15, 30,
- };
- requestTemplate->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
- aeFpsTarget.data(),
- aeFpsTarget.size());
-
- uint8_t aeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
- requestTemplate->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
- &aeAntibandingMode, 1);
-
- uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
- requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER,
- &afTrigger, 1);
-
- uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
- requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE,
- &awbMode, 1);
-
- uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
- requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK,
- &awbLock, 1);
-
- uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
- requestTemplate->addEntry(ANDROID_FLASH_MODE,
- &flashMode, 1);
-
- uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
- requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,
- &faceDetectMode, 1);
-
- uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF;
- requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE,
- &noiseReduction, 1);
-
- uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
- requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
- &aberrationMode, 1);
-
- uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
- requestTemplate->addEntry(ANDROID_CONTROL_MODE, &controlMode, 1);
+ worker_.stop();
+ camera_->stop();
- float lensAperture = 2.53 / 100;
- requestTemplate->addEntry(ANDROID_LENS_APERTURE, &lensAperture, 1);
+ descriptors_.clear();
+ state_ = State::Stopped;
+}
- uint8_t opticalStabilization = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
- requestTemplate->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
- &opticalStabilization, 1);
+unsigned int CameraDevice::maxJpegBufferSize() const
+{
+ return capabilities_.maxJpegBufferSize();
+}
- uint8_t captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
- requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,
- &captureIntent, 1);
+void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)
+{
+ callbacks_ = callbacks;
+}
- return requestTemplate;
+const camera_metadata_t *CameraDevice::getStaticMetadata()
+{
+ return capabilities_.staticMetadata()->get();
}
/*
@@ -1109,57 +474,47 @@ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)
return it->second->get();
/* Use the capture intent matching the requested template type. */
- CameraMetadata *requestTemplate;
+ std::unique_ptr<CameraMetadata> requestTemplate;
uint8_t captureIntent;
switch (type) {
case CAMERA3_TEMPLATE_PREVIEW:
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+ requestTemplate = capabilities_.requestTemplatePreview();
break;
case CAMERA3_TEMPLATE_STILL_CAPTURE:
+ /*
+ * Use the preview template for still capture, they only differ
+ * for the torch mode we currently do not support.
+ */
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
+ requestTemplate = capabilities_.requestTemplatePreview();
break;
case CAMERA3_TEMPLATE_VIDEO_RECORD:
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
+ requestTemplate = capabilities_.requestTemplateVideo();
break;
case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
+ requestTemplate = capabilities_.requestTemplateVideo();
break;
+ /* \todo Implement templates generation for the remaining use cases. */
case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
- captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
- break;
case CAMERA3_TEMPLATE_MANUAL:
- captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
- break;
default:
- LOG(HAL, Error) << "Invalid template request type: " << type;
+ LOG(HAL, Error) << "Unsupported template request type: " << type;
return nullptr;
}
- requestTemplate = requestTemplatePreview();
if (!requestTemplate || !requestTemplate->isValid()) {
LOG(HAL, Error) << "Failed to construct request template";
- delete requestTemplate;
return nullptr;
}
requestTemplate->updateEntry(ANDROID_CONTROL_CAPTURE_INTENT,
- &captureIntent, 1);
-
- requestTemplates_[type] = requestTemplate;
- return requestTemplate->get();
-}
+ captureIntent);
-PixelFormat CameraDevice::toPixelFormat(int format)
-{
- /* Translate Android format code to libcamera pixel format. */
- auto it = formatsMap_.find(format);
- if (it == formatsMap_.end()) {
- LOG(HAL, Error) << "Requested format " << utils::hex(format)
- << " not supported";
- return PixelFormat();
- }
-
- return it->second;
+ requestTemplates_[type] = std::move(requestTemplate);
+ return requestTemplates_[type]->get();
}
/*
@@ -1168,12 +523,25 @@ PixelFormat CameraDevice::toPixelFormat(int format)
*/
int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
{
+ /* Before any configuration attempt, stop the camera. */
+ stop();
+
+ if (stream_list->num_streams == 0) {
+ LOG(HAL, Error) << "No streams in configuration";
+ return -EINVAL;
+ }
+
+#if defined(OS_CHROMEOS)
+ if (!validateCropRotate(*stream_list))
+ return -EINVAL;
+#endif
+
/*
* Generate an empty configuration, and construct a StreamConfiguration
* for each camera3_stream to add to it.
*/
- config_ = camera_->generateConfiguration();
- if (!config_) {
+ std::unique_ptr<CameraConfiguration> config = camera_->generateConfiguration();
+ if (!config) {
LOG(HAL, Error) << "Failed to generate camera configuration";
return -EINVAL;
}
@@ -1186,24 +554,44 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
streams_.clear();
streams_.reserve(stream_list->num_streams);
+ std::vector<Camera3StreamConfig> streamConfigs;
+ streamConfigs.reserve(stream_list->num_streams);
+
/* First handle all non-MJPEG streams. */
camera3_stream_t *jpegStream = nullptr;
for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
camera3_stream_t *stream = stream_list->streams[i];
Size size(stream->width, stream->height);
- PixelFormat format = toPixelFormat(stream->format);
+ PixelFormat format = capabilities_.toPixelFormat(stream->format);
LOG(HAL, Info) << "Stream #" << i
<< ", direction: " << stream->stream_type
<< ", width: " << stream->width
<< ", height: " << stream->height
<< ", format: " << utils::hex(stream->format)
+ << ", rotation: " << rotationToString(stream->rotation)
+#if defined(OS_CHROMEOS)
+ << ", crop_rotate_scale_degrees: "
+ << rotationToString(stream->crop_rotate_scale_degrees)
+#endif
<< " (" << format.toString() << ")";
if (!format.isValid())
return -EINVAL;
+ /* \todo Support rotation. */
+ if (stream->rotation != CAMERA3_STREAM_ROTATION_0) {
+ LOG(HAL, Error) << "Rotation is not supported";
+ return -EINVAL;
+ }
+#if defined(OS_CHROMEOS)
+ if (stream->crop_rotate_scale_degrees != CAMERA3_STREAM_ROTATION_0) {
+ LOG(HAL, Error) << "Rotation is not supported";
+ return -EINVAL;
+ }
+#endif
+
/* Defer handling of MJPEG streams until all others are known. */
if (stream->format == HAL_PIXEL_FORMAT_BLOB) {
if (jpegStream) {
@@ -1216,23 +604,25 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
continue;
}
- StreamConfiguration streamConfiguration;
- streamConfiguration.size = size;
- streamConfiguration.pixelFormat = format;
+ Camera3StreamConfig streamConfig;
+ streamConfig.streams = { { stream, CameraStream::Type::Direct } };
+ streamConfig.config.size = size;
+ streamConfig.config.pixelFormat = format;
+ streamConfigs.push_back(std::move(streamConfig));
- config_->addConfiguration(streamConfiguration);
- unsigned int index = config_->size() - 1;
- streams_.emplace_back(format, size, index);
- stream->priv = static_cast<void *>(&streams_.back());
+ /* This stream will be produced by hardware. */
+ stream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
}
/* Now handle the MJPEG streams, adding a new stream if required. */
if (jpegStream) {
+ CameraStream::Type type;
int index = -1;
/* Search for a compatible stream in the non-JPEG ones. */
- for (unsigned int i = 0; i < config_->size(); i++) {
- StreamConfiguration &cfg = config_->at(i);
+ for (size_t i = 0; i < streamConfigs.size(); ++i) {
+ Camera3StreamConfig &streamConfig = streamConfigs[i];
+ const auto &cfg = streamConfig.config;
/*
* \todo The PixelFormat must also be compatible with
@@ -1245,7 +635,15 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
LOG(HAL, Info)
<< "Android JPEG stream mapped to libcamera stream " << i;
+ type = CameraStream::Type::Mapped;
index = i;
+
+ /*
+ * The source stream will be read by software to
+ * produce the JPEG stream.
+ */
+ camera3_stream_t *stream = streamConfig.streams[0].stream;
+ stream->usage |= GRALLOC_USAGE_SW_READ_OFTEN;
break;
}
@@ -1254,80 +652,82 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
* introduce a new stream to satisfy the request requirements.
*/
if (index < 0) {
- StreamConfiguration streamConfiguration;
-
/*
* \todo The pixelFormat should be a 'best-fit' choice
* and may require a validation cycle. This is not yet
* handled, and should be considered as part of any
* stream configuration reworks.
*/
- streamConfiguration.size.width = jpegStream->width;
- streamConfiguration.size.height = jpegStream->height;
- streamConfiguration.pixelFormat = formats::NV12;
+ Camera3StreamConfig streamConfig;
+ streamConfig.config.size.width = jpegStream->width;
+ streamConfig.config.size.height = jpegStream->height;
+ streamConfig.config.pixelFormat = formats::NV12;
+ streamConfigs.push_back(std::move(streamConfig));
- LOG(HAL, Info) << "Adding " << streamConfiguration.toString()
+ LOG(HAL, Info) << "Adding " << streamConfig.config.toString()
<< " for MJPEG support";
- config_->addConfiguration(streamConfiguration);
- index = config_->size() - 1;
+ type = CameraStream::Type::Internal;
+ index = streamConfigs.size() - 1;
}
- StreamConfiguration &cfg = config_->at(index);
+ /* The JPEG stream will be produced by software. */
+ jpegStream->usage |= GRALLOC_USAGE_SW_WRITE_OFTEN;
- /*
- * Construct a software encoder for the MJPEG streams from the
- * chosen libcamera source stream.
- */
- Encoder *encoder = new EncoderLibJpeg();
- int ret = encoder->configure(cfg);
- if (ret) {
- LOG(HAL, Error) << "Failed to configure encoder";
- delete encoder;
- return ret;
- }
+ streamConfigs[index].streams.push_back({ jpegStream, type });
+ }
+
+ sortCamera3StreamConfigs(streamConfigs, jpegStream);
+ for (const auto &streamConfig : streamConfigs) {
+ config->addConfiguration(streamConfig.config);
- streams_.emplace_back(formats::MJPEG, cfg.size, index, encoder);
- jpegStream->priv = static_cast<void *>(&streams_.back());
+ for (auto &stream : streamConfig.streams) {
+ streams_.emplace_back(this, config.get(), stream.type,
+ stream.stream, config->size() - 1);
+ stream.stream->priv = static_cast<void *>(&streams_.back());
+ }
}
- switch (config_->validate()) {
+ switch (config->validate()) {
case CameraConfiguration::Valid:
break;
case CameraConfiguration::Adjusted:
LOG(HAL, Info) << "Camera configuration adjusted";
- for (const StreamConfiguration &cfg : *config_)
+ for (const StreamConfiguration &cfg : *config)
LOG(HAL, Info) << " - " << cfg.toString();
- config_.reset();
return -EINVAL;
case CameraConfiguration::Invalid:
LOG(HAL, Info) << "Camera configuration invalid";
- config_.reset();
return -EINVAL;
}
- for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
- camera3_stream_t *stream = stream_list->streams[i];
- CameraStream *cameraStream = static_cast<CameraStream *>(stream->priv);
- StreamConfiguration &cfg = config_->at(cameraStream->index());
-
- /* Use the bufferCount confirmed by the validation process. */
- stream->max_buffers = cfg.bufferCount;
- }
-
/*
* Once the CameraConfiguration has been adjusted/validated
* it can be applied to the camera.
*/
- int ret = camera_->configure(config_.get());
+ int ret = camera_->configure(config.get());
if (ret) {
LOG(HAL, Error) << "Failed to configure camera '"
<< camera_->id() << "'";
return ret;
}
+ /*
+ * Configure the HAL CameraStream instances using the associated
+ * StreamConfiguration and set the number of required buffers in
+ * the Android camera3_stream_t.
+ */
+ for (CameraStream &cameraStream : streams_) {
+ ret = cameraStream.configure();
+ if (ret) {
+ LOG(HAL, Error) << "Failed to configure camera stream";
+ return ret;
+ }
+ }
+
+ config_ = std::move(config);
return 0;
}
@@ -1361,85 +761,234 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer
return new FrameBuffer(std::move(planes));
}
-int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Request)
+int CameraDevice::processControls(Camera3RequestDescriptor *descriptor)
+{
+ const CameraMetadata &settings = descriptor->settings_;
+ if (!settings.isValid())
+ return 0;
+
+ /* Translate the Android request settings to libcamera controls. */
+ camera_metadata_ro_entry_t entry;
+ if (settings.getEntry(ANDROID_SCALER_CROP_REGION, &entry)) {
+ const int32_t *data = entry.data.i32;
+ Rectangle cropRegion{ data[0], data[1],
+ static_cast<unsigned int>(data[2]),
+ static_cast<unsigned int>(data[3]) };
+ ControlList &controls = descriptor->request_->controls();
+ controls.set(controls::ScalerCrop, cropRegion);
+ }
+
+ return 0;
+}
+
+void CameraDevice::abortRequest(camera3_capture_request_t *request)
+{
+ notifyError(request->frame_number, nullptr, CAMERA3_MSG_ERROR_REQUEST);
+
+ camera3_capture_result_t result = {};
+ result.num_output_buffers = request->num_output_buffers;
+ result.frame_number = request->frame_number;
+ result.partial_result = 0;
+
+ std::vector<camera3_stream_buffer_t> resultBuffers(result.num_output_buffers);
+ for (auto [i, buffer] : utils::enumerate(resultBuffers)) {
+ buffer = request->output_buffers[i];
+ buffer.release_fence = request->output_buffers[i].acquire_fence;
+ buffer.acquire_fence = -1;
+ buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ }
+ result.output_buffers = resultBuffers.data();
+
+ callbacks_->process_capture_result(callbacks_, &result);
+}
+
+bool CameraDevice::isValidRequest(camera3_capture_request_t *camera3Request) const
{
- if (!camera3Request->num_output_buffers) {
+ if (!camera3Request) {
+ LOG(HAL, Error) << "No capture request provided";
+ return false;
+ }
+
+ if (!camera3Request->num_output_buffers ||
+ !camera3Request->output_buffers) {
LOG(HAL, Error) << "No output buffers provided";
- return -EINVAL;
+ return false;
}
- /* Start the camera if that's the first request we handle. */
- if (!running_) {
- int ret = camera_->start();
- if (ret) {
- LOG(HAL, Error) << "Failed to start camera";
- return ret;
+ /* configureStreams() has not been called or has failed. */
+ if (streams_.empty() || !config_) {
+ LOG(HAL, Error) << "No stream is configured";
+ return false;
+ }
+
+ for (uint32_t i = 0; i < camera3Request->num_output_buffers; i++) {
+ const camera3_stream_buffer_t &outputBuffer =
+ camera3Request->output_buffers[i];
+ if (!outputBuffer.buffer || !(*outputBuffer.buffer)) {
+ LOG(HAL, Error) << "Invalid native handle";
+ return false;
+ }
+
+ const native_handle_t *handle = *outputBuffer.buffer;
+ constexpr int kNativeHandleMaxFds = 1024;
+ if (handle->numFds < 0 || handle->numFds > kNativeHandleMaxFds) {
+ LOG(HAL, Error)
+ << "Invalid number of fds (" << handle->numFds
+ << ") in buffer " << i;
+ return false;
+ }
+
+ constexpr int kNativeHandleMaxInts = 1024;
+ if (handle->numInts < 0 || handle->numInts > kNativeHandleMaxInts) {
+ LOG(HAL, Error)
+ << "Invalid number of ints (" << handle->numInts
+ << ") in buffer " << i;
+ return false;
}
- running_ = true;
+ const camera3_stream *camera3Stream = outputBuffer.stream;
+ if (!camera3Stream)
+ return false;
+
+ const CameraStream *cameraStream =
+ static_cast<CameraStream *>(camera3Stream->priv);
+
+ auto found = std::find_if(streams_.begin(), streams_.end(),
+ [cameraStream](const CameraStream &stream) {
+ return &stream == cameraStream;
+ });
+ if (found == streams_.end()) {
+ LOG(HAL, Error)
+ << "No corresponding configured stream found";
+ return false;
+ }
}
- /*
- * Queue a request for the Camera with the provided dmabuf file
- * descriptors.
- */
- const camera3_stream_buffer_t *camera3Buffers =
- camera3Request->output_buffers;
+ return true;
+}
+
+int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Request)
+{
+ if (!isValidRequest(camera3Request))
+ return -EINVAL;
/*
* Save the request descriptors for use at completion time.
* The descriptor and the associated memory reserved here are freed
* at request complete time.
*/
- Camera3RequestDescriptor *descriptor =
- new Camera3RequestDescriptor(camera3Request->frame_number,
- camera3Request->num_output_buffers);
+ Camera3RequestDescriptor descriptor(camera_.get(), camera3Request);
- Request *request =
- camera_->createRequest(reinterpret_cast<uint64_t>(descriptor));
-
- for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
- CameraStream *cameraStream =
- static_cast<CameraStream *>(camera3Buffers[i].stream->priv);
+ /*
+ * \todo The Android request model is incremental, settings passed in
+ * previous requests are to be effective until overridden explicitly in
+ * a new request. Do we need to cache settings incrementally here, or is
+ * it handled by the Android camera service ?
+ */
+ if (camera3Request->settings)
+ lastSettings_ = camera3Request->settings;
+ else
+ descriptor.settings_ = lastSettings_;
+
+ LOG(HAL, Debug) << "Queueing request " << descriptor.request_->cookie()
+ << " with " << descriptor.buffers_.size() << " streams";
+ for (unsigned int i = 0; i < descriptor.buffers_.size(); ++i) {
+ const camera3_stream_buffer_t &camera3Buffer = descriptor.buffers_[i];
+ camera3_stream *camera3Stream = camera3Buffer.stream;
+ CameraStream *cameraStream = static_cast<CameraStream *>(camera3Stream->priv);
+
+ std::stringstream ss;
+ ss << i << " - (" << camera3Stream->width << "x"
+ << camera3Stream->height << ")"
+ << "[" << utils::hex(camera3Stream->format) << "] -> "
+ << "(" << cameraStream->configuration().size.toString() << ")["
+ << cameraStream->configuration().pixelFormat.toString() << "]";
/*
- * Keep track of which stream the request belongs to and store
- * the native buffer handles.
+	 * Inspect the camera stream type, create buffers as needed
+ * and add them to the Request if required.
*/
- descriptor->buffers[i].stream = camera3Buffers[i].stream;
- descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
-
- /* Software streams are handled after hardware streams complete. */
- if (cameraStream->format() == formats::MJPEG)
+ FrameBuffer *buffer = nullptr;
+ switch (cameraStream->type()) {
+ case CameraStream::Type::Mapped:
+ /*
+ * Mapped streams don't need buffers added to the
+ * Request.
+ */
+ LOG(HAL, Debug) << ss.str() << " (mapped)";
continue;
- /*
- * Create a libcamera buffer using the dmabuf descriptors of
- * the camera3Buffer for each stream. The FrameBuffer is
- * directly associated with the Camera3RequestDescriptor for
- * lifetime management only.
- */
- FrameBuffer *buffer = createFrameBuffer(*camera3Buffers[i].buffer);
+ case CameraStream::Type::Direct:
+ /*
+ * Create a libcamera buffer using the dmabuf
+ * descriptors of the camera3Buffer for each stream and
+ * associate it with the Camera3RequestDescriptor for
+ * lifetime management only.
+ */
+ buffer = createFrameBuffer(*camera3Buffer.buffer);
+ descriptor.frameBuffers_.emplace_back(buffer);
+ LOG(HAL, Debug) << ss.str() << " (direct)";
+ break;
+
+ case CameraStream::Type::Internal:
+ /*
+ * Get the frame buffer from the CameraStream internal
+ * buffer pool.
+ *
+ * The buffer has to be returned to the CameraStream
+ * once it has been processed.
+ */
+ buffer = cameraStream->getBuffer();
+ LOG(HAL, Debug) << ss.str() << " (internal)";
+ break;
+ }
+
if (!buffer) {
LOG(HAL, Error) << "Failed to create buffer";
- delete request;
- delete descriptor;
return -ENOMEM;
}
- descriptor->frameBuffers.emplace_back(buffer);
-
- StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index());
- Stream *stream = streamConfiguration->stream();
- request->addBuffer(stream, buffer);
+ descriptor.request_->addBuffer(cameraStream->stream(), buffer,
+ camera3Buffer.acquire_fence);
}
- int ret = camera_->queueRequest(request);
- if (ret) {
- LOG(HAL, Error) << "Failed to queue request";
- delete request;
- delete descriptor;
+ /*
+ * Translate controls from Android to libcamera and queue the request
+ * to the CameraWorker thread.
+ */
+ int ret = processControls(&descriptor);
+ if (ret)
return ret;
+
+ /*
+ * If flush is in progress abort the request. If the camera has been
+ * stopped we have to re-start it to be able to process the request.
+ */
+ MutexLocker stateLock(stateMutex_);
+
+ if (state_ == State::Flushing) {
+ abortRequest(camera3Request);
+ return 0;
+ }
+
+ if (state_ == State::Stopped) {
+ worker_.start();
+
+ ret = camera_->start();
+ if (ret) {
+ LOG(HAL, Error) << "Failed to start camera";
+ worker_.stop();
+ return ret;
+ }
+
+ state_ = State::Running;
+ }
+
+ worker_.queueRequest(descriptor.request_.get());
+
+ {
+ MutexLocker descriptorsLock(descriptorsMutex_);
+ descriptors_[descriptor.request_->cookie()] = std::move(descriptor);
}
return 0;
@@ -1447,152 +996,127 @@ int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Reques
void CameraDevice::requestComplete(Request *request)
{
- const Request::BufferMap &buffers = request->buffers();
- camera3_buffer_status status = CAMERA3_BUFFER_STATUS_OK;
- std::unique_ptr<CameraMetadata> resultMetadata;
- Camera3RequestDescriptor *descriptor =
- reinterpret_cast<Camera3RequestDescriptor *>(request->cookie());
+ decltype(descriptors_)::node_type node;
+ {
+ MutexLocker descriptorsLock(descriptorsMutex_);
+ auto it = descriptors_.find(request->cookie());
+ if (it == descriptors_.end()) {
+ /*
+ * \todo Clarify if the Camera has to be closed on
+ * ERROR_DEVICE and possibly demote the Fatal to simple
+ * Error.
+ */
+ notifyError(0, nullptr, CAMERA3_MSG_ERROR_DEVICE);
+ LOG(HAL, Fatal)
+ << "Unknown request: " << request->cookie();
+
+ return;
+ }
+
+ node = descriptors_.extract(it);
+ }
+ Camera3RequestDescriptor &descriptor = node.mapped();
+
+ /*
+ * Prepare the capture result for the Android camera stack.
+ *
+ * The buffer status is set to OK and later changed to ERROR if
+ * post-processing/compression fails.
+ */
+ camera3_capture_result_t captureResult = {};
+ captureResult.frame_number = descriptor.frameNumber_;
+ captureResult.num_output_buffers = descriptor.buffers_.size();
+ for (camera3_stream_buffer_t &buffer : descriptor.buffers_) {
+ buffer.acquire_fence = -1;
+ buffer.release_fence = -1;
+ buffer.status = CAMERA3_BUFFER_STATUS_OK;
+ }
+ captureResult.output_buffers = descriptor.buffers_.data();
+ captureResult.partial_result = 1;
+ /*
+ * If the Request has failed, abort the request by notifying the error
+ * and complete the request with all buffers in error state.
+ */
if (request->status() != Request::RequestComplete) {
- LOG(HAL, Error) << "Request not successfully completed: "
+ LOG(HAL, Error) << "Request " << request->cookie()
+ << " not successfully completed: "
<< request->status();
- status = CAMERA3_BUFFER_STATUS_ERROR;
+
+ notifyError(descriptor.frameNumber_, nullptr,
+ CAMERA3_MSG_ERROR_REQUEST);
+
+ captureResult.partial_result = 0;
+ for (camera3_stream_buffer_t &buffer : descriptor.buffers_)
+ buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ callbacks_->process_capture_result(callbacks_, &captureResult);
+
+ return;
}
/*
- * \todo The timestamp used for the metadata is currently always taken
- * from the first buffer (which may be the first stream) in the Request.
- * It might be appropriate to return a 'correct' (as determined by
- * pipeline handlers) timestamp in the Request itself.
+ * Notify shutter as soon as we have verified we have a valid request.
+ *
+ * \todo The shutter event notification should be sent to the framework
+ * as soon as possible, earlier than request completion time.
+ */
+ uint64_t sensorTimestamp = static_cast<uint64_t>(request->metadata()
+ .get(controls::SensorTimestamp));
+ notifyShutter(descriptor.frameNumber_, sensorTimestamp);
+
+ LOG(HAL, Debug) << "Request " << request->cookie() << " completed with "
+ << descriptor.buffers_.size() << " streams";
+
+ /*
+ * Generate the metadata associated with the captured buffers.
+ *
+ * Notify if the metadata generation has failed, but continue processing
+ * buffers and return an empty metadata pack.
*/
- FrameBuffer *buffer = buffers.begin()->second;
- resultMetadata = getResultMetadata(descriptor->frameNumber,
- buffer->metadata().timestamp);
+ std::unique_ptr<CameraMetadata> resultMetadata = getResultMetadata(descriptor);
+ if (!resultMetadata) {
+ notifyError(descriptor.frameNumber_, nullptr, CAMERA3_MSG_ERROR_RESULT);
+
+		/* The camera framework expects an empty metadata pack on error. */
+ resultMetadata = std::make_unique<CameraMetadata>(0, 0);
+ }
/* Handle any JPEG compression. */
- for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
+ for (camera3_stream_buffer_t &buffer : descriptor.buffers_) {
CameraStream *cameraStream =
- static_cast<CameraStream *>(descriptor->buffers[i].stream->priv);
-
- if (cameraStream->format() != formats::MJPEG)
- continue;
+ static_cast<CameraStream *>(buffer.stream->priv);
- Encoder *encoder = cameraStream->encoder();
- if (!encoder) {
- LOG(HAL, Error) << "Failed to identify encoder";
+ if (cameraStream->camera3Stream().format != HAL_PIXEL_FORMAT_BLOB)
continue;
- }
- StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index());
- Stream *stream = streamConfiguration->stream();
- FrameBuffer *buffer = request->findBuffer(stream);
- if (!buffer) {
+ FrameBuffer *src = request->findBuffer(cameraStream->stream());
+ if (!src) {
LOG(HAL, Error) << "Failed to find a source stream buffer";
+ buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ notifyError(descriptor.frameNumber_, buffer.stream,
+ CAMERA3_MSG_ERROR_BUFFER);
continue;
}
+ int ret = cameraStream->process(*src, *buffer.buffer,
+ descriptor.settings_,
+ resultMetadata.get());
/*
- * \todo Buffer mapping and compression should be moved to a
- * separate thread.
+ * Return the FrameBuffer to the CameraStream now that we're
+ * done processing it.
*/
+ if (cameraStream->type() == CameraStream::Type::Internal)
+ cameraStream->putBuffer(src);
- MappedCamera3Buffer mapped(*descriptor->buffers[i].buffer,
- PROT_READ | PROT_WRITE);
- if (!mapped.isValid()) {
- LOG(HAL, Error) << "Failed to mmap android blob buffer";
- continue;
- }
-
- /* Set EXIF metadata for various tags. */
- Exif exif;
- /* \todo Set Make and Model from external vendor tags. */
- exif.setMake("libcamera");
- exif.setModel("cameraModel");
- exif.setOrientation(orientation_);
- exif.setSize(cameraStream->size());
- /*
- * We set the frame's EXIF timestamp as the time of encode.
- * Since the precision we need for EXIF timestamp is only one
- * second, it is good enough.
- */
- exif.setTimestamp(std::time(nullptr));
- if (exif.generate() != 0)
- LOG(HAL, Error) << "Failed to generate valid EXIF data";
-
- int jpeg_size = encoder->encode(buffer, mapped.maps()[0], exif.data());
- if (jpeg_size < 0) {
- LOG(HAL, Error) << "Failed to encode stream image";
- status = CAMERA3_BUFFER_STATUS_ERROR;
- continue;
+ if (ret) {
+ buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ notifyError(descriptor.frameNumber_, buffer.stream,
+ CAMERA3_MSG_ERROR_BUFFER);
}
-
- /*
- * Fill in the JPEG blob header.
- *
- * The mapped size of the buffer is being returned as
- * substantially larger than the requested JPEG_MAX_SIZE
- * (which is referenced from maxJpegBufferSize_). Utilise
- * this static size to ensure the correct offset of the blob is
- * determined.
- *
- * \todo Investigate if the buffer size mismatch is an issue or
- * expected behaviour.
- */
- uint8_t *resultPtr = mapped.maps()[0].data() +
- maxJpegBufferSize_ -
- sizeof(struct camera3_jpeg_blob);
- auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
- blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
- blob->jpeg_size = jpeg_size;
-
- /* Update the JPEG result Metadata. */
- resultMetadata->addEntry(ANDROID_JPEG_SIZE,
- &jpeg_size, 1);
-
- const uint32_t jpeg_quality = 95;
- resultMetadata->addEntry(ANDROID_JPEG_QUALITY,
- &jpeg_quality, 1);
-
- const uint32_t jpeg_orientation = 0;
- resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION,
- &jpeg_orientation, 1);
- }
-
- /* Prepare to call back the Android camera stack. */
- camera3_capture_result_t captureResult = {};
- captureResult.frame_number = descriptor->frameNumber;
- captureResult.num_output_buffers = descriptor->numBuffers;
- for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
- descriptor->buffers[i].acquire_fence = -1;
- descriptor->buffers[i].release_fence = -1;
- descriptor->buffers[i].status = status;
- }
- captureResult.output_buffers =
- const_cast<const camera3_stream_buffer_t *>(descriptor->buffers);
-
-
- if (status == CAMERA3_BUFFER_STATUS_OK) {
- notifyShutter(descriptor->frameNumber,
- buffer->metadata().timestamp);
-
- captureResult.partial_result = 1;
- captureResult.result = resultMetadata->get();
- }
-
- if (status == CAMERA3_BUFFER_STATUS_ERROR || !captureResult.result) {
- /* \todo Improve error handling. In case we notify an error
- * because the metadata generation fails, a shutter event has
- * already been notified for this frame number before the error
- * is here signalled. Make sure the error path plays well with
- * the camera stack state machine.
- */
- notifyError(descriptor->frameNumber,
- descriptor->buffers[0].stream);
}
+ captureResult.result = resultMetadata->get();
callbacks_->process_capture_result(callbacks_, &captureResult);
-
- delete descriptor;
}
std::string CameraDevice::logPrefix() const
@@ -1611,21 +1135,15 @@ void CameraDevice::notifyShutter(uint32_t frameNumber, uint64_t timestamp)
callbacks_->notify(callbacks_, &notify);
}
-void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream)
+void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream,
+ camera3_error_msg_code code)
{
camera3_notify_msg_t notify = {};
- /*
- * \todo Report and identify the stream number or configuration to
- * clarify the stream that failed.
- */
- LOG(HAL, Error) << "Error occurred on frame " << frameNumber << " ("
- << toPixelFormat(stream->format).toString() << ")";
-
notify.type = CAMERA3_MSG_ERROR;
notify.message.error.error_stream = stream;
notify.message.error.frame_number = frameNumber;
- notify.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+ notify.message.error.error_code = code;
callbacks_->notify(callbacks_, &notify);
}
@@ -1634,63 +1152,182 @@ void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream)
* Produce a set of fixed result metadata.
*/
std::unique_ptr<CameraMetadata>
-CameraDevice::getResultMetadata([[maybe_unused]] int frame_number,
- int64_t timestamp)
+CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) const
{
+ const ControlList &metadata = descriptor.request_->metadata();
+ const CameraMetadata &settings = descriptor.settings_;
+ camera_metadata_ro_entry_t entry;
+ bool found;
+
/*
* \todo Keep this in sync with the actual number of entries.
- * Currently: 18 entries, 62 bytes
+ * Currently: 40 entries, 156 bytes
+ *
+ * Reserve more space for the JPEG metadata set by the post-processor.
+ * Currently:
+ * ANDROID_JPEG_GPS_COORDINATES (double x 3) = 24 bytes
+ * ANDROID_JPEG_GPS_PROCESSING_METHOD (byte x 32) = 32 bytes
+ * ANDROID_JPEG_GPS_TIMESTAMP (int64) = 8 bytes
+ * ANDROID_JPEG_SIZE (int32_t) = 4 bytes
+ * ANDROID_JPEG_QUALITY (byte) = 1 byte
+ * ANDROID_JPEG_ORIENTATION (int32_t) = 4 bytes
+ * ANDROID_JPEG_THUMBNAIL_QUALITY (byte) = 1 byte
+ * ANDROID_JPEG_THUMBNAIL_SIZE (int32 x 2) = 8 bytes
+ * Total bytes for JPEG metadata: 82
*/
std::unique_ptr<CameraMetadata> resultMetadata =
- std::make_unique<CameraMetadata>(18, 62);
+ std::make_unique<CameraMetadata>(44, 166);
if (!resultMetadata->isValid()) {
- LOG(HAL, Error) << "Failed to allocate static metadata";
+ LOG(HAL, Error) << "Failed to allocate result metadata";
return nullptr;
}
- const uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
- resultMetadata->addEntry(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
+ /*
+ * \todo The value of the results metadata copied from the settings
+ * will have to be passed to the libcamera::Camera and extracted
+ * from libcamera::Request::metadata.
+ */
+
+ uint8_t value = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+ value);
+
+ value = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE, value);
+
+ int32_t value32 = 0;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ value32);
- const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
- resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK, &ae_lock, 1);
+ value = ANDROID_CONTROL_AE_LOCK_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK, value);
- uint8_t af_state = ANDROID_CONTROL_AF_STATE_INACTIVE;
- resultMetadata->addEntry(ANDROID_CONTROL_AF_STATE, &af_state, 1);
+ value = ANDROID_CONTROL_AE_MODE_ON;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_MODE, value);
- const uint8_t awb_state = ANDROID_CONTROL_AWB_STATE_CONVERGED;
- resultMetadata->addEntry(ANDROID_CONTROL_AWB_STATE, &awb_state, 1);
+ if (settings.getEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry))
+ /*
+ * \todo Retrieve the AE FPS range from the libcamera metadata.
+ * As libcamera does not support that control, as a temporary
+ * workaround return what the framework asked.
+ */
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+ entry.data.i32, 2);
- const uint8_t awb_lock = ANDROID_CONTROL_AWB_LOCK_OFF;
- resultMetadata->addEntry(ANDROID_CONTROL_AWB_LOCK, &awb_lock, 1);
+ found = settings.getEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &entry);
+ value = found ? *entry.data.u8 :
+ (uint8_t)ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, value);
- const uint8_t lens_state = ANDROID_LENS_STATE_STATIONARY;
- resultMetadata->addEntry(ANDROID_LENS_STATE, &lens_state, 1);
+ value = ANDROID_CONTROL_AE_STATE_CONVERGED;
+ resultMetadata->addEntry(ANDROID_CONTROL_AE_STATE, value);
- int32_t sensorSizes[] = {
- 0, 0, 2560, 1920,
- };
- resultMetadata->addEntry(ANDROID_SCALER_CROP_REGION, sensorSizes, 4);
+ value = ANDROID_CONTROL_AF_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_AF_MODE, value);
+
+ value = ANDROID_CONTROL_AF_STATE_INACTIVE;
+ resultMetadata->addEntry(ANDROID_CONTROL_AF_STATE, value);
- resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
+ value = ANDROID_CONTROL_AF_TRIGGER_IDLE;
+ resultMetadata->addEntry(ANDROID_CONTROL_AF_TRIGGER, value);
+
+ value = ANDROID_CONTROL_AWB_MODE_AUTO;
+ resultMetadata->addEntry(ANDROID_CONTROL_AWB_MODE, value);
+
+ value = ANDROID_CONTROL_AWB_LOCK_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_AWB_LOCK, value);
+
+ value = ANDROID_CONTROL_AWB_STATE_CONVERGED;
+ resultMetadata->addEntry(ANDROID_CONTROL_AWB_STATE, value);
+
+ value = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+ resultMetadata->addEntry(ANDROID_CONTROL_CAPTURE_INTENT, value);
+
+ value = ANDROID_CONTROL_EFFECT_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_EFFECT_MODE, value);
+
+ value = ANDROID_CONTROL_MODE_AUTO;
+ resultMetadata->addEntry(ANDROID_CONTROL_MODE, value);
+
+ value = ANDROID_CONTROL_SCENE_MODE_DISABLED;
+ resultMetadata->addEntry(ANDROID_CONTROL_SCENE_MODE, value);
+
+ value = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, value);
+
+ value = ANDROID_FLASH_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_FLASH_MODE, value);
+
+ value = ANDROID_FLASH_STATE_UNAVAILABLE;
+ resultMetadata->addEntry(ANDROID_FLASH_STATE, value);
+
+ if (settings.getEntry(ANDROID_LENS_APERTURE, &entry))
+ resultMetadata->addEntry(ANDROID_LENS_APERTURE, entry.data.f, 1);
+
+ float focal_length = 1.0;
+ resultMetadata->addEntry(ANDROID_LENS_FOCAL_LENGTH, focal_length);
+
+ value = ANDROID_LENS_STATE_STATIONARY;
+ resultMetadata->addEntry(ANDROID_LENS_STATE, value);
+
+ value = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+ value);
+
+ value32 = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_SENSOR_TEST_PATTERN_MODE, value32);
+
+ value = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE, value);
+
+ value = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+ value);
+
+ value = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, value);
+
+ value = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
+ resultMetadata->addEntry(ANDROID_STATISTICS_SCENE_FLICKER, value);
+
+ value = ANDROID_NOISE_REDUCTION_MODE_OFF;
+ resultMetadata->addEntry(ANDROID_NOISE_REDUCTION_MODE, value);
/* 33.3 msec */
const int64_t rolling_shutter_skew = 33300000;
resultMetadata->addEntry(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
- &rolling_shutter_skew, 1);
+ rolling_shutter_skew);
- /* 16.6 msec */
- const int64_t exposure_time = 16600000;
- resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME,
- &exposure_time, 1);
+ /* Add metadata tags reported by libcamera. */
+ const int64_t timestamp = metadata.get(controls::SensorTimestamp);
+ resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, timestamp);
- const uint8_t lens_shading_map_mode =
- ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
- resultMetadata->addEntry(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
- &lens_shading_map_mode, 1);
+ if (metadata.contains(controls::draft::PipelineDepth)) {
+ uint8_t pipeline_depth =
+ metadata.get<int32_t>(controls::draft::PipelineDepth);
+ resultMetadata->addEntry(ANDROID_REQUEST_PIPELINE_DEPTH,
+ pipeline_depth);
+ }
- const uint8_t scene_flicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
- resultMetadata->addEntry(ANDROID_STATISTICS_SCENE_FLICKER,
- &scene_flicker, 1);
+ if (metadata.contains(controls::ExposureTime)) {
+ int64_t exposure = metadata.get(controls::ExposureTime) * 1000ULL;
+ resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME, exposure);
+ }
+
+ if (metadata.contains(controls::FrameDuration)) {
+ int64_t duration = metadata.get(controls::FrameDuration) * 1000;
+ resultMetadata->addEntry(ANDROID_SENSOR_FRAME_DURATION,
+ duration);
+ }
+
+ if (metadata.contains(controls::ScalerCrop)) {
+ Rectangle crop = metadata.get(controls::ScalerCrop);
+ int32_t cropRect[] = {
+ crop.x, crop.y, static_cast<int32_t>(crop.width),
+ static_cast<int32_t>(crop.height),
+ };
+ resultMetadata->addEntry(ANDROID_SCALER_CROP_REGION, cropRect);
+ }
/*
* Return the result metadata pack even is not valid: get() will return
@@ -1700,5 +1337,12 @@ CameraDevice::getResultMetadata([[maybe_unused]] int frame_number,
LOG(HAL, Error) << "Failed to construct result metadata";
}
+ if (resultMetadata->resized()) {
+ auto [entryCount, dataCount] = resultMetadata->usage();
+ LOG(HAL, Info)
+ << "Result metadata resized: " << entryCount
+ << " entries and " << dataCount << " bytes used";
+ }
+
return resultMetadata;
}
diff --git a/src/android/camera_device.h b/src/android/camera_device.h
index 1837748d..089a6204 100644
--- a/src/android/camera_device.h
+++ b/src/android/camera_device.h
@@ -9,65 +9,52 @@
#include <map>
#include <memory>
-#include <tuple>
+#include <mutex>
#include <vector>
#include <hardware/camera3.h>
-#include <libcamera/buffer.h>
+#include <libcamera/base/class.h>
+#include <libcamera/base/log.h>
+#include <libcamera/base/message.h>
+#include <libcamera/base/thread.h>
+
#include <libcamera/camera.h>
-#include <libcamera/geometry.h>
+#include <libcamera/framebuffer.h>
#include <libcamera/request.h>
#include <libcamera/stream.h>
-#include "libcamera/internal/log.h"
-#include "libcamera/internal/message.h"
+#include "libcamera/internal/framebuffer.h"
+#include "camera_capabilities.h"
+#include "camera_metadata.h"
+#include "camera_stream.h"
+#include "camera_worker.h"
#include "jpeg/encoder.h"
-class CameraMetadata;
-
-class CameraStream
-{
-public:
- CameraStream(libcamera::PixelFormat format, libcamera::Size size,
- unsigned int index, Encoder *encoder = nullptr);
-
- const libcamera::PixelFormat &format() const { return format_; }
- const libcamera::Size &size() const { return size_; }
- unsigned int index() const { return index_; }
- Encoder *encoder() const { return encoder_.get(); }
-
-private:
- libcamera::PixelFormat format_;
- libcamera::Size size_;
- /*
- * The index of the libcamera StreamConfiguration as added during
- * configureStreams(). A single libcamera Stream may be used to deliver
- * one or more streams to the Android framework.
- */
- unsigned int index_;
- std::unique_ptr<Encoder> encoder_;
-};
-
+struct CameraConfigData;
class CameraDevice : protected libcamera::Loggable
{
public:
- static std::shared_ptr<CameraDevice> create(unsigned int id,
- const std::shared_ptr<libcamera::Camera> &cam);
+ static std::unique_ptr<CameraDevice> create(unsigned int id,
+ std::shared_ptr<libcamera::Camera> cam);
~CameraDevice();
- int initialize();
+ int initialize(const CameraConfigData *cameraConfigData);
int open(const hw_module_t *hardwareModule);
void close();
+ void flush();
unsigned int id() const { return id_; }
camera3_device_t *camera3Device() { return &camera3Device_; }
- const libcamera::Camera *camera() const { return camera_.get(); }
+ const std::shared_ptr<libcamera::Camera> &camera() const { return camera_; }
+ const std::string &maker() const { return maker_; }
+ const std::string &model() const { return model_; }
int facing() const { return facing_; }
int orientation() const { return orientation_; }
+ unsigned int maxJpegBufferSize() const;
void setCallbacks(const camera3_callback_ops_t *callbacks);
const camera_metadata_t *getStaticMetadata();
@@ -80,60 +67,69 @@ protected:
std::string logPrefix() const override;
private:
- CameraDevice(unsigned int id, const std::shared_ptr<libcamera::Camera> &camera);
+ LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraDevice)
+
+ CameraDevice(unsigned int id, std::shared_ptr<libcamera::Camera> camera);
struct Camera3RequestDescriptor {
- Camera3RequestDescriptor(unsigned int frameNumber,
- unsigned int numBuffers);
- ~Camera3RequestDescriptor();
-
- uint32_t frameNumber;
- uint32_t numBuffers;
- camera3_stream_buffer_t *buffers;
- std::vector<std::unique_ptr<libcamera::FrameBuffer>> frameBuffers;
+ Camera3RequestDescriptor() = default;
+ ~Camera3RequestDescriptor() = default;
+ Camera3RequestDescriptor(libcamera::Camera *camera,
+ const camera3_capture_request_t *camera3Request);
+ Camera3RequestDescriptor &operator=(Camera3RequestDescriptor &&) = default;
+
+ uint32_t frameNumber_ = 0;
+ std::vector<camera3_stream_buffer_t> buffers_;
+ std::vector<std::unique_ptr<libcamera::FrameBuffer>> frameBuffers_;
+ CameraMetadata settings_;
+ std::unique_ptr<CaptureRequest> request_;
};
- struct Camera3StreamConfiguration {
- libcamera::Size resolution;
- int androidFormat;
+ enum class State {
+ Stopped,
+ Flushing,
+ Running,
};
- int initializeStreamConfigurations();
- std::vector<libcamera::Size>
- getYUVResolutions(libcamera::CameraConfiguration *cameraConfig,
- const libcamera::PixelFormat &pixelFormat,
- const std::vector<libcamera::Size> &resolutions);
- std::vector<libcamera::Size>
- getRawResolutions(const libcamera::PixelFormat &pixelFormat);
+ void stop();
- std::tuple<uint32_t, uint32_t> calculateStaticMetadataSize();
libcamera::FrameBuffer *createFrameBuffer(const buffer_handle_t camera3buffer);
+ void abortRequest(camera3_capture_request_t *request);
+ bool isValidRequest(camera3_capture_request_t *request) const;
void notifyShutter(uint32_t frameNumber, uint64_t timestamp);
- void notifyError(uint32_t frameNumber, camera3_stream_t *stream);
- CameraMetadata *requestTemplatePreview();
- libcamera::PixelFormat toPixelFormat(int format);
- std::unique_ptr<CameraMetadata> getResultMetadata(int frame_number,
- int64_t timestamp);
+ void notifyError(uint32_t frameNumber, camera3_stream_t *stream,
+ camera3_error_msg_code code);
+ int processControls(Camera3RequestDescriptor *descriptor);
+ std::unique_ptr<CameraMetadata> getResultMetadata(
+ const Camera3RequestDescriptor &descriptor) const;
unsigned int id_;
camera3_device_t camera3Device_;
- bool running_;
+ CameraWorker worker_;
+
+ libcamera::Mutex stateMutex_; /* Protects access to the camera state. */
+ State state_;
+
std::shared_ptr<libcamera::Camera> camera_;
std::unique_ptr<libcamera::CameraConfiguration> config_;
+ CameraCapabilities capabilities_;
- CameraMetadata *staticMetadata_;
- std::map<unsigned int, const CameraMetadata *> requestTemplates_;
+ std::map<unsigned int, std::unique_ptr<CameraMetadata>> requestTemplates_;
const camera3_callback_ops_t *callbacks_;
- std::vector<Camera3StreamConfiguration> streamConfigurations_;
- std::map<int, libcamera::PixelFormat> formatsMap_;
std::vector<CameraStream> streams_;
+ libcamera::Mutex descriptorsMutex_; /* Protects descriptors_. */
+ std::map<uint64_t, Camera3RequestDescriptor> descriptors_;
+
+ std::string maker_;
+ std::string model_;
+
int facing_;
int orientation_;
- unsigned int maxJpegBufferSize_;
+ CameraMetadata lastSettings_;
};
#endif /* __ANDROID_CAMERA_DEVICE_H__ */
diff --git a/src/android/camera_hal_config.cpp b/src/android/camera_hal_config.cpp
new file mode 100644
index 00000000..833cf4ba
--- /dev/null
+++ b/src/android/camera_hal_config.cpp
@@ -0,0 +1,407 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_hal_config.cpp - Camera HAL configuration file manager
+ */
+#include "camera_hal_config.h"
+
+#if defined(_GLIBCXX_RELEASE) && _GLIBCXX_RELEASE < 8
+#include <experimental/filesystem>
+namespace std {
+namespace filesystem = std::experimental::filesystem;
+}
+#else
+#include <filesystem>
+#endif
+#include <stdio.h>
+#include <stdlib.h>
+#include <string>
+#include <yaml.h>
+
+#include <hardware/camera3.h>
+
+#include <libcamera/base/log.h>
+
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(HALConfig)
+
/*
 * Private implementation of CameraHalConfig: wraps a libyaml token-based
 * parser used to read the HAL configuration file.
 */
class CameraHalConfig::Private : public Extensible::Private
{
	LIBCAMERA_DECLARE_PUBLIC(CameraHalConfig)

public:
	Private(CameraHalConfig *halConfig);

	/* Parse the already-open file \a fh and fill \a cameras; 0 on success. */
	int parseConfigFile(FILE *fh, std::map<std::string, CameraConfigData> *cameras);

private:
	/* Each helper consumes tokens from parser_ as a side effect. */
	std::string parseValue();
	std::string parseKey();
	int parseValueBlock();
	int parseCameraLocation(CameraConfigData *cameraConfigData,
				const std::string &location);
	int parseCameraConfigData(const std::string &cameraId);
	int parseCameras();
	int parseEntry();

	yaml_parser_t parser_;
	/* Destination map owned by the caller of parseConfigFile(). */
	std::map<std::string, CameraConfigData> *cameras_;
};
+
/* The private side only needs the back-pointer to its public interface. */
CameraHalConfig::Private::Private(CameraHalConfig *halConfig)
	: Extensible::Private(halConfig)
{
}
+
+std::string CameraHalConfig::Private::parseValue()
+{
+ yaml_token_t token;
+
+ /* Make sure the token type is a value and get its content. */
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_VALUE_TOKEN) {
+ yaml_token_delete(&token);
+ return "";
+ }
+ yaml_token_delete(&token);
+
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_SCALAR_TOKEN) {
+ yaml_token_delete(&token);
+ return "";
+ }
+
+ std::string value(reinterpret_cast<char *>(token.data.scalar.value),
+ token.data.scalar.length);
+ yaml_token_delete(&token);
+
+ return value;
+}
+
+std::string CameraHalConfig::Private::parseKey()
+{
+ yaml_token_t token;
+
+ /* Make sure the token type is a key and get its value. */
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_SCALAR_TOKEN) {
+ yaml_token_delete(&token);
+ return "";
+ }
+
+ std::string value(reinterpret_cast<char *>(token.data.scalar.value),
+ token.data.scalar.length);
+ yaml_token_delete(&token);
+
+ return value;
+}
+
+int CameraHalConfig::Private::parseValueBlock()
+{
+ yaml_token_t token;
+
+ /* Make sure the next token are VALUE and BLOCK_MAPPING_START. */
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_VALUE_TOKEN) {
+ yaml_token_delete(&token);
+ return -EINVAL;
+ }
+ yaml_token_delete(&token);
+
+ yaml_parser_scan(&parser_, &token);
+ if (token.type != YAML_BLOCK_MAPPING_START_TOKEN) {
+ yaml_token_delete(&token);
+ return -EINVAL;
+ }
+ yaml_token_delete(&token);
+
+ return 0;
+}
+
+int CameraHalConfig::Private::parseCameraLocation(CameraConfigData *cameraConfigData,
+ const std::string &location)
+{
+ if (location == "front")
+ cameraConfigData->facing = CAMERA_FACING_FRONT;
+ else if (location == "back")
+ cameraConfigData->facing = CAMERA_FACING_BACK;
+ else if (location == "external")
+ cameraConfigData->facing = CAMERA_FACING_EXTERNAL;
+ else
+ return -EINVAL;
+
+ return 0;
+}
+
+int CameraHalConfig::Private::parseCameraConfigData(const std::string &cameraId)
+{
+ int ret = parseValueBlock();
+ if (ret)
+ return ret;
+
+ /*
+ * Parse the camera properties and store them in a cameraConfigData
+ * instance.
+ *
+ * Add a safety counter to make sure we don't loop indefinitely in case
+ * the configuration file is malformed.
+ */
+ CameraConfigData cameraConfigData;
+ unsigned int sentinel = 100;
+ bool blockEnd = false;
+ yaml_token_t token;
+
+ do {
+ yaml_parser_scan(&parser_, &token);
+ switch (token.type) {
+ case YAML_KEY_TOKEN: {
+ yaml_token_delete(&token);
+
+ /*
+ * Parse the camera property key and make sure it is
+ * valid.
+ */
+ std::string key = parseKey();
+ std::string value = parseValue();
+ if (key.empty() || value.empty())
+ return -EINVAL;
+
+ if (key == "location") {
+ ret = parseCameraLocation(&cameraConfigData, value);
+ if (ret) {
+ LOG(HALConfig, Error)
+ << "Unknown location: " << value;
+ return -EINVAL;
+ }
+ } else if (key == "rotation") {
+ ret = std::stoi(value);
+ if (ret < 0 || ret >= 360) {
+ LOG(HALConfig, Error)
+ << "Unknown rotation: " << value;
+ return -EINVAL;
+ }
+ cameraConfigData.rotation = ret;
+ } else {
+ LOG(HALConfig, Error)
+ << "Unknown key: " << key;
+ return -EINVAL;
+ }
+ break;
+ }
+
+ case YAML_BLOCK_END_TOKEN:
+ blockEnd = true;
+ [[fallthrough]];
+ default:
+ yaml_token_delete(&token);
+ break;
+ }
+
+ --sentinel;
+ } while (!blockEnd && sentinel);
+ if (!sentinel)
+ return -EINVAL;
+
+ (*cameras_)[cameraId] = cameraConfigData;
+
+ return 0;
+}
+
+int CameraHalConfig::Private::parseCameras()
+{
+ int ret = parseValueBlock();
+ if (ret) {
+ LOG(HALConfig, Error) << "Configuration file is not valid";
+ return ret;
+ }
+
+ /*
+ * Parse the camera properties.
+ *
+ * Each camera properties block is a list of properties associated
+ * with the ID (as assembled by CameraSensor::generateId()) of the
+ * camera they refer to.
+ *
+ * cameras:
+ * "camera0 id":
+ * key: value
+ * key: value
+ * ...
+ *
+ * "camera1 id":
+ * key: value
+ * key: value
+ * ...
+ */
+ bool blockEnd = false;
+ yaml_token_t token;
+ do {
+ yaml_parser_scan(&parser_, &token);
+ switch (token.type) {
+ case YAML_KEY_TOKEN: {
+ yaml_token_delete(&token);
+
+ /* Parse the camera ID as key of the property list. */
+ std::string cameraId = parseKey();
+ if (cameraId.empty())
+ return -EINVAL;
+
+ ret = parseCameraConfigData(cameraId);
+ if (ret)
+ return -EINVAL;
+ break;
+ }
+ case YAML_BLOCK_END_TOKEN:
+ blockEnd = true;
+ [[fallthrough]];
+ default:
+ yaml_token_delete(&token);
+ break;
+ }
+ } while (!blockEnd);
+
+ return 0;
+}
+
+int CameraHalConfig::Private::parseEntry()
+{
+ int ret = -EINVAL;
+
+ /*
+ * Parse each key we find in the file.
+ *
+ * The 'cameras' keys maps to a list of (lists) of camera properties.
+ */
+
+ std::string key = parseKey();
+ if (key.empty())
+ return ret;
+
+ if (key == "cameras")
+ ret = parseCameras();
+ else
+ LOG(HALConfig, Error) << "Unknown key: " << key;
+
+ return ret;
+}
+
/*
 * Parse the configuration file \a fh and populate \a cameras with one entry
 * per described camera.
 *
 * Return 0 when the whole document was consumed successfully, -EINVAL when
 * the parser could not be initialized or the document is malformed.
 */
int CameraHalConfig::Private::parseConfigFile(FILE *fh,
					      std::map<std::string, CameraConfigData> *cameras)
{
	cameras_ = cameras;

	int ret = yaml_parser_initialize(&parser_);
	if (!ret) {
		LOG(HALConfig, Error) << "Failed to initialize yaml parser";
		return -EINVAL;
	}
	yaml_parser_set_input_file(&parser_, fh);

	/* A valid document starts with a STREAM_START token... */
	yaml_token_t token;
	yaml_parser_scan(&parser_, &token);
	if (token.type != YAML_STREAM_START_TOKEN) {
		LOG(HALConfig, Error) << "Configuration file is not valid";
		yaml_token_delete(&token);
		yaml_parser_delete(&parser_);
		return -EINVAL;
	}
	yaml_token_delete(&token);

	/* ... immediately followed by the opening of the top-level mapping. */
	yaml_parser_scan(&parser_, &token);
	if (token.type != YAML_BLOCK_MAPPING_START_TOKEN) {
		LOG(HALConfig, Error) << "Configuration file is not valid";
		yaml_token_delete(&token);
		yaml_parser_delete(&parser_);
		return -EINVAL;
	}
	yaml_token_delete(&token);

	/*
	 * Parse the file and parse each single key one by one.
	 *
	 * -ENOENT is used internally as the "end of stream reached" marker
	 * and translated to success on return.
	 */
	do {
		yaml_parser_scan(&parser_, &token);
		switch (token.type) {
		case YAML_KEY_TOKEN:
			yaml_token_delete(&token);
			ret = parseEntry();
			break;

		case YAML_STREAM_END_TOKEN:
			ret = -ENOENT;
			[[fallthrough]];
		default:
			yaml_token_delete(&token);
			break;
		}
	} while (ret >= 0);
	yaml_parser_delete(&parser_);

	if (ret && ret != -ENOENT)
		LOG(HALConfig, Error) << "Configuration file is not valid";

	return ret == -ENOENT ? 0 : ret;
}
+
/*
 * Parse the configuration file at construction time; exists_ and valid_
 * record the outcome for later queries through exists() and isValid().
 */
CameraHalConfig::CameraHalConfig()
	: Extensible(new Private(this)), exists_(false), valid_(false)
{
	parseConfigurationFile();
}
+
+/*
+ * Open the HAL configuration file and validate its content.
+ * Return 0 on success, a negative error code otherwise
+ * retval -ENOENT The configuration file is not available
+ * retval -EINVAL The configuration file is available but not valid
+ */
+int CameraHalConfig::parseConfigurationFile()
+{
+ std::filesystem::path filePath = LIBCAMERA_SYSCONF_DIR;
+ filePath /= "camera_hal.yaml";
+ if (!std::filesystem::is_regular_file(filePath)) {
+ LOG(HALConfig, Debug)
+ << "Configuration file: \"" << filePath << "\" not found";
+ return -ENOENT;
+ }
+
+ FILE *fh = fopen(filePath.c_str(), "r");
+ if (!fh) {
+ int ret = -errno;
+ LOG(HALConfig, Error) << "Failed to open configuration file "
+ << filePath << ": " << strerror(-ret);
+ return ret;
+ }
+
+ exists_ = true;
+
+ int ret = _d()->parseConfigFile(fh, &cameras_);
+ fclose(fh);
+ if (ret)
+ return -EINVAL;
+
+ valid_ = true;
+
+ for (const auto &c : cameras_) {
+ const std::string &cameraId = c.first;
+ const CameraConfigData &camera = c.second;
+ LOG(HALConfig, Debug) << "'" << cameraId << "' "
+ << "(" << camera.facing << ")["
+ << camera.rotation << "]";
+ }
+
+ return 0;
+}
+
+const CameraConfigData *CameraHalConfig::cameraConfigData(const std::string &cameraId) const
+{
+ const auto &it = cameras_.find(cameraId);
+ if (it == cameras_.end()) {
+ LOG(HALConfig, Error)
+ << "Camera '" << cameraId
+ << "' not described in the HAL configuration file";
+ return nullptr;
+ }
+
+ return &it->second;
+}
diff --git a/src/android/camera_hal_config.h b/src/android/camera_hal_config.h
new file mode 100644
index 00000000..a79d5d6c
--- /dev/null
+++ b/src/android/camera_hal_config.h
@@ -0,0 +1,39 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera_hal_config.h - Camera HAL configuration file manager
+ */
+#ifndef __ANDROID_CAMERA_HAL_CONFIG_H__
+#define __ANDROID_CAMERA_HAL_CONFIG_H__
+
+#include <map>
+#include <string>
+
+#include <libcamera/base/class.h>
+
/*
 * Static properties assigned to a camera by the HAL configuration file.
 * A value of -1 means "not specified".
 */
struct CameraConfigData {
	int facing = -1;	/* One of the CAMERA_FACING_* values. */
	int rotation = -1;	/* Rotation in degrees, in the range [0, 360). */
};
+
/*
 * Reads the camera_hal.yaml configuration file from LIBCAMERA_SYSCONF_DIR at
 * construction time and exposes the per-camera entries it describes.
 */
class CameraHalConfig final : public libcamera::Extensible
{
	LIBCAMERA_DECLARE_PRIVATE()

public:
	CameraHalConfig();

	/* True when the configuration file was found on disk. */
	bool exists() const { return exists_; }
	/* True when the file was found and parsed successfully. */
	bool isValid() const { return valid_; }

	/* Entry for \a cameraId, or nullptr when the camera is not described. */
	const CameraConfigData *cameraConfigData(const std::string &cameraId) const;

private:
	bool exists_;
	bool valid_;
	std::map<std::string, CameraConfigData> cameras_;

	int parseConfigurationFile();
};
+#endif /* __ANDROID_CAMERA_HAL_CONFIG_H__ */
diff --git a/src/android/camera_hal_manager.cpp b/src/android/camera_hal_manager.cpp
index 05b47401..4cd67544 100644
--- a/src/android/camera_hal_manager.cpp
+++ b/src/android/camera_hal_manager.cpp
@@ -7,16 +7,16 @@
#include "camera_hal_manager.h"
+#include <libcamera/base/log.h>
+
#include <libcamera/camera.h>
#include <libcamera/property_ids.h>
-#include "libcamera/internal/log.h"
-
#include "camera_device.h"
using namespace libcamera;
-LOG_DECLARE_CATEGORY(HAL);
+LOG_DECLARE_CATEGORY(HAL)
/*
* \class CameraHalManager
@@ -34,20 +34,28 @@ CameraHalManager::CameraHalManager()
{
}
-CameraHalManager::~CameraHalManager()
-{
- cameras_.clear();
+/* CameraManager calls stop() in the destructor. */
+CameraHalManager::~CameraHalManager() = default;
- if (cameraManager_) {
- cameraManager_->stop();
- delete cameraManager_;
- cameraManager_ = nullptr;
- }
+/* static */
+CameraHalManager *CameraHalManager::instance()
+{
+ static CameraHalManager *cameraHalManager = new CameraHalManager;
+ return cameraHalManager;
}
int CameraHalManager::init()
{
- cameraManager_ = new CameraManager();
+ cameraManager_ = std::make_unique<CameraManager>();
+
+ /*
+ * If the configuration file is not available the HAL only supports
+ * external cameras. If it exists but it's not valid then error out.
+ */
+ if (halConfig_.exists() && !halConfig_.isValid()) {
+ LOG(HAL, Error) << "HAL configuration file is not valid";
+ return -EINVAL;
+ }
/* Support camera hotplug. */
cameraManager_->cameraAdded.connect(this, &CameraHalManager::cameraAdded);
@@ -57,36 +65,36 @@ int CameraHalManager::init()
if (ret) {
LOG(HAL, Error) << "Failed to start camera manager: "
<< strerror(-ret);
- delete cameraManager_;
- cameraManager_ = nullptr;
+ cameraManager_.reset();
return ret;
}
return 0;
}
-CameraDevice *CameraHalManager::open(unsigned int id,
- const hw_module_t *hardwareModule)
+std::tuple<CameraDevice *, int>
+CameraHalManager::open(unsigned int id, const hw_module_t *hardwareModule)
{
MutexLocker locker(mutex_);
if (!callbacks_) {
LOG(HAL, Error) << "Can't open camera before callbacks are set";
- return nullptr;
+ return { nullptr, -ENODEV };
}
CameraDevice *camera = cameraDeviceFromHalId(id);
if (!camera) {
LOG(HAL, Error) << "Invalid camera id '" << id << "'";
- return nullptr;
+ return { nullptr, -ENODEV };
}
- if (camera->open(hardwareModule))
- return nullptr;
+ int ret = camera->open(hardwareModule);
+ if (ret)
+ return { nullptr, ret };
LOG(HAL, Info) << "Open camera '" << id << "'";
- return camera;
+ return { camera, 0 };
}
void CameraHalManager::cameraAdded(std::shared_ptr<Camera> cam)
@@ -108,6 +116,8 @@ void CameraHalManager::cameraAdded(std::shared_ptr<Camera> cam)
auto iter = cameraIdsMap_.find(cam->id());
if (iter != cameraIdsMap_.end()) {
id = iter->second;
+ if (id >= firstExternalCameraId_)
+ isCameraExternal = true;
} else {
isCameraNew = true;
@@ -124,8 +134,28 @@ void CameraHalManager::cameraAdded(std::shared_ptr<Camera> cam)
}
/* Create a CameraDevice instance to wrap the libcamera Camera. */
- std::shared_ptr<CameraDevice> camera = CameraDevice::create(id, std::move(cam));
- int ret = camera->initialize();
+ std::unique_ptr<CameraDevice> camera = CameraDevice::create(id, cam);
+
+ /*
+ * The configuration file must be valid, and contain a corresponding
+ * entry for internal cameras. External cameras can be initialized
+ * without configuration file.
+ */
+ if (!isCameraExternal && !halConfig_.exists()) {
+ LOG(HAL, Error)
+ << "HAL configuration file is mandatory for internal cameras";
+ return;
+ }
+
+ const CameraConfigData *cameraConfigData = halConfig_.cameraConfigData(cam->id());
+ if (!isCameraExternal && !cameraConfigData) {
+ LOG(HAL, Error)
+ << "HAL configuration entry for internal camera "
+ << cam->id() << " is missing";
+ return;
+ }
+
+ int ret = camera->initialize(cameraConfigData);
if (ret) {
LOG(HAL, Error) << "Failed to initialize camera: " << cam->id();
return;
@@ -154,8 +184,8 @@ void CameraHalManager::cameraRemoved(std::shared_ptr<Camera> cam)
MutexLocker locker(mutex_);
auto iter = std::find_if(cameras_.begin(), cameras_.end(),
- [&cam](std::shared_ptr<CameraDevice> &camera) {
- return cam.get() == camera->camera();
+ [&cam](const std::unique_ptr<CameraDevice> &camera) {
+ return cam == camera->camera();
});
if (iter == cameras_.end())
return;
@@ -191,7 +221,7 @@ int32_t CameraHalManager::cameraLocation(const Camera *cam)
CameraDevice *CameraHalManager::cameraDeviceFromHalId(unsigned int id)
{
auto iter = std::find_if(cameras_.begin(), cameras_.end(),
- [id](std::shared_ptr<CameraDevice> &camera) {
+ [id](const std::unique_ptr<CameraDevice> &camera) {
return camera->id() == id;
});
if (iter == cameras_.end())
@@ -243,7 +273,7 @@ void CameraHalManager::setCallbacks(const camera_module_callbacks_t *callbacks)
* Internal cameras are already assumed to be present at module load
* time by the Android framework.
*/
- for (std::shared_ptr<CameraDevice> &camera : cameras_) {
+ for (const std::unique_ptr<CameraDevice> &camera : cameras_) {
unsigned int id = camera->id();
if (id >= firstExternalCameraId_)
callbacks_->camera_device_status_change(callbacks_, id,
diff --git a/src/android/camera_hal_manager.h b/src/android/camera_hal_manager.h
index a91decc7..3f6d302a 100644
--- a/src/android/camera_hal_manager.h
+++ b/src/android/camera_hal_manager.h
@@ -10,36 +10,47 @@
#include <map>
#include <mutex>
#include <stddef.h>
+#include <tuple>
#include <vector>
#include <hardware/camera_common.h>
#include <hardware/hardware.h>
#include <system/camera_metadata.h>
+#include <libcamera/base/class.h>
+
#include <libcamera/camera_manager.h>
+#include "camera_hal_config.h"
+
class CameraDevice;
class CameraHalManager
{
public:
- CameraHalManager();
~CameraHalManager();
+ static CameraHalManager *instance();
+
int init();
- CameraDevice *open(unsigned int id, const hw_module_t *module);
+ std::tuple<CameraDevice *, int>
+ open(unsigned int id, const hw_module_t *module);
unsigned int numCameras() const;
int getCameraInfo(unsigned int id, struct camera_info *info);
void setCallbacks(const camera_module_callbacks_t *callbacks);
private:
+ LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraHalManager)
+
using Mutex = std::mutex;
using MutexLocker = std::unique_lock<std::mutex>;
static constexpr unsigned int firstExternalCameraId_ = 1000;
+ CameraHalManager();
+
static int32_t cameraLocation(const libcamera::Camera *cam);
void cameraAdded(std::shared_ptr<libcamera::Camera> cam);
@@ -47,10 +58,11 @@ private:
CameraDevice *cameraDeviceFromHalId(unsigned int id);
- libcamera::CameraManager *cameraManager_;
+ std::unique_ptr<libcamera::CameraManager> cameraManager_;
+ CameraHalConfig halConfig_;
const camera_module_callbacks_t *callbacks_;
- std::vector<std::shared_ptr<CameraDevice>> cameras_;
+ std::vector<std::unique_ptr<CameraDevice>> cameras_;
std::map<std::string, unsigned int> cameraIdsMap_;
Mutex mutex_;
diff --git a/src/android/camera_metadata.cpp b/src/android/camera_metadata.cpp
index f0da9ea9..3fc7cf27 100644
--- a/src/android/camera_metadata.cpp
+++ b/src/android/camera_metadata.cpp
@@ -7,29 +7,132 @@
#include "camera_metadata.h"
-#include "libcamera/internal/log.h"
+#include <libcamera/base/log.h>
using namespace libcamera;
-LOG_DEFINE_CATEGORY(CameraMetadata);
+LOG_DEFINE_CATEGORY(CameraMetadata)
+
/* Construct an empty, invalid metadata container with no backing storage. */
CameraMetadata::CameraMetadata()
	: metadata_(nullptr), valid_(false), resized_(false)
{
}
/* Allocate a container with the given entry and data capacities. */
CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity)
	: resized_(false)
{
	metadata_ = allocate_camera_metadata(entryCapacity, dataCapacity);
	valid_ = metadata_ != nullptr;
}
/* Construct by deep-cloning a raw camera_metadata_t buffer. */
CameraMetadata::CameraMetadata(const camera_metadata_t *metadata)
	: resized_(false)
{
	metadata_ = clone_camera_metadata(metadata);
	valid_ = metadata_ != nullptr;
}
+
/* Copy construction delegates to the raw-buffer clone constructor. */
CameraMetadata::CameraMetadata(const CameraMetadata &other)
	: CameraMetadata(other.get())
{
}
+
CameraMetadata::~CameraMetadata()
{
	/* metadata_ may be null for a default-constructed instance. */
	if (metadata_)
		free_camera_metadata(metadata_);
}
-bool CameraMetadata::addEntry(uint32_t tag, const void *data, size_t count)
+CameraMetadata &CameraMetadata::operator=(const CameraMetadata &other)
+{
+ if (this == &other)
+ return *this;
+
+ if (metadata_)
+ free_camera_metadata(metadata_);
+
+ metadata_ = clone_camera_metadata(other.get());
+ valid_ = metadata_ != nullptr;
+
+ return *this;
+}
+
+std::tuple<size_t, size_t> CameraMetadata::usage() const
+{
+ size_t currentEntryCount = get_camera_metadata_entry_count(metadata_);
+ size_t currentDataCount = get_camera_metadata_data_count(metadata_);
+
+ return { currentEntryCount, currentDataCount };
+}
+
+bool CameraMetadata::getEntry(uint32_t tag, camera_metadata_ro_entry_t *entry) const
+{
+ if (find_camera_metadata_ro_entry(metadata_, tag, entry))
+ return false;
+
+ return true;
+}
+
+/*
+ * \brief Resize the metadata container, if necessary
+ * \param[in] count Number of entries to add to the container
+ * \param[in] size Total size of entries to add, in bytes
+ * \return True if resize was successful or unnecessary, false otherwise
+ */
+bool CameraMetadata::resize(size_t count, size_t size)
+{
+ if (!valid_)
+ return false;
+
+ if (!count && !size)
+ return true;
+
+ size_t currentEntryCount = get_camera_metadata_entry_count(metadata_);
+ size_t currentEntryCapacity = get_camera_metadata_entry_capacity(metadata_);
+ size_t newEntryCapacity = currentEntryCapacity < currentEntryCount + count ?
+ currentEntryCapacity * 2 : currentEntryCapacity;
+
+ size_t currentDataCount = get_camera_metadata_data_count(metadata_);
+ size_t currentDataCapacity = get_camera_metadata_data_capacity(metadata_);
+ size_t newDataCapacity = currentDataCapacity < currentDataCount + size ?
+ currentDataCapacity * 2 : currentDataCapacity;
+
+ if (newEntryCapacity > currentEntryCapacity ||
+ newDataCapacity > currentDataCapacity) {
+ camera_metadata_t *oldMetadata = metadata_;
+ metadata_ = allocate_camera_metadata(newEntryCapacity, newDataCapacity);
+ if (!metadata_) {
+ metadata_ = oldMetadata;
+ return false;
+ }
+
+ LOG(CameraMetadata, Info)
+ << "Resized: old entry capacity " << currentEntryCapacity
+ << ", old data capacity " << currentDataCapacity
+ << ", new entry capacity " << newEntryCapacity
+ << ", new data capacity " << newDataCapacity;
+
+ append_camera_metadata(metadata_, oldMetadata);
+ free_camera_metadata(oldMetadata);
+
+ resized_ = true;
+ }
+
+ return true;
+}
+
+bool CameraMetadata::addEntry(uint32_t tag, const void *data, size_t count,
+ size_t elementSize)
{
if (!valid_)
return false;
+ if (!resize(1, count * elementSize)) {
+ LOG(CameraMetadata, Error) << "Failed to resize";
+ valid_ = false;
+ return false;
+ }
+
if (!add_camera_metadata_entry(metadata_, tag, data, count))
return true;
@@ -46,7 +149,8 @@ bool CameraMetadata::addEntry(uint32_t tag, const void *data, size_t count)
return false;
}
-bool CameraMetadata::updateEntry(uint32_t tag, const void *data, size_t count)
+bool CameraMetadata::updateEntry(uint32_t tag, const void *data, size_t count,
+ size_t elementSize)
{
if (!valid_)
return false;
@@ -61,16 +165,39 @@ bool CameraMetadata::updateEntry(uint32_t tag, const void *data, size_t count)
return false;
}
- ret = update_camera_metadata_entry(metadata_, entry.index, data,
- count, nullptr);
- if (ret) {
+ if (camera_metadata_type_size[entry.type] != elementSize) {
const char *name = get_camera_metadata_tag_name(tag);
- LOG(CameraMetadata, Error)
- << "Failed to update tag " << (name ? name : "<unknown>");
+ LOG(CameraMetadata, Fatal)
+ << "Invalid element size for tag "
+ << (name ? name : "<unknown>");
return false;
}
- return true;
+ size_t oldSize =
+ calculate_camera_metadata_entry_data_size(entry.type,
+ entry.count);
+ size_t newSize =
+ calculate_camera_metadata_entry_data_size(entry.type,
+ count);
+ size_t sizeIncrement = newSize - oldSize > 0 ? newSize - oldSize : 0;
+ if (!resize(0, sizeIncrement)) {
+ LOG(CameraMetadata, Error) << "Failed to resize";
+ valid_ = false;
+ return false;
+ }
+
+ ret = update_camera_metadata_entry(metadata_, entry.index, data,
+ count, nullptr);
+ if (!ret)
+ return true;
+
+ const char *name = get_camera_metadata_tag_name(tag);
+ LOG(CameraMetadata, Error)
+ << "Failed to update tag " << (name ? name : "<unknown>");
+
+ valid_ = false;
+
+ return false;
}
camera_metadata_t *CameraMetadata::get()
diff --git a/src/android/camera_metadata.h b/src/android/camera_metadata.h
index 9d047b1b..3b7c9e24 100644
--- a/src/android/camera_metadata.h
+++ b/src/android/camera_metadata.h
@@ -8,25 +8,91 @@
#define __ANDROID_CAMERA_METADATA_H__
#include <stdint.h>
+#include <vector>
#include <system/camera_metadata.h>
class CameraMetadata
{
public:
+ CameraMetadata();
CameraMetadata(size_t entryCapacity, size_t dataCapacity);
+ CameraMetadata(const camera_metadata_t *metadata);
+ CameraMetadata(const CameraMetadata &other);
~CameraMetadata();
+ CameraMetadata &operator=(const CameraMetadata &other);
+
+ std::tuple<size_t, size_t> usage() const;
+ bool resized() const { return resized_; }
+
bool isValid() const { return valid_; }
- bool addEntry(uint32_t tag, const void *data, size_t data_count);
- bool updateEntry(uint32_t tag, const void *data, size_t data_count);
+ bool getEntry(uint32_t tag, camera_metadata_ro_entry_t *entry) const;
+
+ template<typename T,
+ std::enable_if_t<std::is_arithmetic_v<T>> * = nullptr>
+ bool addEntry(uint32_t tag, const T &data)
+ {
+ return addEntry(tag, &data, 1, sizeof(T));
+ }
+
+ template<typename T, size_t size>
+ bool addEntry(uint32_t tag, const T (&data)[size])
+ {
+ return addEntry(tag, data, size, sizeof(T));
+ }
+
+ template<typename S,
+ typename T = typename S::value_type>
+ bool addEntry(uint32_t tag, const S &data)
+ {
+ return addEntry(tag, data.data(), data.size(), sizeof(T));
+ }
+
+ template<typename T>
+ bool addEntry(uint32_t tag, const T *data, size_t count)
+ {
+ return addEntry(tag, data, count, sizeof(T));
+ }
+
+ template<typename T>
+ bool updateEntry(uint32_t tag, const T &data)
+ {
+ return updateEntry(tag, &data, 1, sizeof(T));
+ }
+
+ template<typename T, size_t size>
+ bool updateEntry(uint32_t tag, const T (&data)[size])
+ {
+ return updateEntry(tag, data, size, sizeof(T));
+ }
+
+ template<typename S,
+ typename T = typename S::value_type>
+ bool updateEntry(uint32_t tag, const S &data)
+ {
+ return updateEntry(tag, data.data(), data.size(), sizeof(T));
+ }
+
+ template<typename T>
+ bool updateEntry(uint32_t tag, const T *data, size_t count)
+ {
+ return updateEntry(tag, data, count, sizeof(T));
+ }
camera_metadata_t *get();
const camera_metadata_t *get() const;
private:
+ bool resize(size_t count, size_t size);
+ bool addEntry(uint32_t tag, const void *data, size_t count,
+ size_t elementSize);
+ bool updateEntry(uint32_t tag, const void *data, size_t count,
+ size_t elementSize);
+
camera_metadata_t *metadata_;
bool valid_;
+ bool resized_;
};
#endif /* __ANDROID_CAMERA_METADATA_H__ */
diff --git a/src/android/camera_ops.cpp b/src/android/camera_ops.cpp
index 696e8043..8a3cfa17 100644
--- a/src/android/camera_ops.cpp
+++ b/src/android/camera_ops.cpp
@@ -66,8 +66,14 @@ static void hal_dev_dump([[maybe_unused]] const struct camera3_device *dev,
{
}
-static int hal_dev_flush([[maybe_unused]] const struct camera3_device *dev)
+static int hal_dev_flush(const struct camera3_device *dev)
{
+ if (!dev)
+ return -EINVAL;
+
+ CameraDevice *camera = reinterpret_cast<CameraDevice *>(dev->priv);
+ camera->flush();
+
return 0;
}
diff --git a/src/android/camera_stream.cpp b/src/android/camera_stream.cpp
new file mode 100644
index 00000000..bf4a7b41
--- /dev/null
+++ b/src/android/camera_stream.cpp
@@ -0,0 +1,146 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * camera_stream.cpp - Camera HAL stream
+ */
+
+#include "camera_stream.h"
+
+#include "camera_buffer.h"
+#include "camera_device.h"
+#include "camera_metadata.h"
+#include "jpeg/post_processor_jpeg.h"
+
+#include <libcamera/formats.h>
+
+using namespace libcamera;
+
+LOG_DECLARE_CATEGORY(HAL)
+
+/*
+ * \class CameraStream
+ * \brief Map a camera3_stream_t to a StreamConfiguration
+ *
+ * The CameraStream class maps a camera3_stream_t provided by Android
+ * camera framework to a libcamera::StreamConfiguration.
+ *
+ * The StreamConfiguration is represented by its index as recorded in the
+ * CameraConfiguration and not by pointer as StreamConfiguration is subject to
+ * relocation.
+ *
+ * A single StreamConfiguration may be used to deliver one or more streams to
+ * the Android framework. The mapping type between a camera3 stream to a
+ * StreamConfiguration is described by the CameraStream::Type.
+ *
+ * CameraStream handles all the aspects of producing a stream with the size
+ * and format requested by the camera3 stream from the data produced by
+ * the associated libcamera::Stream, including the creation of the encoder
+ * and buffer allocation.
+ */
+
+CameraStream::CameraStream(CameraDevice *const cameraDevice,
+ CameraConfiguration *config, Type type,
+ camera3_stream_t *camera3Stream, unsigned int index)
+ : cameraDevice_(cameraDevice), config_(config), type_(type),
+ camera3Stream_(camera3Stream), index_(index)
+{
+ if (type_ == Type::Internal || type_ == Type::Mapped) {
+ /*
+ * \todo There might be multiple post-processors. The logic
+ * which should be instantiated here, is deferred for the
+ * future. For now, we only have PostProcessorJpeg and that
+ * is what we instantiate here.
+ */
+ postProcessor_ = std::make_unique<PostProcessorJpeg>(cameraDevice_);
+ }
+
+ if (type == Type::Internal) {
+ allocator_ = std::make_unique<FrameBufferAllocator>(cameraDevice_->camera());
+ mutex_ = std::make_unique<std::mutex>();
+ }
+}
+
/* Return the StreamConfiguration this stream maps to, looked up by index. */
const StreamConfiguration &CameraStream::configuration() const
{
	return config_->at(index_);
}
+
/* Return the libcamera Stream backing this stream's configuration. */
Stream *CameraStream::stream() const
{
	return configuration().stream();
}
+
+int CameraStream::configure()
+{
+ if (postProcessor_) {
+ StreamConfiguration output = configuration();
+ output.pixelFormat = formats::MJPEG;
+ int ret = postProcessor_->configure(configuration(), output);
+ if (ret)
+ return ret;
+ }
+
+ if (allocator_) {
+ int ret = allocator_->allocate(stream());
+ if (ret < 0)
+ return ret;
+
+ /* Save a pointer to the reserved frame buffers */
+ for (const auto &frameBuffer : allocator_->buffers(stream()))
+ buffers_.push_back(frameBuffer.get());
+ }
+
+ camera3Stream_->max_buffers = configuration().bufferCount;
+
+ return 0;
+}
+
/*
 * Post-process \a source into the Android buffer \a camera3Dest. Streams
 * with no post-processor (Direct type) return immediately with success.
 */
int CameraStream::process(const libcamera::FrameBuffer &source,
			  buffer_handle_t camera3Dest,
			  const CameraMetadata &requestMetadata,
			  CameraMetadata *resultMetadata)
{
	if (!postProcessor_)
		return 0;

	/*
	 * \todo Buffer mapping and processing should be moved to a
	 * separate thread.
	 */
	/* Map the destination Android buffer for CPU read/write access. */
	CameraBuffer dest(camera3Dest, PROT_READ | PROT_WRITE);
	if (!dest.isValid()) {
		LOG(HAL, Error) << "Failed to map android blob buffer";
		return -EINVAL;
	}

	return postProcessor_->process(source, &dest, requestMetadata, resultMetadata);
}
+
+FrameBuffer *CameraStream::getBuffer()
+{
+ if (!allocator_)
+ return nullptr;
+
+ std::lock_guard<std::mutex> locker(*mutex_);
+
+ if (buffers_.empty()) {
+ LOG(HAL, Error) << "Buffer underrun";
+ return nullptr;
+ }
+
+ FrameBuffer *buffer = buffers_.back();
+ buffers_.pop_back();
+
+ return buffer;
+}
+
+void CameraStream::putBuffer(libcamera::FrameBuffer *buffer)
+{
+ if (!allocator_)
+ return;
+
+ std::lock_guard<std::mutex> locker(*mutex_);
+
+ buffers_.push_back(buffer);
+}
diff --git a/src/android/camera_stream.h b/src/android/camera_stream.h
new file mode 100644
index 00000000..629d9e00
--- /dev/null
+++ b/src/android/camera_stream.h
@@ -0,0 +1,147 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * camera_stream.h - Camera HAL stream
+ */
+#ifndef __ANDROID_CAMERA_STREAM_H__
+#define __ANDROID_CAMERA_STREAM_H__
+
+#include <memory>
+#include <mutex>
+#include <vector>
+
+#include <hardware/camera3.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/framebuffer_allocator.h>
+#include <libcamera/geometry.h>
+#include <libcamera/pixel_format.h>
+
+#include "libcamera/internal/framebuffer.h"
+
+class CameraDevice;
+class CameraMetadata;
+class PostProcessor;
+
+class CameraStream
+{
+public:
+ /*
+ * Enumeration of CameraStream types.
+ *
+ * A camera stream associates an Android stream to a libcamera stream.
+ * This enumeration describes how the two streams are associated and how
+ * and where data produced from libcamera are delivered to the
+ * Android framework.
+ *
+ * Direct:
+ *
+ * The Android stream is directly mapped onto a libcamera stream: frames
+ * are delivered by the library directly in the memory location
+ * specified by the Android stream (buffer_handle_t->data) and provided
+ * to the framework as they are. The Android stream characteristics are
+ * directly translated to the libcamera stream configuration.
+ *
+ * +-----+ +-----+
+ * | A | | L |
+ * +-----+ +-----+
+ * | |
+ * V V
+ * +-----+ +------+
+ * | B |<---------------| FB |
+ * +-----+ +------+
+ *
+ *
+ * Internal:
+ *
+ * Data for the Android stream is produced by processing a libcamera
+ * stream created by the HAL for that purpose. The libcamera stream
+ * needs to be supplied with intermediate buffers where the library
+ * delivers frames to be processed and then provided to the framework.
+ * The libcamera stream configuration is not a direct translation of the
+ * Android stream characteristics, but it describes the format and size
+ * required for the processing procedure to produce frames in the
+ * Android required format.
+ *
+ * +-----+ +-----+
+ * | A | | L |
+ * +-----+ +-----+
+ * | |
+ * V V
+ * +-----+ +------+
+ * | B | | FB |
+ * +-----+ +------+
+ * ^ |
+ * |-------Processing------|
+ *
+ *
+ * Mapped:
+ *
+ * Data for the Android stream is produced by processing a libcamera
+ * stream associated with another CameraStream. Mapped camera streams do
+ * not need any memory to be reserved for them as they process data
+ * produced by libcamera for a different stream whose format and size
+ * are compatible with the processing procedure requirements to produce
+ * frames in the Android required format.
+ *
+ * +-----+ +-----+ +-----+
+ * | A | | A' | | L |
+ * +-----+ +-----+ +-----+
+ * | | |
+ * V V V
+ * +-----+ +-----+ +------+
+ * | B | | B' |<---------| FB |
+ * +-----+ +-----+ +------+
+ * ^ |
+ * |--Processing--|
+ *
+ *
+ * --------------------------------------------------------------------
+ * A = Android stream
+ * L = libcamera stream
+ * B = memory buffer
+ * FB = libcamera FrameBuffer
+ * "Processing" = Frame processing procedure (Encoding, scaling etc)
+ */
+ enum class Type {
+ Direct,
+ Internal,
+ Mapped,
+ };
+ CameraStream(CameraDevice *const cameraDevice,
+ libcamera::CameraConfiguration *config, Type type,
+ camera3_stream_t *camera3Stream, unsigned int index);
+
+ Type type() const { return type_; }
+ const camera3_stream_t &camera3Stream() const { return *camera3Stream_; }
+ const libcamera::StreamConfiguration &configuration() const;
+ libcamera::Stream *stream() const;
+
+ int configure();
+ int process(const libcamera::FrameBuffer &source,
+ buffer_handle_t camera3Dest,
+ const CameraMetadata &requestMetadata,
+ CameraMetadata *resultMetadata);
+ libcamera::FrameBuffer *getBuffer();
+ void putBuffer(libcamera::FrameBuffer *buffer);
+
+private:
+ CameraDevice *const cameraDevice_;
+ const libcamera::CameraConfiguration *config_;
+ const Type type_;
+ camera3_stream_t *camera3Stream_;
+ const unsigned int index_;
+
+ std::unique_ptr<libcamera::FrameBufferAllocator> allocator_;
+ std::vector<libcamera::FrameBuffer *> buffers_;
+ /*
+ * The class has to be MoveConstructible as instances are stored in
+ * an std::vector in CameraDevice.
+ */
+ std::unique_ptr<std::mutex> mutex_;
+ std::unique_ptr<PostProcessor> postProcessor_;
+};
+
+#endif /* __ANDROID_CAMERA_STREAM__ */
diff --git a/src/android/camera_worker.cpp b/src/android/camera_worker.cpp
new file mode 100644
index 00000000..98dddd9e
--- /dev/null
+++ b/src/android/camera_worker.cpp
@@ -0,0 +1,129 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * camera_worker.cpp - Process capture requests on behalf of the Camera HAL
+ */
+
+#include "camera_worker.h"
+
+#include <errno.h>
+#include <string.h>
+#include <sys/poll.h>
+#include <unistd.h>
+
+#include "camera_device.h"
+
+using namespace libcamera;
+
+LOG_DECLARE_CATEGORY(HAL)
+
+/*
+ * \class CaptureRequest
+ * \brief Wrap a libcamera::Request associated with buffers and fences
+ *
+ * A CaptureRequest is constructed by the CameraDevice, filled with
+ * buffers and fences provided by the camera3 framework and then processed
+ * by the CameraWorker which queues it to the libcamera::Camera after handling
+ * fences.
+ */
+CaptureRequest::CaptureRequest(libcamera::Camera *camera)
+ : camera_(camera)
+{
+ request_ = camera_->createRequest(reinterpret_cast<uint64_t>(this));
+}
+
+void CaptureRequest::addBuffer(Stream *stream, FrameBuffer *buffer, int fence)
+{
+ request_->addBuffer(stream, buffer);
+ acquireFences_.push_back(fence);
+}
+
+void CaptureRequest::queue()
+{
+ camera_->queueRequest(request_.get());
+}
+
+/*
+ * \class CameraWorker
+ * \brief Process a CaptureRequest on an internal thread
+ *
+ * The CameraWorker class wraps a Worker that runs on an internal thread
+ * and schedules processing of CaptureRequest through it.
+ */
+CameraWorker::CameraWorker()
+{
+ worker_.moveToThread(this);
+}
+
+void CameraWorker::start()
+{
+ Thread::start();
+}
+
+void CameraWorker::stop()
+{
+ exit();
+ wait();
+}
+
+void CameraWorker::run()
+{
+ exec();
+ dispatchMessages(Message::Type::InvokeMessage);
+}
+
+void CameraWorker::queueRequest(CaptureRequest *request)
+{
+ /* Async process the request on the worker which runs its own thread. */
+ worker_.invokeMethod(&Worker::processRequest, ConnectionTypeQueued,
+ request);
+}
+
+/*
+ * \class CameraWorker::Worker
+ * \brief Process a CaptureRequest handling acquisition fences
+ */
+int CameraWorker::Worker::waitFence(int fence)
+{
+ /*
+ * \todo Better characterize the timeout. Currently equal to the one
+ * used by the Rockchip Camera HAL on ChromeOS.
+ */
+ constexpr unsigned int timeoutMs = 300;
+ struct pollfd fds = { fence, POLLIN, 0 };
+
+ do {
+ int ret = poll(&fds, 1, timeoutMs);
+ if (ret == 0)
+ return -ETIME;
+
+ if (ret > 0) {
+ if (fds.revents & (POLLERR | POLLNVAL))
+ return -EINVAL;
+
+ return 0;
+ }
+ } while (errno == EINTR || errno == EAGAIN);
+
+ return -errno;
+}
+
+void CameraWorker::Worker::processRequest(CaptureRequest *request)
+{
+ /* Wait on all fences before queuing the Request. */
+ for (int fence : request->fences()) {
+ if (fence == -1)
+ continue;
+
+ int ret = waitFence(fence);
+ close(fence);
+ if (ret < 0) {
+ LOG(HAL, Error) << "Failed waiting for fence: "
+ << fence << ": " << strerror(-ret);
+ return;
+ }
+ }
+
+ request->queue();
+}
diff --git a/src/android/camera_worker.h b/src/android/camera_worker.h
new file mode 100644
index 00000000..67ae50bd
--- /dev/null
+++ b/src/android/camera_worker.h
@@ -0,0 +1,71 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * camera_worker.h - Process capture requests on behalf of the Camera HAL
+ */
+#ifndef __ANDROID_CAMERA_WORKER_H__
+#define __ANDROID_CAMERA_WORKER_H__
+
+#include <memory>
+
+#include <libcamera/base/object.h>
+#include <libcamera/base/thread.h>
+
+#include <libcamera/camera.h>
+#include <libcamera/framebuffer.h>
+#include <libcamera/request.h>
+#include <libcamera/stream.h>
+
+class CameraDevice;
+
+class CaptureRequest
+{
+public:
+ CaptureRequest(libcamera::Camera *camera);
+
+ const std::vector<int> &fences() const { return acquireFences_; }
+ libcamera::ControlList &controls() { return request_->controls(); }
+ const libcamera::ControlList &metadata() const
+ {
+ return request_->metadata();
+ }
+ unsigned long cookie() const { return request_->cookie(); }
+
+ void addBuffer(libcamera::Stream *stream,
+ libcamera::FrameBuffer *buffer, int fence);
+ void queue();
+
+private:
+ libcamera::Camera *camera_;
+ std::vector<int> acquireFences_;
+ std::unique_ptr<libcamera::Request> request_;
+};
+
+class CameraWorker : private libcamera::Thread
+{
+public:
+ CameraWorker();
+
+ void start();
+ void stop();
+
+ void queueRequest(CaptureRequest *request);
+
+protected:
+ void run() override;
+
+private:
+ class Worker : public libcamera::Object
+ {
+ public:
+ void processRequest(CaptureRequest *request);
+
+ private:
+ int waitFence(int fence);
+ };
+
+ Worker worker_;
+};
+
+#endif /* __ANDROID_CAMERA_WORKER_H__ */
diff --git a/src/android/cros/camera3_hal.cpp b/src/android/cros/camera3_hal.cpp
new file mode 100644
index 00000000..fb863b5f
--- /dev/null
+++ b/src/android/cros/camera3_hal.cpp
@@ -0,0 +1,24 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * camera3_hal.cpp - cros-specific components of Android Camera HALv3 module
+ */
+
+#include <cros-camera/cros_camera_hal.h>
+
+#include "../camera_hal_manager.h"
+
+static void set_up([[maybe_unused]] cros::CameraMojoChannelManagerToken *token)
+{
+}
+
+static void tear_down()
+{
+ delete CameraHalManager::instance();
+}
+
+cros::cros_camera_hal_t CROS_CAMERA_EXPORT CROS_CAMERA_HAL_INFO_SYM = {
+ .set_up = set_up,
+ .tear_down = tear_down
+};
diff --git a/src/android/cros/meson.build b/src/android/cros/meson.build
new file mode 100644
index 00000000..35995dd8
--- /dev/null
+++ b/src/android/cros/meson.build
@@ -0,0 +1,13 @@
+# SPDX-License-Identifier: CC0-1.0
+
+if get_option('android_platform') != 'cros'
+ subdir_done()
+endif
+
+android_hal_sources += files([
+ 'camera3_hal.cpp',
+])
+
+android_deps += dependency('libcros_camera')
+
+android_cpp_args += ['-DOS_CHROMEOS']
diff --git a/src/android/data/soraka/camera_hal.yaml b/src/android/data/soraka/camera_hal.yaml
new file mode 100644
index 00000000..2e996403
--- /dev/null
+++ b/src/android/data/soraka/camera_hal.yaml
@@ -0,0 +1,8 @@
+cameras:
+ "\\_SB_.PCI0.I2C4.CAM1":
+ location: front
+ rotation: 0
+
+ "\\_SB_.PCI0.I2C2.CAM0":
+ location: back
+ rotation: 0
diff --git a/src/android/jpeg/encoder.h b/src/android/jpeg/encoder.h
index cf26d67a..a28522f4 100644
--- a/src/android/jpeg/encoder.h
+++ b/src/android/jpeg/encoder.h
@@ -1,4 +1,4 @@
-/* SPDX-License-Identifier: GPL-2.0-or-later */
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
@@ -7,19 +7,21 @@
#ifndef __ANDROID_JPEG_ENCODER_H__
#define __ANDROID_JPEG_ENCODER_H__
-#include <libcamera/buffer.h>
-#include <libcamera/span.h>
+#include <libcamera/base/span.h>
+
+#include <libcamera/framebuffer.h>
#include <libcamera/stream.h>
class Encoder
{
public:
- virtual ~Encoder() {};
+ virtual ~Encoder() = default;
virtual int configure(const libcamera::StreamConfiguration &cfg) = 0;
- virtual int encode(const libcamera::FrameBuffer *source,
- const libcamera::Span<uint8_t> &destination,
- const libcamera::Span<const uint8_t> &exifData) = 0;
+ virtual int encode(const libcamera::FrameBuffer &source,
+ libcamera::Span<uint8_t> destination,
+ libcamera::Span<const uint8_t> exifData,
+ unsigned int quality) = 0;
};
#endif /* __ANDROID_JPEG_ENCODER_H__ */
diff --git a/src/android/jpeg/encoder_libjpeg.cpp b/src/android/jpeg/encoder_libjpeg.cpp
index 510613cd..e6358ca9 100644
--- a/src/android/jpeg/encoder_libjpeg.cpp
+++ b/src/android/jpeg/encoder_libjpeg.cpp
@@ -1,4 +1,4 @@
-/* SPDX-License-Identifier: GPL-2.0-or-later */
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
@@ -16,16 +16,17 @@
#include <unistd.h>
#include <vector>
+#include <libcamera/base/log.h>
+
#include <libcamera/camera.h>
#include <libcamera/formats.h>
#include <libcamera/pixel_format.h>
#include "libcamera/internal/formats.h"
-#include "libcamera/internal/log.h"
using namespace libcamera;
-LOG_DEFINE_CATEGORY(JPEG)
+LOG_DECLARE_CATEGORY(JPEG)
namespace {
@@ -68,7 +69,6 @@ const struct JPEGPixelFormatInfo &findPixelInfo(const PixelFormat &format)
} /* namespace */
EncoderLibJpeg::EncoderLibJpeg()
- : quality_(95)
{
/* \todo Expand error handling coverage with a custom handler. */
compress_.err = jpeg_std_error(&jerr_);
@@ -94,7 +94,6 @@ int EncoderLibJpeg::configure(const StreamConfiguration &cfg)
compress_.input_components = info.colorSpace == JCS_GRAYSCALE ? 1 : 3;
jpeg_set_defaults(&compress_);
- jpeg_set_quality(&compress_, quality_, TRUE);
pixelFormatInfo_ = &info.pixelFormatInfo;
@@ -104,9 +103,9 @@ int EncoderLibJpeg::configure(const StreamConfiguration &cfg)
return 0;
}
-void EncoderLibJpeg::compressRGB(const libcamera::MappedBuffer *frame)
+void EncoderLibJpeg::compressRGB(Span<const uint8_t> frame)
{
- unsigned char *src = static_cast<unsigned char *>(frame->maps()[0].data());
+ unsigned char *src = const_cast<unsigned char *>(frame.data());
/* \todo Stride information should come from buffer configuration. */
unsigned int stride = pixelFormatInfo_->stride(compress_.image_width, 0);
@@ -122,7 +121,7 @@ void EncoderLibJpeg::compressRGB(const libcamera::MappedBuffer *frame)
* Compress the incoming buffer from a supported NV format.
* This naively unpacks the semi-planar NV12 to a YUV888 format for libjpeg.
*/
-void EncoderLibJpeg::compressNV(const libcamera::MappedBuffer *frame)
+void EncoderLibJpeg::compressNV(Span<const uint8_t> frame)
{
uint8_t tmprowbuf[compress_.image_width * 3];
@@ -144,7 +143,7 @@ void EncoderLibJpeg::compressNV(const libcamera::MappedBuffer *frame)
unsigned int cb_pos = nvSwap_ ? 1 : 0;
unsigned int cr_pos = nvSwap_ ? 0 : 1;
- const unsigned char *src = static_cast<unsigned char *>(frame->maps()[0].data());
+ const unsigned char *src = frame.data();
const unsigned char *src_c = src + y_stride * compress_.image_height;
JSAMPROW row_pointer[1];
@@ -179,20 +178,27 @@ void EncoderLibJpeg::compressNV(const libcamera::MappedBuffer *frame)
}
}
-int EncoderLibJpeg::encode(const FrameBuffer *source,
- const libcamera::Span<uint8_t> &dest,
- const libcamera::Span<const uint8_t> &exifData)
+int EncoderLibJpeg::encode(const FrameBuffer &source, Span<uint8_t> dest,
+ Span<const uint8_t> exifData, unsigned int quality)
{
- MappedFrameBuffer frame(source, PROT_READ);
+ MappedFrameBuffer frame(&source, PROT_READ);
if (!frame.isValid()) {
LOG(JPEG, Error) << "Failed to map FrameBuffer : "
<< strerror(frame.error());
return frame.error();
}
+ return encode(frame.maps()[0], dest, exifData, quality);
+}
+
+int EncoderLibJpeg::encode(Span<const uint8_t> src, Span<uint8_t> dest,
+ Span<const uint8_t> exifData, unsigned int quality)
+{
unsigned char *destination = dest.data();
unsigned long size = dest.size();
+ jpeg_set_quality(&compress_, quality, TRUE);
+
/*
* The jpeg_mem_dest will reallocate if the required size is not
* sufficient. That means the output won't be written to the correct
@@ -215,9 +221,9 @@ int EncoderLibJpeg::encode(const FrameBuffer *source,
<< "x" << compress_.image_height;
if (nv_)
- compressNV(&frame);
+ compressNV(src);
else
- compressRGB(&frame);
+ compressRGB(src);
jpeg_finish_compress(&compress_);
diff --git a/src/android/jpeg/encoder_libjpeg.h b/src/android/jpeg/encoder_libjpeg.h
index 1e8df05a..14bf8922 100644
--- a/src/android/jpeg/encoder_libjpeg.h
+++ b/src/android/jpeg/encoder_libjpeg.h
@@ -1,4 +1,4 @@
-/* SPDX-License-Identifier: GPL-2.0-or-later */
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
@@ -9,8 +9,8 @@
#include "encoder.h"
-#include "libcamera/internal/buffer.h"
#include "libcamera/internal/formats.h"
+#include "libcamera/internal/framebuffer.h"
#include <jpeglib.h>
@@ -21,19 +21,22 @@ public:
~EncoderLibJpeg();
int configure(const libcamera::StreamConfiguration &cfg) override;
- int encode(const libcamera::FrameBuffer *source,
- const libcamera::Span<uint8_t> &destination,
- const libcamera::Span<const uint8_t> &exifData) override;
+ int encode(const libcamera::FrameBuffer &source,
+ libcamera::Span<uint8_t> destination,
+ libcamera::Span<const uint8_t> exifData,
+ unsigned int quality) override;
+ int encode(libcamera::Span<const uint8_t> source,
+ libcamera::Span<uint8_t> destination,
+ libcamera::Span<const uint8_t> exifData,
+ unsigned int quality);
private:
- void compressRGB(const libcamera::MappedBuffer *frame);
- void compressNV(const libcamera::MappedBuffer *frame);
+ void compressRGB(libcamera::Span<const uint8_t> frame);
+ void compressNV(libcamera::Span<const uint8_t> frame);
struct jpeg_compress_struct compress_;
struct jpeg_error_mgr jerr_;
- unsigned int quality_;
-
const libcamera::PixelFormatInfo *pixelFormatInfo_;
bool nv_;
diff --git a/src/android/jpeg/exif.cpp b/src/android/jpeg/exif.cpp
index 32cf8974..0ba4cb85 100644
--- a/src/android/jpeg/exif.cpp
+++ b/src/android/jpeg/exif.cpp
@@ -1,4 +1,4 @@
-/* SPDX-License-Identifier: GPL-2.0-or-later */
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
@@ -7,7 +7,15 @@
#include "exif.h"
-#include "libcamera/internal/log.h"
+#include <cmath>
+#include <iomanip>
+#include <map>
+#include <sstream>
+#include <tuple>
+#include <uchar.h>
+
+#include <libcamera/base/log.h>
+#include <libcamera/base/utils.h>
using namespace libcamera;
@@ -35,7 +43,8 @@ enum class _ExifTag {
* data can be obtained using the data() method.
*/
Exif::Exif()
- : valid_(false), data_(nullptr), exifData_(0), size_(0)
+ : valid_(false), data_(nullptr), order_(EXIF_BYTE_ORDER_INTEL),
+ exifData_(0), size_(0)
{
/* Create an ExifMem allocator to construct entries. */
mem_ = exif_mem_new_default();
@@ -59,7 +68,7 @@ Exif::Exif()
* Big-Endian: EXIF_BYTE_ORDER_MOTOROLA
* Little Endian: EXIF_BYTE_ORDER_INTEL
*/
- exif_data_set_byte_order(data_, EXIF_BYTE_ORDER_INTEL);
+ exif_data_set_byte_order(data_, order_);
setString(EXIF_IFD_EXIF, EXIF_TAG_EXIF_VERSION,
EXIF_FORMAT_UNDEFINED, "0231");
@@ -73,8 +82,16 @@ Exif::~Exif()
if (exifData_)
free(exifData_);
- if (data_)
+ if (data_) {
+ /*
+ * Reset thumbnail data to avoid getting double-freed by
+ * libexif. It is owned by the caller (i.e. PostProcessorJpeg).
+ */
+ data_->data = nullptr;
+ data_->size = 0;
+
exif_data_unref(data_);
+ }
if (mem_)
exif_mem_unref(mem_);
@@ -138,13 +155,23 @@ ExifEntry *Exif::createEntry(ExifIfd ifd, ExifTag tag, ExifFormat format,
return entry;
}
+void Exif::setByte(ExifIfd ifd, ExifTag tag, uint8_t item)
+{
+ ExifEntry *entry = createEntry(ifd, tag, EXIF_FORMAT_BYTE, 1, 1);
+ if (!entry)
+ return;
+
+ entry->data[0] = item;
+ exif_entry_unref(entry);
+}
+
void Exif::setShort(ExifIfd ifd, ExifTag tag, uint16_t item)
{
ExifEntry *entry = createEntry(ifd, tag);
if (!entry)
return;
- exif_set_short(entry->data, EXIF_BYTE_ORDER_INTEL, item);
+ exif_set_short(entry->data, order_, item);
exif_entry_unref(entry);
}
@@ -154,31 +181,96 @@ void Exif::setLong(ExifIfd ifd, ExifTag tag, uint32_t item)
if (!entry)
return;
- exif_set_long(entry->data, EXIF_BYTE_ORDER_INTEL, item);
+ exif_set_long(entry->data, order_, item);
exif_entry_unref(entry);
}
void Exif::setRational(ExifIfd ifd, ExifTag tag, ExifRational item)
{
- ExifEntry *entry = createEntry(ifd, tag);
+ setRational(ifd, tag, { &item, 1 });
+}
+
+void Exif::setRational(ExifIfd ifd, ExifTag tag, Span<const ExifRational> items)
+{
+ ExifEntry *entry = createEntry(ifd, tag, EXIF_FORMAT_RATIONAL,
+ items.size(),
+ items.size() * sizeof(ExifRational));
if (!entry)
return;
- exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, item);
+ for (size_t i = 0; i < items.size(); i++)
+ exif_set_rational(entry->data + i * sizeof(ExifRational),
+ order_, items[i]);
exif_entry_unref(entry);
}
-void Exif::setString(ExifIfd ifd, ExifTag tag, ExifFormat format, const std::string &item)
+static const std::map<Exif::StringEncoding, std::array<uint8_t, 8>> stringEncodingCodes = {
+ { Exif::ASCII, { 0x41, 0x53, 0x43, 0x49, 0x49, 0x00, 0x00, 0x00 } },
+ { Exif::Unicode, { 0x55, 0x4e, 0x49, 0x43, 0x4f, 0x44, 0x45, 0x00 } },
+};
+
+void Exif::setString(ExifIfd ifd, ExifTag tag, ExifFormat format,
+ const std::string &item, StringEncoding encoding)
{
- /* Pad 1 extra byte for null-terminated string in ASCII format. */
- size_t length = format == EXIF_FORMAT_ASCII ?
- item.length() + 1 : item.length();
+ std::string ascii;
+ size_t length;
+ const char *str;
+ std::vector<uint8_t> buf;
+
+ if (format == EXIF_FORMAT_ASCII) {
+ ascii = utils::toAscii(item);
+ str = ascii.c_str();
+
+ /* Pad 1 extra byte to null-terminate the ASCII string. */
+ length = ascii.length() + 1;
+ } else {
+ std::u16string u16str;
+
+ auto encodingString = stringEncodingCodes.find(encoding);
+ if (encodingString != stringEncodingCodes.end()) {
+ buf = {
+ encodingString->second.begin(),
+ encodingString->second.end()
+ };
+ }
+
+ switch (encoding) {
+ case Unicode:
+ u16str = utf8ToUtf16(item);
+
+ buf.resize(8 + u16str.size() * 2);
+ for (size_t i = 0; i < u16str.size(); i++) {
+ if (order_ == EXIF_BYTE_ORDER_INTEL) {
+ buf[8 + 2 * i] = u16str[i] & 0xff;
+ buf[8 + 2 * i + 1] = (u16str[i] >> 8) & 0xff;
+ } else {
+ buf[8 + 2 * i] = (u16str[i] >> 8) & 0xff;
+ buf[8 + 2 * i + 1] = u16str[i] & 0xff;
+ }
+ }
+
+ break;
+
+ case ASCII:
+ case NoEncoding:
+ buf.insert(buf.end(), item.begin(), item.end());
+ break;
+ }
+
+ str = reinterpret_cast<const char *>(buf.data());
+
+ /*
+ * Strings stored in different formats (EXIF_FORMAT_UNDEFINED)
+ * are not null-terminated.
+ */
+ length = buf.size();
+ }
ExifEntry *entry = createEntry(ifd, tag, format, length, length);
if (!entry)
return;
- memcpy(entry->data, item.c_str(), length);
+ memcpy(entry->data, str, length);
exif_entry_unref(entry);
}
@@ -198,7 +290,7 @@ void Exif::setSize(const Size &size)
setLong(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_X_DIMENSION, size.width);
}
-void Exif::setTimestamp(time_t timestamp)
+void Exif::setTimestamp(time_t timestamp, std::chrono::milliseconds msec)
{
struct tm tm;
localtime_r(&timestamp, &tm);
@@ -213,19 +305,107 @@ void Exif::setTimestamp(time_t timestamp)
/* Query and set timezone information if available. */
int r = strftime(str, sizeof(str), "%z", &tm);
- if (r > 0) {
- std::string tz(str);
- tz.insert(3, 1, ':');
- setString(EXIF_IFD_EXIF,
- static_cast<ExifTag>(_ExifTag::OFFSET_TIME),
- EXIF_FORMAT_ASCII, tz);
- setString(EXIF_IFD_EXIF,
- static_cast<ExifTag>(_ExifTag::OFFSET_TIME_ORIGINAL),
- EXIF_FORMAT_ASCII, tz);
- setString(EXIF_IFD_EXIF,
- static_cast<ExifTag>(_ExifTag::OFFSET_TIME_DIGITIZED),
- EXIF_FORMAT_ASCII, tz);
- }
+ if (r <= 0)
+ return;
+
+ std::string tz(str);
+ tz.insert(3, 1, ':');
+ setString(EXIF_IFD_EXIF,
+ static_cast<ExifTag>(_ExifTag::OFFSET_TIME),
+ EXIF_FORMAT_ASCII, tz);
+ setString(EXIF_IFD_EXIF,
+ static_cast<ExifTag>(_ExifTag::OFFSET_TIME_ORIGINAL),
+ EXIF_FORMAT_ASCII, tz);
+ setString(EXIF_IFD_EXIF,
+ static_cast<ExifTag>(_ExifTag::OFFSET_TIME_DIGITIZED),
+ EXIF_FORMAT_ASCII, tz);
+
+ std::stringstream sstr;
+ sstr << std::setfill('0') << std::setw(3) << msec.count();
+ std::string subsec = sstr.str();
+
+ setString(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME,
+ EXIF_FORMAT_ASCII, subsec);
+ setString(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_ORIGINAL,
+ EXIF_FORMAT_ASCII, subsec);
+ setString(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_DIGITIZED,
+ EXIF_FORMAT_ASCII, subsec);
+}
+
+void Exif::setGPSDateTimestamp(time_t timestamp)
+{
+ struct tm tm;
+ gmtime_r(&timestamp, &tm);
+
+ char str[11];
+ strftime(str, sizeof(str), "%Y:%m:%d", &tm);
+ std::string tsStr(str);
+
+ setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_DATE_STAMP),
+ EXIF_FORMAT_ASCII, tsStr);
+
+ /* Set GPS_TIME_STAMP */
+ ExifRational ts[] = {
+ { static_cast<ExifLong>(tm.tm_hour), 1 },
+ { static_cast<ExifLong>(tm.tm_min), 1 },
+ { static_cast<ExifLong>(tm.tm_sec), 1 },
+ };
+
+ setRational(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_TIME_STAMP),
+ ts);
+}
+
+std::tuple<int, int, int> Exif::degreesToDMS(double decimalDegrees)
+{
+ int degrees = std::trunc(decimalDegrees);
+ double minutes = std::abs((decimalDegrees - degrees) * 60);
+ double seconds = (minutes - std::trunc(minutes)) * 60;
+
+ return { degrees, std::trunc(minutes), std::round(seconds) };
+}
+
+void Exif::setGPSDMS(ExifIfd ifd, ExifTag tag, int deg, int min, int sec)
+{
+ ExifRational coords[] = {
+ { static_cast<ExifLong>(deg), 1 },
+ { static_cast<ExifLong>(min), 1 },
+ { static_cast<ExifLong>(sec), 1 },
+ };
+
+ setRational(ifd, tag, coords);
+}
+
+/*
+ * \brief Set GPS location (lat, long, alt)
+ * \param[in] coords Pointer to coordinates latitude, longitude, and altitude,
+ * first two in degrees, the third in meters
+ */
+void Exif::setGPSLocation(const double *coords)
+{
+ int deg, min, sec;
+
+ std::tie<int, int, int>(deg, min, sec) = degreesToDMS(coords[0]);
+ setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LATITUDE_REF),
+ EXIF_FORMAT_ASCII, deg >= 0 ? "N" : "S");
+ setGPSDMS(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LATITUDE),
+ std::abs(deg), min, sec);
+
+ std::tie<int, int, int>(deg, min, sec) = degreesToDMS(coords[1]);
+ setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LONGITUDE_REF),
+ EXIF_FORMAT_ASCII, deg >= 0 ? "E" : "W");
+ setGPSDMS(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_LONGITUDE),
+ std::abs(deg), min, sec);
+
+ setByte(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_ALTITUDE_REF),
+ coords[2] >= 0 ? 0 : 1);
+ setRational(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_ALTITUDE),
+ ExifRational{ static_cast<ExifLong>(std::abs(coords[2])), 1 });
+}
+
+void Exif::setGPSMethod(const std::string &method)
+{
+ setString(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_PROCESSING_METHOD),
+ EXIF_FORMAT_UNDEFINED, method, NoEncoding);
}
void Exif::setOrientation(int orientation)
@@ -237,19 +417,94 @@ void Exif::setOrientation(int orientation)
value = 1;
break;
case 90:
- value = 8;
+ value = 6;
break;
case 180:
value = 3;
break;
case 270:
- value = 6;
+ value = 8;
break;
}
setShort(EXIF_IFD_0, EXIF_TAG_ORIENTATION, value);
}
+/*
+ * The thumbnail data should remain valid until the Exif object is destroyed.
+ * Failing to do so, might result in no thumbnail data being set even after a
+ * call to Exif::setThumbnail().
+ */
+void Exif::setThumbnail(Span<const unsigned char> thumbnail,
+ Compression compression)
+{
+ data_->data = const_cast<unsigned char *>(thumbnail.data());
+ data_->size = thumbnail.size();
+
+ setShort(EXIF_IFD_0, EXIF_TAG_COMPRESSION, compression);
+}
+
+void Exif::setFocalLength(float length)
+{
+ ExifRational rational = { static_cast<ExifLong>(length * 1000), 1000 };
+ setRational(EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH, rational);
+}
+
+void Exif::setExposureTime(uint64_t nsec)
+{
+ ExifRational rational = { static_cast<ExifLong>(nsec), 1000000000 };
+ setRational(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_TIME, rational);
+}
+
+void Exif::setAperture(float size)
+{
+ ExifRational rational = { static_cast<ExifLong>(size * 10000), 10000 };
+ setRational(EXIF_IFD_EXIF, EXIF_TAG_FNUMBER, rational);
+}
+
+void Exif::setISO(uint16_t iso)
+{
+ setShort(EXIF_IFD_EXIF, EXIF_TAG_ISO_SPEED_RATINGS, iso);
+}
+
+void Exif::setFlash(Flash flash)
+{
+ setShort(EXIF_IFD_EXIF, EXIF_TAG_FLASH, static_cast<ExifShort>(flash));
+}
+
+void Exif::setWhiteBalance(WhiteBalance wb)
+{
+ setShort(EXIF_IFD_EXIF, EXIF_TAG_WHITE_BALANCE, static_cast<ExifShort>(wb));
+}
+
+/**
+ * \brief Convert UTF-8 string to UTF-16 string
+ * \param[in] str String to convert
+ *
+ * \return \a str in UTF-16
+ */
+std::u16string Exif::utf8ToUtf16(const std::string &str)
+{
+ mbstate_t state{};
+ char16_t c16;
+ const char *ptr = str.data();
+ const char *end = ptr + str.size();
+
+ std::u16string ret;
+ while (size_t rc = mbrtoc16(&c16, ptr, end - ptr + 1, &state)) {
+ if (rc == static_cast<size_t>(-2) ||
+ rc == static_cast<size_t>(-1))
+ break;
+
+ ret.push_back(c16);
+
+ if (rc > 0)
+ ptr += rc;
+ }
+
+ return ret;
+}
+
[[nodiscard]] int Exif::generate()
{
if (exifData_) {
diff --git a/src/android/jpeg/exif.h b/src/android/jpeg/exif.h
index f04cefce..23b0e097 100644
--- a/src/android/jpeg/exif.h
+++ b/src/android/jpeg/exif.h
@@ -1,4 +1,4 @@
-/* SPDX-License-Identifier: GPL-2.0-or-later */
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
* Copyright (C) 2020, Google Inc.
*
@@ -7,13 +7,15 @@
#ifndef __ANDROID_JPEG_EXIF_H__
#define __ANDROID_JPEG_EXIF_H__
+#include <chrono>
#include <string>
#include <time.h>
#include <libexif/exif-data.h>
+#include <libcamera/base/span.h>
+
#include <libcamera/geometry.h>
-#include <libcamera/span.h>
class Exif
{
@@ -21,12 +23,57 @@ public:
Exif();
~Exif();
+ enum Compression {
+ None = 1,
+ JPEG = 6,
+ };
+
+ enum Flash {
+ /* bit 0 */
+ Fired = 0x01,
+ /* bits 1 and 2 */
+ StrobeDetected = 0x04,
+ StrobeNotDetected = 0x06,
+ /* bits 3 and 4 */
+ ModeCompulsoryFiring = 0x08,
+ ModeCompulsorySuppression = 0x10,
+ ModeAuto = 0x18,
+ /* bit 5 */
+ FlashNotPresent = 0x20,
+ /* bit 6 */
+ RedEye = 0x40,
+ };
+
+ enum WhiteBalance {
+ Auto = 0,
+ Manual = 1,
+ };
+
+ enum StringEncoding {
+ NoEncoding = 0,
+ ASCII = 1,
+ Unicode = 2,
+ };
+
void setMake(const std::string &make);
void setModel(const std::string &model);
void setOrientation(int orientation);
void setSize(const libcamera::Size &size);
- void setTimestamp(time_t timestamp);
+ void setThumbnail(libcamera::Span<const unsigned char> thumbnail,
+ Compression compression);
+ void setTimestamp(time_t timestamp, std::chrono::milliseconds msec);
+
+ void setGPSDateTimestamp(time_t timestamp);
+ void setGPSLocation(const double *coords);
+ void setGPSMethod(const std::string &method);
+
+ void setFocalLength(float length);
+ void setExposureTime(uint64_t nsec);
+ void setAperture(float size);
+ void setISO(uint16_t iso);
+ void setFlash(Flash flash);
+ void setWhiteBalance(WhiteBalance wb);
libcamera::Span<const uint8_t> data() const { return { exifData_, size_ }; }
[[nodiscard]] int generate();
@@ -36,16 +83,26 @@ private:
ExifEntry *createEntry(ExifIfd ifd, ExifTag tag, ExifFormat format,
unsigned long components, unsigned int size);
+ void setByte(ExifIfd ifd, ExifTag tag, uint8_t item);
void setShort(ExifIfd ifd, ExifTag tag, uint16_t item);
void setLong(ExifIfd ifd, ExifTag tag, uint32_t item);
void setString(ExifIfd ifd, ExifTag tag, ExifFormat format,
- const std::string &item);
+ const std::string &item,
+ StringEncoding encoding = NoEncoding);
void setRational(ExifIfd ifd, ExifTag tag, ExifRational item);
+ void setRational(ExifIfd ifd, ExifTag tag,
+ libcamera::Span<const ExifRational> items);
+
+ std::tuple<int, int, int> degreesToDMS(double decimalDegrees);
+ void setGPSDMS(ExifIfd ifd, ExifTag tag, int deg, int min, int sec);
+
+ std::u16string utf8ToUtf16(const std::string &str);
bool valid_;
ExifData *data_;
ExifMem *mem_;
+ ExifByteOrder order_;
unsigned char *exifData_;
unsigned int size_;
diff --git a/src/android/jpeg/post_processor_jpeg.cpp b/src/android/jpeg/post_processor_jpeg.cpp
new file mode 100644
index 00000000..0e93f365
--- /dev/null
+++ b/src/android/jpeg/post_processor_jpeg.cpp
@@ -0,0 +1,196 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * post_processor_jpeg.cpp - JPEG Post Processor
+ */
+
+#include "post_processor_jpeg.h"
+
+#include <chrono>
+
+#include "../camera_device.h"
+#include "../camera_metadata.h"
+#include "encoder_libjpeg.h"
+#include "exif.h"
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/formats.h>
+
+using namespace libcamera;
+using namespace std::chrono_literals;
+
+LOG_DEFINE_CATEGORY(JPEG)
+
+/* Bind the post processor to its owning CameraDevice (borrowed, not owned). */
+PostProcessorJpeg::PostProcessorJpeg(CameraDevice *const device)
+	: cameraDevice_(device)
+{
+}
+
+/*
+ * Validate the input/output stream pair and prepare the JPEG encoder.
+ *
+ * The encoder performs no scaling, so input and output sizes must match,
+ * and the output stream must be MJPEG. Returns 0 on success or a negative
+ * error code.
+ */
+int PostProcessorJpeg::configure(const StreamConfiguration &inCfg,
+				 const StreamConfiguration &outCfg)
+{
+	if (inCfg.size != outCfg.size) {
+		LOG(JPEG, Error) << "Mismatch of input and output stream sizes";
+		return -EINVAL;
+	}
+
+	if (outCfg.pixelFormat != formats::MJPEG) {
+		LOG(JPEG, Error) << "Output stream pixel format is not JPEG";
+		return -EINVAL;
+	}
+
+	/* Recorded for the EXIF image size and the JPEG blob header offset. */
+	streamSize_ = outCfg.size;
+
+	/* The thumbnailer consumes frames in the input stream's format. */
+	thumbnailer_.configure(inCfg.size, inCfg.pixelFormat);
+
+	encoder_ = std::make_unique<EncoderLibJpeg>();
+
+	return encoder_->configure(inCfg);
+}
+
+/*
+ * Produce a JPEG-compressed thumbnail of \a source at \a targetSize.
+ *
+ * On any failure (thumbnailing, encoder configuration or encoding) the
+ * \a thumbnail vector is left empty so the caller simply skips the EXIF
+ * thumbnail.
+ */
+void PostProcessorJpeg::generateThumbnail(const FrameBuffer &source,
+					  const Size &targetSize,
+					  unsigned int quality,
+					  std::vector<unsigned char> *thumbnail)
+{
+	/* Stores the raw scaled-down thumbnail bytes. */
+	std::vector<unsigned char> rawThumbnail;
+
+	thumbnailer_.createThumbnail(source, targetSize, &rawThumbnail);
+
+	StreamConfiguration thCfg;
+	thCfg.size = targetSize;
+	thCfg.pixelFormat = thumbnailer_.pixelFormat();
+	int ret = thumbnailEncoder_.configure(thCfg);
+
+	if (!rawThumbnail.empty() && !ret) {
+		/*
+		 * \todo Avoid value-initialization of all elements of the
+		 * vector.
+		 */
+		thumbnail->resize(rawThumbnail.size());
+
+		int jpeg_size = thumbnailEncoder_.encode(rawThumbnail,
+							 *thumbnail, {}, quality);
+		/*
+		 * encode() returns a negative error code on failure. Guard
+		 * against passing it to resize(), where the implicit
+		 * conversion to size_t would request a huge allocation.
+		 */
+		if (jpeg_size < 0) {
+			LOG(JPEG, Error)
+				<< "Thumbnail encoding failed: " << jpeg_size;
+			thumbnail->clear();
+			return;
+		}
+
+		thumbnail->resize(jpeg_size);
+
+		LOG(JPEG, Debug)
+			<< "Thumbnail compress returned "
+			<< jpeg_size << " bytes";
+	}
+}
+
+/*
+ * Encode \a source to JPEG into \a destination, populating EXIF tags from
+ * the Android request metadata and filling the JPEG result metadata the
+ * framework expects. Returns 0 on success or the (negative) encoder error.
+ */
+int PostProcessorJpeg::process(const FrameBuffer &source,
+			       CameraBuffer *destination,
+			       const CameraMetadata &requestMetadata,
+			       CameraMetadata *resultMetadata)
+{
+	if (!encoder_)
+		return 0;
+
+	/* The JPEG blob is written into a single-plane destination buffer. */
+	ASSERT(destination->numPlanes() == 1);
+
+	camera_metadata_ro_entry_t entry;
+	int ret;
+
+	/* Set EXIF metadata for various tags. */
+	Exif exif;
+	exif.setMake(cameraDevice_->maker());
+	exif.setModel(cameraDevice_->model());
+
+	ret = requestMetadata.getEntry(ANDROID_JPEG_ORIENTATION, &entry);
+
+	/* Default to 0 (no rotation) when the request has no orientation. */
+	const uint32_t jpegOrientation = ret ? *entry.data.i32 : 0;
+	resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION, jpegOrientation);
+	exif.setOrientation(jpegOrientation);
+
+	exif.setSize(streamSize_);
+	/*
+	 * We set the frame's EXIF timestamp as the time of encode.
+	 * Since the precision we need for EXIF timestamp is only one
+	 * second, it is good enough.
+	 */
+	exif.setTimestamp(std::time(nullptr), 0ms);
+
+	ret = resultMetadata->getEntry(ANDROID_SENSOR_EXPOSURE_TIME, &entry);
+	exif.setExposureTime(ret ? *entry.data.i64 : 0);
+	ret = requestMetadata.getEntry(ANDROID_LENS_APERTURE, &entry);
+	if (ret)
+		exif.setAperture(*entry.data.f);
+	/*
+	 * Fixed values: no per-frame sensitivity, flash or white balance
+	 * information is available at this point.
+	 */
+	exif.setISO(100);
+	exif.setFlash(Exif::Flash::FlashNotPresent);
+	exif.setWhiteBalance(Exif::WhiteBalance::Auto);
+
+	exif.setFocalLength(1.0);
+
+	ret = requestMetadata.getEntry(ANDROID_JPEG_GPS_TIMESTAMP, &entry);
+	if (ret) {
+		exif.setGPSDateTimestamp(*entry.data.i64);
+		resultMetadata->addEntry(ANDROID_JPEG_GPS_TIMESTAMP,
+					 *entry.data.i64);
+	}
+
+	ret = requestMetadata.getEntry(ANDROID_JPEG_THUMBNAIL_SIZE, &entry);
+	if (ret) {
+		const int32_t *data = entry.data.i32;
+		Size thumbnailSize = { static_cast<uint32_t>(data[0]),
+				       static_cast<uint32_t>(data[1]) };
+
+		ret = requestMetadata.getEntry(ANDROID_JPEG_THUMBNAIL_QUALITY, &entry);
+		uint8_t quality = ret ? *entry.data.u8 : 95;
+		resultMetadata->addEntry(ANDROID_JPEG_THUMBNAIL_QUALITY, quality);
+
+		/* A (0, 0) thumbnail size means no thumbnail was requested. */
+		if (thumbnailSize != Size(0, 0)) {
+			std::vector<unsigned char> thumbnail;
+			generateThumbnail(source, thumbnailSize, quality, &thumbnail);
+			if (!thumbnail.empty())
+				exif.setThumbnail(thumbnail, Exif::Compression::JPEG);
+		}
+
+		resultMetadata->addEntry(ANDROID_JPEG_THUMBNAIL_SIZE, data, 2);
+	}
+
+	ret = requestMetadata.getEntry(ANDROID_JPEG_GPS_COORDINATES, &entry);
+	if (ret) {
+		exif.setGPSLocation(entry.data.d);
+		resultMetadata->addEntry(ANDROID_JPEG_GPS_COORDINATES,
+					 entry.data.d, 3);
+	}
+
+	ret = requestMetadata.getEntry(ANDROID_JPEG_GPS_PROCESSING_METHOD, &entry);
+	if (ret) {
+		std::string method(entry.data.u8, entry.data.u8 + entry.count);
+		exif.setGPSMethod(method);
+		resultMetadata->addEntry(ANDROID_JPEG_GPS_PROCESSING_METHOD,
+					 entry.data.u8, entry.count);
+	}
+
+	/* Encoding proceeds even without valid EXIF data. */
+	if (exif.generate() != 0)
+		LOG(JPEG, Error) << "Failed to generate valid EXIF data";
+
+	ret = requestMetadata.getEntry(ANDROID_JPEG_QUALITY, &entry);
+	const uint8_t quality = ret ? *entry.data.u8 : 95;
+	resultMetadata->addEntry(ANDROID_JPEG_QUALITY, quality);
+
+	int jpeg_size = encoder_->encode(source, destination->plane(0),
+					 exif.data(), quality);
+	if (jpeg_size < 0) {
+		LOG(JPEG, Error) << "Failed to encode stream image";
+		return jpeg_size;
+	}
+
+	/* Fill in the JPEG blob header, written at the very end of the buffer. */
+	uint8_t *resultPtr = destination->plane(0).data()
+			   + destination->jpegBufferSize(cameraDevice_->maxJpegBufferSize())
+			   - sizeof(struct camera3_jpeg_blob);
+	auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
+	blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
+	blob->jpeg_size = jpeg_size;
+
+	/* Update the JPEG result Metadata. */
+	resultMetadata->addEntry(ANDROID_JPEG_SIZE, jpeg_size);
+
+	return 0;
+}
diff --git a/src/android/jpeg/post_processor_jpeg.h b/src/android/jpeg/post_processor_jpeg.h
new file mode 100644
index 00000000..5c399be9
--- /dev/null
+++ b/src/android/jpeg/post_processor_jpeg.h
@@ -0,0 +1,45 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * post_processor_jpeg.h - JPEG Post Processor
+ */
+#ifndef __ANDROID_POST_PROCESSOR_JPEG_H__
+#define __ANDROID_POST_PROCESSOR_JPEG_H__
+
+#include "../post_processor.h"
+#include "encoder_libjpeg.h"
+#include "thumbnailer.h"
+
+#include <libcamera/geometry.h>
+
+#include "libcamera/internal/framebuffer.h"
+
+class CameraDevice;
+
+/*
+ * Post processor encoding camera frames to the Android JPEG blob format,
+ * including EXIF metadata and an optional embedded thumbnail.
+ */
+class PostProcessorJpeg : public PostProcessor
+{
+public:
+	PostProcessorJpeg(CameraDevice *const device);
+
+	int configure(const libcamera::StreamConfiguration &incfg,
+		      const libcamera::StreamConfiguration &outcfg) override;
+	int process(const libcamera::FrameBuffer &source,
+		    CameraBuffer *destination,
+		    const CameraMetadata &requestMetadata,
+		    CameraMetadata *resultMetadata) override;
+
+private:
+	void generateThumbnail(const libcamera::FrameBuffer &source,
+			       const libcamera::Size &targetSize,
+			       unsigned int quality,
+			       std::vector<unsigned char> *thumbnail);
+
+	/* Owning camera device, used for maker/model and JPEG buffer size. */
+	CameraDevice *const cameraDevice_;
+	/* Main image encoder, created in configure(). */
+	std::unique_ptr<Encoder> encoder_;
+	/* Output stream size, set in configure(). */
+	libcamera::Size streamSize_;
+	/* Dedicated encoder for the EXIF thumbnail. */
+	EncoderLibJpeg thumbnailEncoder_;
+	Thumbnailer thumbnailer_;
+};
+
+#endif /* __ANDROID_POST_PROCESSOR_JPEG_H__ */
diff --git a/src/android/jpeg/thumbnailer.cpp b/src/android/jpeg/thumbnailer.cpp
new file mode 100644
index 00000000..5cb00744
--- /dev/null
+++ b/src/android/jpeg/thumbnailer.cpp
@@ -0,0 +1,93 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * thumbnailer.cpp - Simple image thumbnailer
+ */
+
+#include "thumbnailer.h"
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/formats.h>
+
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(Thumbnailer)
+
+/* Construct an unconfigured thumbnailer; configure() must succeed first. */
+Thumbnailer::Thumbnailer()
+	: valid_(false)
+{
+}
+
+/*
+ * Record the source frame geometry and format. Only NV12 input is
+ * supported; any other format leaves the thumbnailer invalid and
+ * createThumbnail() will refuse to run.
+ */
+void Thumbnailer::configure(const Size &sourceSize, PixelFormat pixelFormat)
+{
+	sourceSize_ = sourceSize;
+	pixelFormat_ = pixelFormat;
+
+	if (pixelFormat_ != formats::NV12) {
+		LOG(Thumbnailer, Error)
+			<< "Failed to configure: Pixel Format "
+			<< pixelFormat_.toString() << " unsupported.";
+		return;
+	}
+
+	valid_ = true;
+}
+
+/*
+ * Scale the NV12 \a source frame down to \a targetSize using
+ * nearest-neighbour sampling, writing the raw NV12 result into
+ * \a destination. On failure \a destination is left untouched.
+ */
+void Thumbnailer::createThumbnail(const FrameBuffer &source,
+				  const Size &targetSize,
+				  std::vector<unsigned char> *destination)
+{
+	/* Check the configuration first to avoid a pointless mapping. */
+	if (!valid_) {
+		LOG(Thumbnailer, Error) << "Config is unconfigured or invalid.";
+		return;
+	}
+
+	MappedFrameBuffer frame(&source, PROT_READ);
+	if (!frame.isValid()) {
+		LOG(Thumbnailer, Error)
+			<< "Failed to map FrameBuffer : "
+			<< strerror(frame.error());
+		return;
+	}
+
+	const unsigned int sw = sourceSize_.width;
+	const unsigned int sh = sourceSize_.height;
+	const unsigned int tw = targetSize.width;
+	const unsigned int th = targetSize.height;
+
+	/* The scaling loop below processes 2x2 pixel blocks. */
+	ASSERT(tw % 2 == 0 && th % 2 == 0);
+
+	/* Image scaling block implementing nearest-neighbour algorithm. */
+	unsigned char *src = static_cast<unsigned char *>(frame.maps()[0].data());
+	unsigned char *srcC = src + sh * sw;
+	unsigned char *srcCb, *srcCr;
+	unsigned char *dstY, *srcY;
+
+	/* NV12: full-size Y plane followed by a half-height CbCr plane. */
+	size_t dstSize = (th * tw) + ((th / 2) * tw);
+	destination->resize(dstSize);
+	unsigned char *dst = destination->data();
+	unsigned char *dstC = dst + th * tw;
+
+	for (unsigned int y = 0; y < th; y += 2) {
+		unsigned int sourceY = (sh * y + th / 2) / th;
+
+		dstY = dst + y * tw;
+		srcY = src + sw * sourceY;
+		srcCb = srcC + (sourceY / 2) * sw + 0;
+		srcCr = srcC + (sourceY / 2) * sw + 1;
+
+		for (unsigned int x = 0; x < tw; x += 2) {
+			unsigned int sourceX = (sw * x + tw / 2) / tw;
+
+			dstY[x] = srcY[sourceX];
+			dstY[tw + x] = srcY[sw + sourceX];
+			dstY[x + 1] = srcY[sourceX + 1];
+			dstY[tw + x + 1] = srcY[sw + sourceX + 1];
+
+			dstC[(y / 2) * tw + x + 0] = srcCb[(sourceX / 2) * 2];
+			dstC[(y / 2) * tw + x + 1] = srcCr[(sourceX / 2) * 2];
+		}
+	}
+}
diff --git a/src/android/jpeg/thumbnailer.h b/src/android/jpeg/thumbnailer.h
new file mode 100644
index 00000000..68cbf743
--- /dev/null
+++ b/src/android/jpeg/thumbnailer.h
@@ -0,0 +1,34 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * thumbnailer.h - Simple image thumbnailer
+ */
+#ifndef __ANDROID_JPEG_THUMBNAILER_H__
+#define __ANDROID_JPEG_THUMBNAILER_H__
+
+#include <libcamera/geometry.h>
+
+#include "libcamera/internal/formats.h"
+#include "libcamera/internal/framebuffer.h"
+
+/* Simple nearest-neighbour downscaler producing raw NV12 thumbnails. */
+class Thumbnailer
+{
+public:
+	Thumbnailer();
+
+	void configure(const libcamera::Size &sourceSize,
+		       libcamera::PixelFormat pixelFormat);
+	void createThumbnail(const libcamera::FrameBuffer &source,
+			     const libcamera::Size &targetSize,
+			     std::vector<unsigned char> *dest);
+	const libcamera::PixelFormat &pixelFormat() const { return pixelFormat_; }
+
+private:
+	/* Source frame format and size, set by configure(). */
+	libcamera::PixelFormat pixelFormat_;
+	libcamera::Size sourceSize_;
+
+	/* True only after configure() accepted the format. */
+	bool valid_;
+};
+
+#endif /* __ANDROID_JPEG_THUMBNAILER_H__ */
diff --git a/src/android/meson.build b/src/android/meson.build
index 0293c203..7d1e7e85 100644
--- a/src/android/meson.build
+++ b/src/android/meson.build
@@ -3,6 +3,8 @@
android_deps = [
dependency('libexif', required : get_option('android')),
dependency('libjpeg', required : get_option('android')),
+ dependency('yaml-0.1', required : get_option('android')),
+ libcamera_private,
]
android_enabled = true
@@ -14,20 +16,63 @@ foreach dep : android_deps
endif
endforeach
+libyuv_dep = dependency('libyuv', required : false)
+
+# Fallback to a subproject if libyuv isn't found, as it's typically not
+# provided by distributions.
+if not libyuv_dep.found()
+ cmake = import('cmake')
+
+ libyuv_vars = cmake.subproject_options()
+ libyuv_vars.add_cmake_defines({'CMAKE_POSITION_INDEPENDENT_CODE': 'ON'})
+ libyuv_vars.set_override_option('cpp_std', 'c++17')
+ libyuv_vars.append_compile_args('cpp',
+ '-Wno-sign-compare',
+ '-Wno-unused-variable',
+ '-Wno-unused-parameter')
+ libyuv_vars.append_link_args('-ljpeg')
+ libyuv = cmake.subproject('libyuv', options : libyuv_vars)
+ libyuv_dep = libyuv.dependency('yuv')
+endif
+
+android_deps += [libyuv_dep]
+
android_hal_sources = files([
'camera3_hal.cpp',
- 'camera_hal_manager.cpp',
+ 'camera_capabilities.cpp',
'camera_device.cpp',
+ 'camera_hal_config.cpp',
+ 'camera_hal_manager.cpp',
'camera_metadata.cpp',
'camera_ops.cpp',
+ 'camera_stream.cpp',
+ 'camera_worker.cpp',
'jpeg/encoder_libjpeg.cpp',
'jpeg/exif.cpp',
+ 'jpeg/post_processor_jpeg.cpp',
+ 'jpeg/thumbnailer.cpp',
+ 'yuv/post_processor_yuv.cpp'
])
+android_cpp_args = []
+
+subdir('cros')
+subdir('mm')
+
android_camera_metadata_sources = files([
'metadata/camera_metadata.c',
])
android_camera_metadata = static_library('camera_metadata',
android_camera_metadata_sources,
+ c_args : '-Wno-shadow',
include_directories : android_includes)
+
+libcamera_hal = shared_library('libcamera-hal',
+ android_hal_sources,
+ name_prefix : '',
+ link_with : android_camera_metadata,
+ install : true,
+ cpp_args : android_cpp_args,
+ include_directories : android_includes,
+ dependencies : android_deps)
diff --git a/src/android/mm/cros_camera_buffer.cpp b/src/android/mm/cros_camera_buffer.cpp
new file mode 100644
index 00000000..bb55b95e
--- /dev/null
+++ b/src/android/mm/cros_camera_buffer.cpp
@@ -0,0 +1,134 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * cros_camera_buffer.cpp - Chromium OS buffer backend using CameraBufferManager
+ */
+
+#include "../camera_buffer.h"
+
+#include <libcamera/base/log.h>
+
+#include "cros-camera/camera_buffer_manager.h"
+
+using namespace libcamera;
+
+LOG_DECLARE_CATEGORY(HAL)
+
+/* CameraBuffer backend backed by the Chromium OS CameraBufferManager. */
+class CameraBuffer::Private : public Extensible::Private
+{
+	LIBCAMERA_DECLARE_PUBLIC(CameraBuffer)
+
+public:
+	Private(CameraBuffer *cameraBuffer,
+		buffer_handle_t camera3Buffer, int flags);
+	~Private();
+
+	bool isValid() const { return valid_; }
+
+	unsigned int numPlanes() const;
+
+	Span<uint8_t> plane(unsigned int plane);
+
+	size_t jpegBufferSize(size_t maxJpegBufferSize) const;
+
+private:
+	cros::CameraBufferManager *bufferManager_;
+	buffer_handle_t handle_;
+	unsigned int numPlanes_;
+	/* True once the buffer is registered and successfully locked. */
+	bool valid_;
+	/* Tracks Register() success so ~Private() can Deregister(). */
+	bool registered_;
+	/*
+	 * Mapped address(es): a single pointer for one-plane buffers, the
+	 * android_ycbcr plane pointers for two- or three-plane buffers.
+	 */
+	union {
+		void *addr;
+		android_ycbcr ycbcr;
+	} mem;
+};
+
+/*
+ * Register the gralloc buffer with the buffer manager and lock (map) it.
+ * On any failure valid_ stays false; partial setup is undone in ~Private().
+ */
+CameraBuffer::Private::Private(CameraBuffer *cameraBuffer,
+			       buffer_handle_t camera3Buffer,
+			       [[maybe_unused]] int flags)
+	: Extensible::Private(cameraBuffer), handle_(camera3Buffer),
+	  numPlanes_(0), valid_(false), registered_(false)
+{
+	bufferManager_ = cros::CameraBufferManager::GetInstance();
+
+	int ret = bufferManager_->Register(camera3Buffer);
+	if (ret) {
+		LOG(HAL, Error) << "Failed registering a buffer: " << ret;
+		return;
+	}
+
+	registered_ = true;
+	numPlanes_ = bufferManager_->GetNumPlanes(camera3Buffer);
+	/* Single-plane buffers use Lock(), YCbCr buffers use LockYCbCr(). */
+	switch (numPlanes_) {
+	case 1: {
+		ret = bufferManager_->Lock(handle_, 0, 0, 0, 0, 0, &mem.addr);
+		if (ret) {
+			LOG(HAL, Error) << "Single plane buffer mapping failed";
+			return;
+		}
+		break;
+	}
+	case 2:
+	case 3: {
+		ret = bufferManager_->LockYCbCr(handle_, 0, 0, 0, 0, 0,
+						&mem.ycbcr);
+		if (ret) {
+			LOG(HAL, Error) << "YCbCr buffer mapping failed";
+			return;
+		}
+		break;
+	}
+	default:
+		LOG(HAL, Error) << "Invalid number of planes: " << numPlanes_;
+		return;
+	}
+
+	valid_ = true;
+}
+
+/* Unlock only if mapping succeeded; deregister only if Register() did. */
+CameraBuffer::Private::~Private()
+{
+	if (valid_)
+		bufferManager_->Unlock(handle_);
+	if (registered_)
+		bufferManager_->Deregister(handle_);
+}
+
+/* Query the plane count from the buffer manager. */
+unsigned int CameraBuffer::Private::numPlanes() const
+{
+	return bufferManager_->GetNumPlanes(handle_);
+}
+
+/*
+ * Return a span over the requested plane's mapped memory. An out-of-range
+ * plane index yields an empty span (the previous code left addr
+ * uninitialized in that case, which was undefined behaviour).
+ */
+Span<uint8_t> CameraBuffer::Private::plane(unsigned int plane)
+{
+	void *addr;
+
+	switch (numPlanes()) {
+	case 1:
+		addr = mem.addr;
+		break;
+	default:
+		switch (plane) {
+		case 0:
+			addr = mem.ycbcr.y;
+			break;
+		case 1:
+			addr = mem.ycbcr.cb;
+			break;
+		case 2:
+			addr = mem.ycbcr.cr;
+			break;
+		default:
+			LOG(HAL, Error) << "Invalid plane index: " << plane;
+			return {};
+		}
+	}
+
+	return { static_cast<uint8_t *>(addr),
+		 bufferManager_->GetPlaneSize(handle_, plane) };
+}
+
+/* The cros buffer manager reports the exact allocated JPEG plane size. */
+size_t CameraBuffer::Private::jpegBufferSize([[maybe_unused]] size_t maxJpegBufferSize) const
+{
+	return bufferManager_->GetPlaneSize(handle_, 0);
+}
+
+PUBLIC_CAMERA_BUFFER_IMPLEMENTATION
diff --git a/src/android/mm/generic_camera_buffer.cpp b/src/android/mm/generic_camera_buffer.cpp
new file mode 100644
index 00000000..166be36e
--- /dev/null
+++ b/src/android/mm/generic_camera_buffer.cpp
@@ -0,0 +1,91 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * generic_camera_buffer.cpp - Generic Android frame buffer backend
+ */
+
+#include "../camera_buffer.h"
+
+#include <unistd.h>
+
+#include <libcamera/base/log.h>
+
+#include "libcamera/internal/framebuffer.h"
+
+using namespace libcamera;
+
+LOG_DECLARE_CATEGORY(HAL)
+
+/*
+ * Generic CameraBuffer backend that mmap()s each dmabuf plane directly;
+ * MappedBuffer provides maps_/error_ storage and unmapping on destruction.
+ */
+class CameraBuffer::Private : public Extensible::Private,
+			      public libcamera::MappedBuffer
+{
+	LIBCAMERA_DECLARE_PUBLIC(CameraBuffer)
+
+public:
+	Private(CameraBuffer *cameraBuffer,
+		buffer_handle_t camera3Buffer, int flags);
+	~Private();
+
+	unsigned int numPlanes() const;
+
+	Span<uint8_t> plane(unsigned int plane);
+
+	size_t jpegBufferSize(size_t maxJpegBufferSize) const;
+};
+
+/*
+ * Map every valid file descriptor of the gralloc handle. \a flags carries
+ * the PROT_* bits used as mmap()'s protection argument. On failure error_
+ * records -errno and mapping stops; planes mapped so far are released by
+ * the MappedBuffer destructor.
+ */
+CameraBuffer::Private::Private(CameraBuffer *cameraBuffer,
+			       buffer_handle_t camera3Buffer, int flags)
+	: Extensible::Private(cameraBuffer)
+{
+	maps_.reserve(camera3Buffer->numFds);
+	error_ = 0;
+
+	for (int i = 0; i < camera3Buffer->numFds; i++) {
+		/* Gralloc uses -1 for unused fd slots. */
+		if (camera3Buffer->data[i] == -1)
+			continue;
+
+		/* The plane length is the seekable size of its dmabuf. */
+		off_t length = lseek(camera3Buffer->data[i], 0, SEEK_END);
+		if (length < 0) {
+			error_ = -errno;
+			LOG(HAL, Error) << "Failed to query plane length";
+			break;
+		}
+
+		void *address = mmap(nullptr, length, flags, MAP_SHARED,
+				     camera3Buffer->data[i], 0);
+		if (address == MAP_FAILED) {
+			error_ = -errno;
+			LOG(HAL, Error) << "Failed to mmap plane";
+			break;
+		}
+
+		maps_.emplace_back(static_cast<uint8_t *>(address),
+				   static_cast<size_t>(length));
+	}
+}
+
+/* Unmapping is handled by the MappedBuffer base class destructor. */
+CameraBuffer::Private::~Private()
+{
+}
+
+/* One map entry exists per successfully mapped plane. */
+unsigned int CameraBuffer::Private::numPlanes() const
+{
+	return maps_.size();
+}
+
+/* Return the mapped plane, or an empty span for an out-of-range index. */
+Span<uint8_t> CameraBuffer::Private::plane(unsigned int plane)
+{
+	if (plane >= maps_.size())
+		return {};
+
+	return maps_[plane];
+}
+
+/*
+ * Clamp the first plane's size to the device's maximum JPEG buffer size.
+ * NOTE(review): assumes at least one plane was mapped — maps_[0] is not
+ * bounds-checked here; verify callers only use this on valid buffers.
+ */
+size_t CameraBuffer::Private::jpegBufferSize(size_t maxJpegBufferSize) const
+{
+	return std::min<unsigned int>(maps_[0].size(),
+				      maxJpegBufferSize);
+}
+
+PUBLIC_CAMERA_BUFFER_IMPLEMENTATION
diff --git a/src/android/mm/meson.build b/src/android/mm/meson.build
new file mode 100644
index 00000000..eeb5cc2e
--- /dev/null
+++ b/src/android/mm/meson.build
@@ -0,0 +1,9 @@
+# SPDX-License-Identifier: CC0-1.0
+
+# Select the CameraBuffer memory backend matching the Android platform.
+platform = get_option('android_platform')
+if platform == 'generic'
+    android_hal_sources += files(['generic_camera_buffer.cpp'])
+elif platform == 'cros'
+    android_hal_sources += files(['cros_camera_buffer.cpp'])
+    # The cros backend requires the Chromium OS camera buffer manager.
+    android_deps += [dependency('libcros_camera')]
+endif
diff --git a/src/android/post_processor.h b/src/android/post_processor.h
new file mode 100644
index 00000000..689f85d9
--- /dev/null
+++ b/src/android/post_processor.h
@@ -0,0 +1,32 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2020, Google Inc.
+ *
+ * post_processor.h - CameraStream Post Processing Interface
+ */
+#ifndef __ANDROID_POST_PROCESSOR_H__
+#define __ANDROID_POST_PROCESSOR_H__
+
+#include <libcamera/framebuffer.h>
+#include <libcamera/stream.h>
+
+#include "libcamera/internal/framebuffer.h"
+
+#include "camera_buffer.h"
+
+class CameraMetadata;
+
+/*
+ * Abstract interface for per-stream post processing (JPEG encoding, YUV
+ * scaling, ...). configure() validates the stream pair, process() converts
+ * one source frame into the destination buffer.
+ */
+class PostProcessor
+{
+public:
+	virtual ~PostProcessor() = default;
+
+	virtual int configure(const libcamera::StreamConfiguration &inCfg,
+			      const libcamera::StreamConfiguration &outCfg) = 0;
+	virtual int process(const libcamera::FrameBuffer &source,
+			    CameraBuffer *destination,
+			    const CameraMetadata &requestMetadata,
+			    CameraMetadata *resultMetadata) = 0;
+};
+
+#endif /* __ANDROID_POST_PROCESSOR_H__ */
diff --git a/src/android/yuv/post_processor_yuv.cpp b/src/android/yuv/post_processor_yuv.cpp
new file mode 100644
index 00000000..772e805b
--- /dev/null
+++ b/src/android/yuv/post_processor_yuv.cpp
@@ -0,0 +1,143 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * post_processor_yuv.cpp - Post Processor using libyuv
+ */
+
+#include "post_processor_yuv.h"
+
+#include <libyuv/scale.h>
+
+#include <libcamera/base/log.h>
+
+#include <libcamera/formats.h>
+#include <libcamera/geometry.h>
+#include <libcamera/pixel_format.h>
+
+#include "libcamera/internal/formats.h"
+
+using namespace libcamera;
+
+LOG_DEFINE_CATEGORY(YUV)
+
+/*
+ * Validate the stream pair for NV12 down-scaling: same pixel format, no
+ * up-scaling, NV12 only. On success the plane strides and lengths are
+ * precomputed for process(). Returns 0 or -EINVAL.
+ */
+int PostProcessorYuv::configure(const StreamConfiguration &inCfg,
+				const StreamConfiguration &outCfg)
+{
+	if (inCfg.pixelFormat != outCfg.pixelFormat) {
+		LOG(YUV, Error) << "Pixel format conversion is not supported"
+				<< " (from " << inCfg.pixelFormat.toString()
+				<< " to " << outCfg.pixelFormat.toString() << ")";
+		return -EINVAL;
+	}
+
+	if (inCfg.size < outCfg.size) {
+		LOG(YUV, Error) << "Up-scaling is not supported"
+				<< " (from " << inCfg.size.toString()
+				<< " to " << outCfg.size.toString() << ")";
+		return -EINVAL;
+	}
+
+	if (inCfg.pixelFormat != formats::NV12) {
+		LOG(YUV, Error) << "Unsupported format " << inCfg.pixelFormat
+				<< " (only NV12 is supported)";
+		return -EINVAL;
+	}
+
+	calculateLengths(inCfg, outCfg);
+	return 0;
+}
+
+/*
+ * Scale the NV12 source frame into the destination buffer using libyuv's
+ * bilinear NV12Scale. Metadata is unused by this processor. Returns 0 on
+ * success or -EINVAL.
+ */
+int PostProcessorYuv::process(const FrameBuffer &source,
+			      CameraBuffer *destination,
+			      [[maybe_unused]] const CameraMetadata &requestMetadata,
+			      [[maybe_unused]] CameraMetadata *metadata)
+{
+	if (!isValidBuffers(source, *destination))
+		return -EINVAL;
+
+	const MappedFrameBuffer sourceMapped(&source, PROT_READ);
+	if (!sourceMapped.isValid()) {
+		LOG(YUV, Error) << "Failed to mmap camera frame buffer";
+		return -EINVAL;
+	}
+
+	/* Plane 0 is Y, plane 1 is the interleaved CbCr plane. */
+	int ret = libyuv::NV12Scale(sourceMapped.maps()[0].data(),
+				    sourceStride_[0],
+				    sourceMapped.maps()[1].data(),
+				    sourceStride_[1],
+				    sourceSize_.width, sourceSize_.height,
+				    destination->plane(0).data(),
+				    destinationStride_[0],
+				    destination->plane(1).data(),
+				    destinationStride_[1],
+				    destinationSize_.width,
+				    destinationSize_.height,
+				    libyuv::FilterMode::kFilterBilinear);
+	if (ret) {
+		LOG(YUV, Error) << "Failed NV12 scaling: " << ret;
+		return -EINVAL;
+	}
+
+	return 0;
+}
+
+/*
+ * Sanity-check that both buffers have two NV12 planes large enough for
+ * the sizes computed in calculateLengths(). Returns false (with a log
+ * message) on any mismatch.
+ */
+bool PostProcessorYuv::isValidBuffers(const FrameBuffer &source,
+				      const CameraBuffer &destination) const
+{
+	if (source.planes().size() != 2) {
+		LOG(YUV, Error) << "Invalid number of source planes: "
+				<< source.planes().size();
+		return false;
+	}
+	if (destination.numPlanes() != 2) {
+		LOG(YUV, Error) << "Invalid number of destination planes: "
+				<< destination.numPlanes();
+		return false;
+	}
+
+	if (source.planes()[0].length < sourceLength_[0] ||
+	    source.planes()[1].length < sourceLength_[1]) {
+		LOG(YUV, Error)
+			<< "The source planes lengths are too small, actual size: {"
+			<< source.planes()[0].length << ", "
+			<< source.planes()[1].length
+			<< "}, expected size: {"
+			<< sourceLength_[0] << ", "
+			<< sourceLength_[1] << "}";
+		return false;
+	}
+	if (destination.plane(0).size() < destinationLength_[0] ||
+	    destination.plane(1).size() < destinationLength_[1]) {
+		/*
+		 * Log the destination expectations here: the original code
+		 * mistakenly printed sourceLength_ in this message.
+		 */
+		LOG(YUV, Error)
+			<< "The destination planes lengths are too small, actual size: {"
+			<< destination.plane(0).size() << ", "
+			<< destination.plane(1).size()
+			<< "}, expected size: {"
+			<< destinationLength_[0] << ", "
+			<< destinationLength_[1] << "}";
+		return false;
+	}
+
+	return true;
+}
+
+/*
+ * Precompute per-plane strides and minimum lengths for both streams.
+ * Source strides come from the configured stream stride; destination
+ * strides are derived from the NV12 format descriptor. Lengths are
+ * stride x (height / vertical subsampling), rounded up.
+ */
+void PostProcessorYuv::calculateLengths(const StreamConfiguration &inCfg,
+					const StreamConfiguration &outCfg)
+{
+	sourceSize_ = inCfg.size;
+	destinationSize_ = outCfg.size;
+
+	const PixelFormatInfo &nv12Info = PixelFormatInfo::info(formats::NV12);
+	for (unsigned int i = 0; i < 2; i++) {
+		/* NV12 Y and CbCr planes share the same stride. */
+		sourceStride_[i] = inCfg.stride;
+		destinationStride_[i] = nv12Info.stride(destinationSize_.width, i, 1);
+
+		const unsigned int vertSubSample =
+			nv12Info.planes[i].verticalSubSampling;
+		sourceLength_[i] = sourceStride_[i] *
+			((sourceSize_.height + vertSubSample - 1) / vertSubSample);
+		destinationLength_[i] = destinationStride_[i] *
+			((destinationSize_.height + vertSubSample - 1) / vertSubSample);
+	}
+}
diff --git a/src/android/yuv/post_processor_yuv.h b/src/android/yuv/post_processor_yuv.h
new file mode 100644
index 00000000..f8b1ba23
--- /dev/null
+++ b/src/android/yuv/post_processor_yuv.h
@@ -0,0 +1,42 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2021, Google Inc.
+ *
+ * post_processor_yuv.h - Post Processor using libyuv
+ */
+#ifndef __ANDROID_POST_PROCESSOR_YUV_H__
+#define __ANDROID_POST_PROCESSOR_YUV_H__
+
+#include "../post_processor.h"
+
+#include <libcamera/geometry.h>
+
+class CameraDevice;
+
+/* Post processor performing NV12 down-scaling via libyuv. */
+class PostProcessorYuv : public PostProcessor
+{
+public:
+	PostProcessorYuv() = default;
+
+	int configure(const libcamera::StreamConfiguration &incfg,
+		      const libcamera::StreamConfiguration &outcfg) override;
+	int process(const libcamera::FrameBuffer &source,
+		    CameraBuffer *destination,
+		    const CameraMetadata &requestMetadata,
+		    CameraMetadata *metadata) override;
+
+private:
+	bool isValidBuffers(const libcamera::FrameBuffer &source,
+			    const CameraBuffer &destination) const;
+	void calculateLengths(const libcamera::StreamConfiguration &inCfg,
+			      const libcamera::StreamConfiguration &outCfg);
+
+	/* Geometry and per-plane layout, precomputed in configure(). */
+	libcamera::Size sourceSize_;
+	libcamera::Size destinationSize_;
+	unsigned int sourceLength_[2] = {};
+	unsigned int destinationLength_[2] = {};
+	unsigned int sourceStride_[2] = {};
+	unsigned int destinationStride_[2] = {};
+};
+
+#endif /* __ANDROID_POST_PROCESSOR_YUV_H__ */