Diffstat (limited to 'src/android/camera_device.cpp')
-rw-r--r-- | src/android/camera_device.cpp | 2074
1 file changed, 859 insertions(+), 1215 deletions(-)
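The largest hunk below replaces the HAL's static format tables with a Camera3StreamConfig sorting pass. The ordering rule, as documented in the new sortCamera3StreamConfigs() comment, is: the largest NV12 configuration first (or the NV12 stream feeding the JPEG encoder, when that one is larger), then the JPEG source, then the remaining configurations with larger resolutions earlier. What follows is a minimal standalone sketch of that rule only; Config, reorder() and the feedsJpeg flag are illustrative stand-ins, not the patch's actual types (which use Camera3StreamConfig, libcamera::PixelFormat, libcamera::Size and camera3_stream_t).

/*
 * Sketch of the stream-configuration ordering rule, with simplified types.
 * Build with: g++ -std=c++17 -Wall reorder_sketch.cpp
 */
#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

struct Config {
	std::string format;	/* e.g. "NV12", "YUYV" */
	unsigned int width;
	unsigned int height;
	bool feedsJpeg;		/* this stream sources the JPEG encoder */

	unsigned long area() const
	{
		return static_cast<unsigned long>(width) * height;
	}
};

static std::vector<Config> reorder(std::vector<Config> configs)
{
	std::vector<Config> sorted;
	sorted.reserve(configs.size());

	/* Pull out the configuration that feeds the JPEG encoder, if any. */
	auto jpegIt = std::find_if(configs.begin(), configs.end(),
				   [](const Config &c) { return c.feedsJpeg; });
	std::vector<Config> jpeg;
	if (jpegIt != configs.end()) {
		jpeg.push_back(*jpegIt);
		configs.erase(jpegIt);
	}

	/* Locate the largest remaining plain NV12 configuration. */
	auto nv12It = configs.end();
	for (auto it = configs.begin(); it != configs.end(); ++it) {
		if (it->format != "NV12")
			continue;
		if (nv12It == configs.end() || nv12It->area() < it->area())
			nv12It = it;
	}

	/*
	 * If JPEG is encoded from an NV12 stream larger than every plain
	 * NV12 configuration, that stream takes the first slot.
	 */
	if (!jpeg.empty() && jpeg[0].format == "NV12" &&
	    (nv12It == configs.end() || nv12It->area() < jpeg[0].area())) {
		sorted.push_back(jpeg[0]);
		jpeg.clear();
	}

	if (nv12It != configs.end()) {
		sorted.push_back(*nv12It);
		configs.erase(nv12It);
	}

	/* The JPEG source, when not already placed, comes next. */
	if (!jpeg.empty())
		sorted.push_back(jpeg[0]);

	/* Everything else: grouped by format, larger resolutions first. */
	std::stable_sort(configs.begin(), configs.end(),
			 [](const Config &a, const Config &b) {
				 if (a.format != b.format)
					 return a.format < b.format;
				 return a.area() > b.area();
			 });
	sorted.insert(sorted.end(), configs.begin(), configs.end());

	return sorted;
}

int main()
{
	std::vector<Config> configs = {
		{ "YUYV", 640, 480, false },
		{ "NV12", 1280, 720, false },
		{ "NV12", 2560, 1920, true },	/* JPEG encoded from this */
		{ "NV12", 1920, 1080, false },
	};

	/*
	 * Prints: NV12 2560x1920 (JPEG source), NV12 1920x1080,
	 * NV12 1280x720, YUYV 640x480.
	 */
	for (const Config &c : reorder(configs))
		std::cout << c.format << " " << c.width << "x" << c.height
			  << (c.feedsJpeg ? " (JPEG source)" : "") << "\n";

	return 0;
}

The real helper additionally keeps each configuration's association with its camera3_stream_t entries and drains the remaining formats in a round-robin over a per-format map; the sketch above captures only the priority order.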
diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp index 751699cd..678cde23 100644 --- a/src/android/camera_device.cpp +++ b/src/android/camera_device.cpp @@ -6,174 +6,212 @@ */ #include "camera_device.h" +#include "camera_hal_config.h" #include "camera_ops.h" +#include "post_processor.h" +#include <algorithm> +#include <fstream> #include <sys/mman.h> -#include <tuple> +#include <unistd.h> #include <vector> +#include <libcamera/base/log.h> +#include <libcamera/base/thread.h> +#include <libcamera/base/utils.h> + +#include <libcamera/control_ids.h> #include <libcamera/controls.h> #include <libcamera/formats.h> #include <libcamera/property_ids.h> -#include "libcamera/internal/formats.h" -#include "libcamera/internal/log.h" -#include "libcamera/internal/utils.h" - -#include "camera_metadata.h" #include "system/graphics.h" -#include "jpeg/encoder_libjpeg.h" -#include "jpeg/exif.h" - using namespace libcamera; +LOG_DECLARE_CATEGORY(HAL) + namespace { /* - * \var camera3Resolutions - * \brief The list of image resolutions defined as mandatory to be supported by - * the Android Camera3 specification + * \struct Camera3StreamConfig + * \brief Data to store StreamConfiguration associated with camera3_stream(s) + * \var streams List of the pairs of a stream requested by Android HAL client + * and CameraStream::Type associated with the stream + * \var config StreamConfiguration for streams */ -const std::vector<Size> camera3Resolutions = { - { 320, 240 }, - { 640, 480 }, - { 1280, 720 }, - { 1920, 1080 } -}; +struct Camera3StreamConfig { + struct Camera3Stream { + camera3_stream_t *stream; + CameraStream::Type type; + }; -/* - * \struct Camera3Format - * \brief Data associated with an Android format identifier - * \var libcameraFormats List of libcamera pixel formats compatible with the - * Android format - * \var name The human-readable representation of the Android format code - */ -struct Camera3Format { - std::vector<PixelFormat> libcameraFormats; - bool mandatory; - const char *name; + std::vector<Camera3Stream> streams; + StreamConfiguration config; }; /* - * \var camera3FormatsMap - * \brief Associate Android format code with ancillary data + * Reorder the configurations so that libcamera::Camera can accept them as much + * as possible. The sort rule is as follows. + * 1.) The configuration for NV12 request whose resolution is the largest. + * 2.) The configuration for JPEG request. + * 3.) Others. Larger resolutions and different formats are put earlier. 
*/ -const std::map<int, const Camera3Format> camera3FormatsMap = { - { - HAL_PIXEL_FORMAT_BLOB, { - { formats::MJPEG }, - true, - "BLOB" - } - }, { - HAL_PIXEL_FORMAT_YCbCr_420_888, { - { formats::NV12, formats::NV21 }, - true, - "YCbCr_420_888" +void sortCamera3StreamConfigs(std::vector<Camera3StreamConfig> &unsortedConfigs, + const camera3_stream_t *jpegStream) +{ + const Camera3StreamConfig *jpegConfig = nullptr; + + std::map<PixelFormat, std::vector<const Camera3StreamConfig *>> formatToConfigs; + for (const auto &streamConfig : unsortedConfigs) { + if (jpegStream && !jpegConfig) { + const auto &streams = streamConfig.streams; + if (std::find_if(streams.begin(), streams.end(), + [jpegStream](const auto &stream) { + return stream.stream == jpegStream; + }) != streams.end()) { + jpegConfig = &streamConfig; + continue; + } } - }, { + formatToConfigs[streamConfig.config.pixelFormat].push_back(&streamConfig); + } + + if (jpegStream && !jpegConfig) + LOG(HAL, Fatal) << "No Camera3StreamConfig is found for JPEG"; + + for (auto &fmt : formatToConfigs) { + auto &streamConfigs = fmt.second; + + /* Sorted by resolution. Smaller is put first. */ + std::sort(streamConfigs.begin(), streamConfigs.end(), + [](const auto *streamConfigA, const auto *streamConfigB) { + const Size &sizeA = streamConfigA->config.size; + const Size &sizeB = streamConfigB->config.size; + return sizeA < sizeB; + }); + } + + std::vector<Camera3StreamConfig> sortedConfigs; + sortedConfigs.reserve(unsortedConfigs.size()); + + /* + * NV12 is the most prioritized format. Put the configuration with NV12 + * and the largest resolution first. + */ + const auto nv12It = formatToConfigs.find(formats::NV12); + if (nv12It != formatToConfigs.end()) { + auto &nv12Configs = nv12It->second; + const Camera3StreamConfig *nv12Largest = nv12Configs.back(); + /* - * \todo Translate IMPLEMENTATION_DEFINED inspecting the gralloc - * usage flag. For now, copy the YCbCr_420 configuration. + * If JPEG will be created from NV12 and the size is larger than + * the largest NV12 configurations, then put the NV12 + * configuration for JPEG first. */ - HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, { - { formats::NV12, formats::NV21 }, - true, - "IMPLEMENTATION_DEFINED" - } - }, { - HAL_PIXEL_FORMAT_RAW10, { - { - formats::SBGGR10_CSI2P, - formats::SGBRG10_CSI2P, - formats::SGRBG10_CSI2P, - formats::SRGGB10_CSI2P - }, - false, - "RAW10" - } - }, { - HAL_PIXEL_FORMAT_RAW12, { - { - formats::SBGGR12_CSI2P, - formats::SGBRG12_CSI2P, - formats::SGRBG12_CSI2P, - formats::SRGGB12_CSI2P - }, - false, - "RAW12" - } - }, { - HAL_PIXEL_FORMAT_RAW16, { - { - formats::SBGGR16, - formats::SGBRG16, - formats::SGRBG16, - formats::SRGGB16 - }, - false, - "RAW16" + if (jpegConfig && jpegConfig->config.pixelFormat == formats::NV12) { + const Size &nv12SizeForJpeg = jpegConfig->config.size; + const Size &nv12LargestSize = nv12Largest->config.size; + + if (nv12LargestSize < nv12SizeForJpeg) { + LOG(HAL, Debug) << "Insert " << jpegConfig->config.toString(); + sortedConfigs.push_back(std::move(*jpegConfig)); + jpegConfig = nullptr; + } } - }, { - HAL_PIXEL_FORMAT_RAW_OPAQUE, { - { - formats::SBGGR10_IPU3, - formats::SGBRG10_IPU3, - formats::SGRBG10_IPU3, - formats::SRGGB10_IPU3 - }, - false, - "RAW_OPAQUE" + + LOG(HAL, Debug) << "Insert " << nv12Largest->config.toString(); + sortedConfigs.push_back(*nv12Largest); + nv12Configs.pop_back(); + + if (nv12Configs.empty()) + formatToConfigs.erase(nv12It); + } + + /* If the configuration for JPEG is there, then put it. 
*/ + if (jpegConfig) { + LOG(HAL, Debug) << "Insert " << jpegConfig->config.toString(); + sortedConfigs.push_back(std::move(*jpegConfig)); + jpegConfig = nullptr; + } + + /* + * Put configurations with different formats and larger resolutions + * earlier. + */ + while (!formatToConfigs.empty()) { + for (auto it = formatToConfigs.begin(); it != formatToConfigs.end();) { + auto &configs = it->second; + LOG(HAL, Debug) << "Insert " << configs.back()->config.toString(); + sortedConfigs.push_back(*configs.back()); + configs.pop_back(); + + if (configs.empty()) + it = formatToConfigs.erase(it); + else + it++; } - }, -}; + } -} /* namespace */ + ASSERT(sortedConfigs.size() == unsortedConfigs.size()); -LOG_DECLARE_CATEGORY(HAL); + unsortedConfigs = sortedConfigs; +} -class MappedCamera3Buffer : public MappedBuffer +const char *rotationToString(int rotation) { -public: - MappedCamera3Buffer(const buffer_handle_t camera3buffer, int flags); -}; + switch (rotation) { + case CAMERA3_STREAM_ROTATION_0: + return "0"; + case CAMERA3_STREAM_ROTATION_90: + return "90"; + case CAMERA3_STREAM_ROTATION_180: + return "180"; + case CAMERA3_STREAM_ROTATION_270: + return "270"; + } + return "INVALID"; +} -MappedCamera3Buffer::MappedCamera3Buffer(const buffer_handle_t camera3buffer, - int flags) +#if defined(OS_CHROMEOS) +/* + * Check whether the crop_rotate_scale_degrees values for all streams in + * the list are valid according to the Chrome OS camera HAL API. + */ +bool validateCropRotate(const camera3_stream_configuration_t &streamList) { - maps_.reserve(camera3buffer->numFds); - error_ = 0; - - for (int i = 0; i < camera3buffer->numFds; i++) { - if (camera3buffer->data[i] == -1) - continue; - - off_t length = lseek(camera3buffer->data[i], 0, SEEK_END); - if (length < 0) { - error_ = -errno; - LOG(HAL, Error) << "Failed to query plane length"; + ASSERT(streamList.num_streams > 0); + const int cropRotateScaleDegrees = + streamList.streams[0]->crop_rotate_scale_degrees; + for (unsigned int i = 0; i < streamList.num_streams; ++i) { + const camera3_stream_t &stream = *streamList.streams[i]; + + switch (stream.crop_rotate_scale_degrees) { + case CAMERA3_STREAM_ROTATION_0: + case CAMERA3_STREAM_ROTATION_90: + case CAMERA3_STREAM_ROTATION_270: break; - } - void *address = mmap(nullptr, length, flags, MAP_SHARED, - camera3buffer->data[i], 0); - if (address == MAP_FAILED) { - error_ = -errno; - LOG(HAL, Error) << "Failed to mmap plane"; - break; + /* 180° rotation is specified by Chrome OS as invalid. 
*/ + case CAMERA3_STREAM_ROTATION_180: + default: + LOG(HAL, Error) << "Invalid crop_rotate_scale_degrees: " + << stream.crop_rotate_scale_degrees; + return false; } - maps_.emplace_back(static_cast<uint8_t *>(address), - static_cast<size_t>(length)); + if (cropRotateScaleDegrees != stream.crop_rotate_scale_degrees) { + LOG(HAL, Error) << "crop_rotate_scale_degrees in all " + << "streams are not identical"; + return false; + } } -} -CameraStream::CameraStream(PixelFormat format, Size size, - unsigned int index, Encoder *encoder) - : format_(format), size_(size), index_(index), encoder_(encoder) -{ + return true; } +#endif + +} /* namespace */ /* * \struct Camera3RequestDescriptor @@ -183,16 +221,30 @@ CameraStream::CameraStream(PixelFormat format, Size size, */ CameraDevice::Camera3RequestDescriptor::Camera3RequestDescriptor( - unsigned int frameNumber, unsigned int numBuffers) - : frameNumber(frameNumber), numBuffers(numBuffers) + Camera *camera, const camera3_capture_request_t *camera3Request) { - buffers = new camera3_stream_buffer_t[numBuffers]; - frameBuffers.reserve(numBuffers); -} + frameNumber_ = camera3Request->frame_number; -CameraDevice::Camera3RequestDescriptor::~Camera3RequestDescriptor() -{ - delete[] buffers; + /* Copy the camera3 request stream information for later access. */ + const uint32_t numBuffers = camera3Request->num_output_buffers; + buffers_.resize(numBuffers); + for (uint32_t i = 0; i < numBuffers; i++) + buffers_[i] = camera3Request->output_buffers[i]; + + /* + * FrameBuffer instances created by wrapping a camera3 provided dmabuf + * are emplaced in this vector of unique_ptr<> for lifetime management. + */ + frameBuffers_.reserve(numBuffers); + + /* Clone the controls associated with the camera3 request. */ + settings_ = CameraMetadata(camera3Request->settings); + + /* + * Create the CaptureRequest, stored as a unique_ptr<> to tie its + * lifetime to the descriptor. + */ + request_ = std::make_unique<CaptureRequest>(camera); } /* @@ -208,42 +260,64 @@ CameraDevice::Camera3RequestDescriptor::~Camera3RequestDescriptor() * back to the framework using the designated callbacks. */ -CameraDevice::CameraDevice(unsigned int id, const std::shared_ptr<Camera> &camera) - : id_(id), running_(false), camera_(camera), staticMetadata_(nullptr), +CameraDevice::CameraDevice(unsigned int id, std::shared_ptr<Camera> camera) + : id_(id), state_(State::Stopped), camera_(std::move(camera)), facing_(CAMERA_FACING_FRONT), orientation_(0) { camera_->requestCompleted.connect(this, &CameraDevice::requestComplete); - /* - * \todo Determine a more accurate value for this during - * streamConfiguration. 
- */ - maxJpegBufferSize_ = 13 << 20; /* 13631488 from USB HAL */ -} + maker_ = "libcamera"; + model_ = "cameraModel"; -CameraDevice::~CameraDevice() -{ - if (staticMetadata_) - delete staticMetadata_; + /* \todo Support getting properties on Android */ + std::ifstream fstream("/var/cache/camera/camera.prop"); + if (!fstream.is_open()) + return; + + std::string line; + while (std::getline(fstream, line)) { + std::string::size_type delimPos = line.find("="); + if (delimPos == std::string::npos) + continue; + std::string key = line.substr(0, delimPos); + std::string val = line.substr(delimPos + 1); - for (auto &it : requestTemplates_) - delete it.second; + if (!key.compare("ro.product.model")) + model_ = val; + else if (!key.compare("ro.product.manufacturer")) + maker_ = val; + } } -std::shared_ptr<CameraDevice> CameraDevice::create(unsigned int id, - const std::shared_ptr<Camera> &cam) +CameraDevice::~CameraDevice() = default; + +std::unique_ptr<CameraDevice> CameraDevice::create(unsigned int id, + std::shared_ptr<Camera> cam) { - CameraDevice *camera = new CameraDevice(id, cam); - return std::shared_ptr<CameraDevice>(camera); + return std::unique_ptr<CameraDevice>( + new CameraDevice(id, std::move(cam))); } /* - * Initialize the camera static information. + * Initialize the camera static information retrieved from the + * Camera::properties or from the cameraConfigData. + * + * cameraConfigData is optional for external camera devices and can be + * nullptr. + * * This method is called before the camera device is opened. */ -int CameraDevice::initialize() +int CameraDevice::initialize(const CameraConfigData *cameraConfigData) { - /* Initialize orientation and facing side of the camera. */ + /* + * Initialize orientation and facing side of the camera. + * + * If the libcamera::Camera provides those information as retrieved + * from firmware use them, otherwise fallback to values parsed from + * the configuration file. If the configuration file is not available + * the camera is external so its location and rotation can be safely + * defaulted. + */ const ControlList &properties = camera_->properties(); if (properties.contains(properties::Location)) { @@ -259,6 +333,22 @@ int CameraDevice::initialize() facing_ = CAMERA_FACING_EXTERNAL; break; } + + if (cameraConfigData && cameraConfigData->facing != -1 && + facing_ != cameraConfigData->facing) { + LOG(HAL, Warning) + << "Camera location does not match" + << " configuration file. 
Using " << facing_; + } + } else if (cameraConfigData) { + if (cameraConfigData->facing == -1) { + LOG(HAL, Error) + << "Camera facing not in configuration file"; + return -EINVAL; + } + facing_ = cameraConfigData->facing; + } else { + facing_ = CAMERA_FACING_EXTERNAL; } /* @@ -272,234 +362,24 @@ int CameraDevice::initialize() if (properties.contains(properties::Rotation)) { int rotation = properties.get(properties::Rotation); orientation_ = (360 - rotation) % 360; - } - - int ret = camera_->acquire(); - if (ret) { - LOG(HAL, Error) << "Failed to temporarily acquire the camera"; - return ret; - } - - ret = initializeStreamConfigurations(); - camera_->release(); - return ret; -} - -std::vector<Size> CameraDevice::getYUVResolutions(CameraConfiguration *cameraConfig, - const PixelFormat &pixelFormat, - const std::vector<Size> &resolutions) -{ - std::vector<Size> supportedResolutions; - - StreamConfiguration &cfg = cameraConfig->at(0); - for (const Size &res : resolutions) { - cfg.pixelFormat = pixelFormat; - cfg.size = res; - - CameraConfiguration::Status status = cameraConfig->validate(); - if (status != CameraConfiguration::Valid) { - LOG(HAL, Debug) << cfg.toString() << " not supported"; - continue; + if (cameraConfigData && cameraConfigData->rotation != -1 && + orientation_ != cameraConfigData->rotation) { + LOG(HAL, Warning) + << "Camera orientation does not match" + << " configuration file. Using " << orientation_; } - - LOG(HAL, Debug) << cfg.toString() << " supported"; - - supportedResolutions.push_back(res); - } - - return supportedResolutions; -} - -std::vector<Size> CameraDevice::getRawResolutions(const libcamera::PixelFormat &pixelFormat) -{ - std::unique_ptr<CameraConfiguration> cameraConfig = - camera_->generateConfiguration({ StreamRole::Raw }); - StreamConfiguration &cfg = cameraConfig->at(0); - const StreamFormats &formats = cfg.formats(); - std::vector<Size> supportedResolutions = formats.sizes(pixelFormat); - - return supportedResolutions; -} - -/* - * Initialize the format conversion map to translate from Android format - * identifier to libcamera pixel formats and fill in the list of supported - * stream configurations to be reported to the Android camera framework through - * the static stream configuration metadata. - */ -int CameraDevice::initializeStreamConfigurations() -{ - /* - * Get the maximum output resolutions - * \todo Get this from the camera properties once defined - */ - std::unique_ptr<CameraConfiguration> cameraConfig = - camera_->generateConfiguration({ StillCapture }); - if (!cameraConfig) { - LOG(HAL, Error) << "Failed to get maximum resolution"; - return -EINVAL; - } - StreamConfiguration &cfg = cameraConfig->at(0); - - /* - * \todo JPEG - Adjust the maximum available resolution by taking the - * JPEG encoder requirements into account (alignment and aspect ratio). - */ - const Size maxRes = cfg.size; - LOG(HAL, Debug) << "Maximum supported resolution: " << maxRes.toString(); - - /* - * Build the list of supported image resolutions. - * - * The resolutions listed in camera3Resolution are mandatory to be - * supported, up to the camera maximum resolution. - * - * Augment the list by adding resolutions calculated from the camera - * maximum one. - */ - std::vector<Size> cameraResolutions; - std::copy_if(camera3Resolutions.begin(), camera3Resolutions.end(), - std::back_inserter(cameraResolutions), - [&](const Size &res) { return res < maxRes; }); - - /* - * The Camera3 specification suggests adding 1/2 and 1/4 of the maximum - * resolution. 
- */ - for (unsigned int divider = 2;; divider <<= 1) { - Size derivedSize{ - maxRes.width / divider, - maxRes.height / divider, - }; - - if (derivedSize.width < 320 || - derivedSize.height < 240) - break; - - cameraResolutions.push_back(derivedSize); - } - cameraResolutions.push_back(maxRes); - - /* Remove duplicated entries from the list of supported resolutions. */ - std::sort(cameraResolutions.begin(), cameraResolutions.end()); - auto last = std::unique(cameraResolutions.begin(), cameraResolutions.end()); - cameraResolutions.erase(last, cameraResolutions.end()); - - /* - * Build the list of supported camera formats. - * - * To each Android format a list of compatible libcamera formats is - * associated. The first libcamera format that tests successful is added - * to the format translation map used when configuring the streams. - * It is then tested against the list of supported camera resolutions to - * build the stream configuration map reported through the camera static - * metadata. - */ - for (const auto &format : camera3FormatsMap) { - int androidFormat = format.first; - const Camera3Format &camera3Format = format.second; - const std::vector<PixelFormat> &libcameraFormats = - camera3Format.libcameraFormats; - - LOG(HAL, Debug) << "Trying to map Android format " - << camera3Format.name; - - /* - * JPEG is always supported, either produced directly by the - * camera, or encoded in the HAL. - */ - if (androidFormat == HAL_PIXEL_FORMAT_BLOB) { - formatsMap_[androidFormat] = formats::MJPEG; - LOG(HAL, Debug) << "Mapped Android format " - << camera3Format.name << " to " - << formats::MJPEG.toString() - << " (fixed mapping)"; - continue; - } - - /* - * Test the libcamera formats that can produce images - * compatible with the format defined by Android. - */ - PixelFormat mappedFormat; - for (const PixelFormat &pixelFormat : libcameraFormats) { - - LOG(HAL, Debug) << "Testing " << pixelFormat.toString(); - - /* - * The stream configuration size can be adjusted, - * not the pixel format. - * - * \todo This could be simplified once all pipeline - * handlers will report the StreamFormats list of - * supported formats. - */ - cfg.pixelFormat = pixelFormat; - - CameraConfiguration::Status status = cameraConfig->validate(); - if (status != CameraConfiguration::Invalid && - cfg.pixelFormat == pixelFormat) { - mappedFormat = pixelFormat; - break; - } - } - - if (!mappedFormat.isValid()) { - /* If the format is not mandatory, skip it. */ - if (!camera3Format.mandatory) - continue; - + } else if (cameraConfigData) { + if (cameraConfigData->rotation == -1) { LOG(HAL, Error) - << "Failed to map mandatory Android format " - << camera3Format.name << " (" - << utils::hex(androidFormat) << "): aborting"; + << "Camera rotation not in configuration file"; return -EINVAL; } - - /* - * Record the mapping and then proceed to generate the - * stream configurations map, by testing the image resolutions. 
- */ - formatsMap_[androidFormat] = mappedFormat; - LOG(HAL, Debug) << "Mapped Android format " - << camera3Format.name << " to " - << mappedFormat.toString(); - - std::vector<Size> resolutions; - const PixelFormatInfo &info = PixelFormatInfo::info(mappedFormat); - if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW) - resolutions = getRawResolutions(mappedFormat); - else - resolutions = getYUVResolutions(cameraConfig.get(), - mappedFormat, - cameraResolutions); - - for (const Size &res : resolutions) { - streamConfigurations_.push_back({ res, androidFormat }); - - /* - * If the format is HAL_PIXEL_FORMAT_YCbCr_420_888 - * from which JPEG is produced, add an entry for - * the JPEG stream. - * - * \todo Wire the JPEG encoder to query the supported - * sizes provided a list of formats it can encode. - * - * \todo Support JPEG streams produced by the Camera - * natively. - */ - if (androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888) - streamConfigurations_.push_back( - { res, HAL_PIXEL_FORMAT_BLOB }); - } + orientation_ = cameraConfigData->rotation; + } else { + orientation_ = 0; } - LOG(HAL, Debug) << "Collected stream configuration map: "; - for (const auto &entry : streamConfigurations_) - LOG(HAL, Debug) << "{ " << entry.resolution.toString() << " - " - << utils::hex(entry.androidFormat) << " }"; - - return 0; + return capabilities_.initialize(camera_, orientation_, facing_); } /* @@ -532,571 +412,56 @@ int CameraDevice::open(const hw_module_t *hardwareModule) void CameraDevice::close() { - camera_->stop(); - camera_->release(); - - running_ = false; -} - -void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks) -{ - callbacks_ = callbacks; -} - -std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize() -{ - /* - * \todo Keep this in sync with the actual number of entries. - * Currently: 51 entries, 687 bytes of static metadata - */ - uint32_t numEntries = 51; - uint32_t byteSize = 687; + streams_.clear(); - /* - * Calculate space occupation in bytes for dynamically built metadata - * entries. - * - * Each stream configuration entry requires 52 bytes: - * 4 32bits integers for ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS - * 4 64bits integers for ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS - */ - byteSize += streamConfigurations_.size() * 48; + stop(); - return std::make_tuple(numEntries, byteSize); + camera_->release(); } -/* - * Return static information for the camera. - */ -const camera_metadata_t *CameraDevice::getStaticMetadata() +void CameraDevice::flush() { - if (staticMetadata_) - return staticMetadata_->get(); - - /* - * The here reported metadata are enough to implement a basic capture - * example application, but a real camera implementation will require - * more. - */ - uint32_t numEntries; - uint32_t byteSize; - std::tie(numEntries, byteSize) = calculateStaticMetadataSize(); - staticMetadata_ = new CameraMetadata(numEntries, byteSize); - if (!staticMetadata_->isValid()) { - LOG(HAL, Error) << "Failed to allocate static metadata"; - delete staticMetadata_; - staticMetadata_ = nullptr; - return nullptr; - } - - /* Color correction static metadata. */ - std::vector<uint8_t> aberrationModes = { - ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF, - }; - staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, - aberrationModes.data(), - aberrationModes.size()); - - /* Control static metadata. 
*/ - std::vector<uint8_t> aeAvailableAntiBandingModes = { - ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, - ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, - ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, - ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, - aeAvailableAntiBandingModes.data(), - aeAvailableAntiBandingModes.size()); - - std::vector<uint8_t> aeAvailableModes = { - ANDROID_CONTROL_AE_MODE_ON, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES, - aeAvailableModes.data(), - aeAvailableModes.size()); - - std::vector<int32_t> availableAeFpsTarget = { - 15, 30, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, - availableAeFpsTarget.data(), - availableAeFpsTarget.size()); - - std::vector<int32_t> aeCompensationRange = { - 0, 0, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE, - aeCompensationRange.data(), - aeCompensationRange.size()); - - const camera_metadata_rational_t aeCompensationStep[] = { - { 0, 1 } - }; - staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP, - aeCompensationStep, 1); - - std::vector<uint8_t> availableAfModes = { - ANDROID_CONTROL_AF_MODE_OFF, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES, - availableAfModes.data(), - availableAfModes.size()); - - std::vector<uint8_t> availableEffects = { - ANDROID_CONTROL_EFFECT_MODE_OFF, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS, - availableEffects.data(), - availableEffects.size()); - - std::vector<uint8_t> availableSceneModes = { - ANDROID_CONTROL_SCENE_MODE_DISABLED, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, - availableSceneModes.data(), - availableSceneModes.size()); - - std::vector<uint8_t> availableStabilizationModes = { - ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, - availableStabilizationModes.data(), - availableStabilizationModes.size()); - - std::vector<uint8_t> availableAwbModes = { - ANDROID_CONTROL_AWB_MODE_OFF, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES, - availableAwbModes.data(), - availableAwbModes.size()); - - std::vector<int32_t> availableMaxRegions = { - 0, 0, 0, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS, - availableMaxRegions.data(), - availableMaxRegions.size()); - - std::vector<uint8_t> sceneModesOverride = { - ANDROID_CONTROL_AE_MODE_ON, - ANDROID_CONTROL_AWB_MODE_AUTO, - ANDROID_CONTROL_AF_MODE_AUTO, - }; - staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, - sceneModesOverride.data(), - sceneModesOverride.size()); - - uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE; - staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE, - &aeLockAvailable, 1); - - uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE; - staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, - &awbLockAvailable, 1); - - char availableControlModes = ANDROID_CONTROL_MODE_AUTO; - staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES, - &availableControlModes, 1); - - /* JPEG static metadata. */ - std::vector<int32_t> availableThumbnailSizes = { - 0, 0, - }; - staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, - availableThumbnailSizes.data(), - availableThumbnailSizes.size()); - - /* - * \todo Calculate the maximum JPEG buffer size by asking the encoder - * giving the maximum frame size required. 
- */ - staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, &maxJpegBufferSize_, 1); - - /* Sensor static metadata. */ - int32_t pixelArraySize[] = { - 2592, 1944, - }; - staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, - &pixelArraySize, 2); - - int32_t sensorSizes[] = { - 0, 0, 2560, 1920, - }; - staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, - &sensorSizes, 4); - - int32_t sensitivityRange[] = { - 32, 2400, - }; - staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, - &sensitivityRange, 2); - - uint16_t filterArr = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG; - staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, - &filterArr, 1); - - int64_t exposureTimeRange[] = { - 100000, 200000000, - }; - staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, - &exposureTimeRange, 2); - - staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, &orientation_, 1); - - std::vector<int32_t> testPatterModes = { - ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, - }; - staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, - testPatterModes.data(), - testPatterModes.size()); - - std::vector<float> physicalSize = { - 2592, 1944, - }; - staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, - physicalSize.data(), - physicalSize.size()); - - uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN; - staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, - ×tampSource, 1); - - /* Statistics static metadata. */ - uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; - staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, - &faceDetectMode, 1); - - int32_t maxFaceCount = 0; - staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, - &maxFaceCount, 1); - - /* Sync static metadata. */ - int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN; - staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1); - - /* Flash static metadata. */ - char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; - staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE, - &flashAvailable, 1); - - /* Lens static metadata. */ - std::vector<float> lensApertures = { - 2.53 / 100, - }; - staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES, - lensApertures.data(), - lensApertures.size()); - - uint8_t lensFacing; - switch (facing_) { - default: - case CAMERA_FACING_FRONT: - lensFacing = ANDROID_LENS_FACING_FRONT; - break; - case CAMERA_FACING_BACK: - lensFacing = ANDROID_LENS_FACING_BACK; - break; - case CAMERA_FACING_EXTERNAL: - lensFacing = ANDROID_LENS_FACING_EXTERNAL; - break; - } - staticMetadata_->addEntry(ANDROID_LENS_FACING, &lensFacing, 1); - - std::vector<float> lensFocalLenghts = { - 1, - }; - staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, - lensFocalLenghts.data(), - lensFocalLenghts.size()); - - std::vector<uint8_t> opticalStabilizations = { - ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF, - }; - staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, - opticalStabilizations.data(), - opticalStabilizations.size()); - - float hypeFocalDistance = 0; - staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, - &hypeFocalDistance, 1); - - float minFocusDistance = 0; - staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, - &minFocusDistance, 1); - - /* Noise reduction modes. 
*/ - uint8_t noiseReductionModes = ANDROID_NOISE_REDUCTION_MODE_OFF; - staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, - &noiseReductionModes, 1); - - /* Scaler static metadata. */ - float maxDigitalZoom = 1; - staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, - &maxDigitalZoom, 1); - - std::vector<uint32_t> availableStreamConfigurations; - availableStreamConfigurations.reserve(streamConfigurations_.size() * 4); - for (const auto &entry : streamConfigurations_) { - availableStreamConfigurations.push_back(entry.androidFormat); - availableStreamConfigurations.push_back(entry.resolution.width); - availableStreamConfigurations.push_back(entry.resolution.height); - availableStreamConfigurations.push_back( - ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT); - } - staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, - availableStreamConfigurations.data(), - availableStreamConfigurations.size()); - - std::vector<int64_t> availableStallDurations = { - ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333, - }; - staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, - availableStallDurations.data(), - availableStallDurations.size()); - - /* \todo Collect the minimum frame duration from the camera. */ - std::vector<int64_t> minFrameDurations; - minFrameDurations.reserve(streamConfigurations_.size() * 4); - for (const auto &entry : streamConfigurations_) { - minFrameDurations.push_back(entry.androidFormat); - minFrameDurations.push_back(entry.resolution.width); - minFrameDurations.push_back(entry.resolution.height); - minFrameDurations.push_back(33333333); - } - staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, - minFrameDurations.data(), - minFrameDurations.size()); - - uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; - staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); - - /* Info static metadata. */ - uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; - staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, - &supportedHWLevel, 1); - - /* Request static metadata. */ - int32_t partialResultCount = 1; - staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, - &partialResultCount, 1); - - uint8_t maxPipelineDepth = 2; - staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, - &maxPipelineDepth, 1); - - /* LIMITED does not support reprocessing. */ - uint32_t maxNumInputStreams = 0; - staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, - &maxNumInputStreams, 1); - - std::vector<uint8_t> availableCapabilities = { - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE, - }; + { + MutexLocker stateLock(stateMutex_); + if (state_ != State::Running) + return; - /* Report if camera supports RAW. 
*/ - std::unique_ptr<CameraConfiguration> cameraConfig = - camera_->generateConfiguration({ StreamRole::Raw }); - if (cameraConfig && !cameraConfig->empty()) { - const PixelFormatInfo &info = - PixelFormatInfo::info(cameraConfig->at(0).pixelFormat); - if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW) - availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW); + state_ = State::Flushing; } - staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, - availableCapabilities.data(), - availableCapabilities.size()); - - std::vector<int32_t> availableCharacteristicsKeys = { - ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, - ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, - ANDROID_CONTROL_AE_AVAILABLE_MODES, - ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, - ANDROID_CONTROL_AE_COMPENSATION_RANGE, - ANDROID_CONTROL_AE_COMPENSATION_STEP, - ANDROID_CONTROL_AF_AVAILABLE_MODES, - ANDROID_CONTROL_AVAILABLE_EFFECTS, - ANDROID_CONTROL_AVAILABLE_SCENE_MODES, - ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, - ANDROID_CONTROL_AWB_AVAILABLE_MODES, - ANDROID_CONTROL_MAX_REGIONS, - ANDROID_CONTROL_SCENE_MODE_OVERRIDES, - ANDROID_CONTROL_AE_LOCK_AVAILABLE, - ANDROID_CONTROL_AWB_LOCK_AVAILABLE, - ANDROID_CONTROL_AVAILABLE_MODES, - ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, - ANDROID_JPEG_MAX_SIZE, - ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, - ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, - ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, - ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, - ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, - ANDROID_SENSOR_ORIENTATION, - ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, - ANDROID_SENSOR_INFO_PHYSICAL_SIZE, - ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, - ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, - ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, - ANDROID_SYNC_MAX_LATENCY, - ANDROID_FLASH_INFO_AVAILABLE, - ANDROID_LENS_INFO_AVAILABLE_APERTURES, - ANDROID_LENS_FACING, - ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, - ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, - ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, - ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, - ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, - ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, - ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, - ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, - ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, - ANDROID_SCALER_CROPPING_TYPE, - ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, - ANDROID_REQUEST_PARTIAL_RESULT_COUNT, - ANDROID_REQUEST_PIPELINE_MAX_DEPTH, - ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, - ANDROID_REQUEST_AVAILABLE_CAPABILITIES, - }; - staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, - availableCharacteristicsKeys.data(), - availableCharacteristicsKeys.size()); - - std::vector<int32_t> availableRequestKeys = { - ANDROID_CONTROL_AE_MODE, - ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, - ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, - ANDROID_CONTROL_AE_TARGET_FPS_RANGE, - ANDROID_CONTROL_AE_ANTIBANDING_MODE, - ANDROID_CONTROL_AE_LOCK, - ANDROID_CONTROL_AF_TRIGGER, - ANDROID_CONTROL_AWB_MODE, - ANDROID_CONTROL_AWB_LOCK, - ANDROID_FLASH_MODE, - ANDROID_STATISTICS_FACE_DETECT_MODE, - ANDROID_NOISE_REDUCTION_MODE, - ANDROID_COLOR_CORRECTION_ABERRATION_MODE, - ANDROID_LENS_APERTURE, - ANDROID_LENS_OPTICAL_STABILIZATION_MODE, - ANDROID_CONTROL_MODE, - ANDROID_CONTROL_CAPTURE_INTENT, - }; - staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, - availableRequestKeys.data(), - availableRequestKeys.size()); - - std::vector<int32_t> availableResultKeys 
= { - ANDROID_CONTROL_AE_STATE, - ANDROID_CONTROL_AE_LOCK, - ANDROID_CONTROL_AF_STATE, - ANDROID_CONTROL_AWB_STATE, - ANDROID_CONTROL_AWB_LOCK, - ANDROID_LENS_STATE, - ANDROID_SCALER_CROP_REGION, - ANDROID_SENSOR_TIMESTAMP, - ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, - ANDROID_SENSOR_EXPOSURE_TIME, - ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, - ANDROID_STATISTICS_SCENE_FLICKER, - ANDROID_JPEG_SIZE, - ANDROID_JPEG_QUALITY, - ANDROID_JPEG_ORIENTATION, - }; - staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, - availableResultKeys.data(), - availableResultKeys.size()); - - if (!staticMetadata_->isValid()) { - LOG(HAL, Error) << "Failed to construct static metadata"; - delete staticMetadata_; - staticMetadata_ = nullptr; - return nullptr; - } + worker_.stop(); + camera_->stop(); - return staticMetadata_->get(); + MutexLocker stateLock(stateMutex_); + state_ = State::Stopped; } -CameraMetadata *CameraDevice::requestTemplatePreview() +void CameraDevice::stop() { - /* - * \todo Keep this in sync with the actual number of entries. - * Currently: 20 entries, 35 bytes - */ - CameraMetadata *requestTemplate = new CameraMetadata(20, 35); - if (!requestTemplate->isValid()) { - delete requestTemplate; - return nullptr; - } - - uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; - requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE, - &aeMode, 1); - - int32_t aeExposureCompensation = 0; - requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, - &aeExposureCompensation, 1); + MutexLocker stateLock(stateMutex_); + if (state_ == State::Stopped) + return; - uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; - requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, - &aePrecaptureTrigger, 1); - - uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; - requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK, - &aeLock, 1); - - std::vector<int32_t> aeFpsTarget = { - 15, 30, - }; - requestTemplate->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, - aeFpsTarget.data(), - aeFpsTarget.size()); - - uint8_t aeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; - requestTemplate->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE, - &aeAntibandingMode, 1); - - uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; - requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER, - &afTrigger, 1); - - uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; - requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE, - &awbMode, 1); - - uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; - requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK, - &awbLock, 1); - - uint8_t flashMode = ANDROID_FLASH_MODE_OFF; - requestTemplate->addEntry(ANDROID_FLASH_MODE, - &flashMode, 1); - - uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; - requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE, - &faceDetectMode, 1); - - uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF; - requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE, - &noiseReduction, 1); - - uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF; - requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, - &aberrationMode, 1); - - uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; - requestTemplate->addEntry(ANDROID_CONTROL_MODE, &controlMode, 1); + worker_.stop(); + camera_->stop(); - float lensAperture = 2.53 / 100; - requestTemplate->addEntry(ANDROID_LENS_APERTURE, &lensAperture, 1); + descriptors_.clear(); + state_ = State::Stopped; +} - uint8_t opticalStabilization = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; - 
requestTemplate->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, - &opticalStabilization, 1); +unsigned int CameraDevice::maxJpegBufferSize() const +{ + return capabilities_.maxJpegBufferSize(); +} - uint8_t captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; - requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT, - &captureIntent, 1); +void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks) +{ + callbacks_ = callbacks; +} - return requestTemplate; +const camera_metadata_t *CameraDevice::getStaticMetadata() +{ + return capabilities_.staticMetadata()->get(); } /* @@ -1109,57 +474,47 @@ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type) return it->second->get(); /* Use the capture intent matching the requested template type. */ - CameraMetadata *requestTemplate; + std::unique_ptr<CameraMetadata> requestTemplate; uint8_t captureIntent; switch (type) { case CAMERA3_TEMPLATE_PREVIEW: captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; + requestTemplate = capabilities_.requestTemplatePreview(); break; case CAMERA3_TEMPLATE_STILL_CAPTURE: + /* + * Use the preview template for still capture, they only differ + * for the torch mode we currently do not support. + */ captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; + requestTemplate = capabilities_.requestTemplatePreview(); break; case CAMERA3_TEMPLATE_VIDEO_RECORD: captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; + requestTemplate = capabilities_.requestTemplateVideo(); break; case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; + requestTemplate = capabilities_.requestTemplateVideo(); break; + /* \todo Implement templates generation for the remaining use cases. */ case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: - captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; - break; case CAMERA3_TEMPLATE_MANUAL: - captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL; - break; default: - LOG(HAL, Error) << "Invalid template request type: " << type; + LOG(HAL, Error) << "Unsupported template request type: " << type; return nullptr; } - requestTemplate = requestTemplatePreview(); if (!requestTemplate || !requestTemplate->isValid()) { LOG(HAL, Error) << "Failed to construct request template"; - delete requestTemplate; return nullptr; } requestTemplate->updateEntry(ANDROID_CONTROL_CAPTURE_INTENT, - &captureIntent, 1); - - requestTemplates_[type] = requestTemplate; - return requestTemplate->get(); -} + captureIntent); -PixelFormat CameraDevice::toPixelFormat(int format) -{ - /* Translate Android format code to libcamera pixel format. */ - auto it = formatsMap_.find(format); - if (it == formatsMap_.end()) { - LOG(HAL, Error) << "Requested format " << utils::hex(format) - << " not supported"; - return PixelFormat(); - } - - return it->second; + requestTemplates_[type] = std::move(requestTemplate); + return requestTemplates_[type]->get(); } /* @@ -1168,12 +523,25 @@ PixelFormat CameraDevice::toPixelFormat(int format) */ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list) { + /* Before any configuration attempt, stop the camera. */ + stop(); + + if (stream_list->num_streams == 0) { + LOG(HAL, Error) << "No streams in configuration"; + return -EINVAL; + } + +#if defined(OS_CHROMEOS) + if (!validateCropRotate(*stream_list)) + return -EINVAL; +#endif + /* * Generate an empty configuration, and construct a StreamConfiguration * for each camera3_stream to add to it. 
*/ - config_ = camera_->generateConfiguration(); - if (!config_) { + std::unique_ptr<CameraConfiguration> config = camera_->generateConfiguration(); + if (!config) { LOG(HAL, Error) << "Failed to generate camera configuration"; return -EINVAL; } @@ -1186,24 +554,44 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list) streams_.clear(); streams_.reserve(stream_list->num_streams); + std::vector<Camera3StreamConfig> streamConfigs; + streamConfigs.reserve(stream_list->num_streams); + /* First handle all non-MJPEG streams. */ camera3_stream_t *jpegStream = nullptr; for (unsigned int i = 0; i < stream_list->num_streams; ++i) { camera3_stream_t *stream = stream_list->streams[i]; Size size(stream->width, stream->height); - PixelFormat format = toPixelFormat(stream->format); + PixelFormat format = capabilities_.toPixelFormat(stream->format); LOG(HAL, Info) << "Stream #" << i << ", direction: " << stream->stream_type << ", width: " << stream->width << ", height: " << stream->height << ", format: " << utils::hex(stream->format) + << ", rotation: " << rotationToString(stream->rotation) +#if defined(OS_CHROMEOS) + << ", crop_rotate_scale_degrees: " + << rotationToString(stream->crop_rotate_scale_degrees) +#endif << " (" << format.toString() << ")"; if (!format.isValid()) return -EINVAL; + /* \todo Support rotation. */ + if (stream->rotation != CAMERA3_STREAM_ROTATION_0) { + LOG(HAL, Error) << "Rotation is not supported"; + return -EINVAL; + } +#if defined(OS_CHROMEOS) + if (stream->crop_rotate_scale_degrees != CAMERA3_STREAM_ROTATION_0) { + LOG(HAL, Error) << "Rotation is not supported"; + return -EINVAL; + } +#endif + /* Defer handling of MJPEG streams until all others are known. */ if (stream->format == HAL_PIXEL_FORMAT_BLOB) { if (jpegStream) { @@ -1216,23 +604,25 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list) continue; } - StreamConfiguration streamConfiguration; - streamConfiguration.size = size; - streamConfiguration.pixelFormat = format; + Camera3StreamConfig streamConfig; + streamConfig.streams = { { stream, CameraStream::Type::Direct } }; + streamConfig.config.size = size; + streamConfig.config.pixelFormat = format; + streamConfigs.push_back(std::move(streamConfig)); - config_->addConfiguration(streamConfiguration); - unsigned int index = config_->size() - 1; - streams_.emplace_back(format, size, index); - stream->priv = static_cast<void *>(&streams_.back()); + /* This stream will be produced by hardware. */ + stream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; } /* Now handle the MJPEG streams, adding a new stream if required. */ if (jpegStream) { + CameraStream::Type type; int index = -1; /* Search for a compatible stream in the non-JPEG ones. */ - for (unsigned int i = 0; i < config_->size(); i++) { - StreamConfiguration &cfg = config_->at(i); + for (size_t i = 0; i < streamConfigs.size(); ++i) { + Camera3StreamConfig &streamConfig = streamConfigs[i]; + const auto &cfg = streamConfig.config; /* * \todo The PixelFormat must also be compatible with @@ -1245,7 +635,15 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list) LOG(HAL, Info) << "Android JPEG stream mapped to libcamera stream " << i; + type = CameraStream::Type::Mapped; index = i; + + /* + * The source stream will be read by software to + * produce the JPEG stream. 
+ */ + camera3_stream_t *stream = streamConfig.streams[0].stream; + stream->usage |= GRALLOC_USAGE_SW_READ_OFTEN; break; } @@ -1254,80 +652,82 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list) * introduce a new stream to satisfy the request requirements. */ if (index < 0) { - StreamConfiguration streamConfiguration; - /* * \todo The pixelFormat should be a 'best-fit' choice * and may require a validation cycle. This is not yet * handled, and should be considered as part of any * stream configuration reworks. */ - streamConfiguration.size.width = jpegStream->width; - streamConfiguration.size.height = jpegStream->height; - streamConfiguration.pixelFormat = formats::NV12; + Camera3StreamConfig streamConfig; + streamConfig.config.size.width = jpegStream->width; + streamConfig.config.size.height = jpegStream->height; + streamConfig.config.pixelFormat = formats::NV12; + streamConfigs.push_back(std::move(streamConfig)); - LOG(HAL, Info) << "Adding " << streamConfiguration.toString() + LOG(HAL, Info) << "Adding " << streamConfig.config.toString() << " for MJPEG support"; - config_->addConfiguration(streamConfiguration); - index = config_->size() - 1; + type = CameraStream::Type::Internal; + index = streamConfigs.size() - 1; } - StreamConfiguration &cfg = config_->at(index); + /* The JPEG stream will be produced by software. */ + jpegStream->usage |= GRALLOC_USAGE_SW_WRITE_OFTEN; - /* - * Construct a software encoder for the MJPEG streams from the - * chosen libcamera source stream. - */ - Encoder *encoder = new EncoderLibJpeg(); - int ret = encoder->configure(cfg); - if (ret) { - LOG(HAL, Error) << "Failed to configure encoder"; - delete encoder; - return ret; - } + streamConfigs[index].streams.push_back({ jpegStream, type }); + } + + sortCamera3StreamConfigs(streamConfigs, jpegStream); + for (const auto &streamConfig : streamConfigs) { + config->addConfiguration(streamConfig.config); - streams_.emplace_back(formats::MJPEG, cfg.size, index, encoder); - jpegStream->priv = static_cast<void *>(&streams_.back()); + for (auto &stream : streamConfig.streams) { + streams_.emplace_back(this, config.get(), stream.type, + stream.stream, config->size() - 1); + stream.stream->priv = static_cast<void *>(&streams_.back()); + } } - switch (config_->validate()) { + switch (config->validate()) { case CameraConfiguration::Valid: break; case CameraConfiguration::Adjusted: LOG(HAL, Info) << "Camera configuration adjusted"; - for (const StreamConfiguration &cfg : *config_) + for (const StreamConfiguration &cfg : *config) LOG(HAL, Info) << " - " << cfg.toString(); - config_.reset(); return -EINVAL; case CameraConfiguration::Invalid: LOG(HAL, Info) << "Camera configuration invalid"; - config_.reset(); return -EINVAL; } - for (unsigned int i = 0; i < stream_list->num_streams; ++i) { - camera3_stream_t *stream = stream_list->streams[i]; - CameraStream *cameraStream = static_cast<CameraStream *>(stream->priv); - StreamConfiguration &cfg = config_->at(cameraStream->index()); - - /* Use the bufferCount confirmed by the validation process. */ - stream->max_buffers = cfg.bufferCount; - } - /* * Once the CameraConfiguration has been adjusted/validated * it can be applied to the camera. 
*/ - int ret = camera_->configure(config_.get()); + int ret = camera_->configure(config.get()); if (ret) { LOG(HAL, Error) << "Failed to configure camera '" << camera_->id() << "'"; return ret; } + /* + * Configure the HAL CameraStream instances using the associated + * StreamConfiguration and set the number of required buffers in + * the Android camera3_stream_t. + */ + for (CameraStream &cameraStream : streams_) { + ret = cameraStream.configure(); + if (ret) { + LOG(HAL, Error) << "Failed to configure camera stream"; + return ret; + } + } + + config_ = std::move(config); return 0; } @@ -1361,85 +761,234 @@ FrameBuffer *CameraDevice::createFrameBuffer(const buffer_handle_t camera3buffer return new FrameBuffer(std::move(planes)); } -int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Request) +int CameraDevice::processControls(Camera3RequestDescriptor *descriptor) +{ + const CameraMetadata &settings = descriptor->settings_; + if (!settings.isValid()) + return 0; + + /* Translate the Android request settings to libcamera controls. */ + camera_metadata_ro_entry_t entry; + if (settings.getEntry(ANDROID_SCALER_CROP_REGION, &entry)) { + const int32_t *data = entry.data.i32; + Rectangle cropRegion{ data[0], data[1], + static_cast<unsigned int>(data[2]), + static_cast<unsigned int>(data[3]) }; + ControlList &controls = descriptor->request_->controls(); + controls.set(controls::ScalerCrop, cropRegion); + } + + return 0; +} + +void CameraDevice::abortRequest(camera3_capture_request_t *request) +{ + notifyError(request->frame_number, nullptr, CAMERA3_MSG_ERROR_REQUEST); + + camera3_capture_result_t result = {}; + result.num_output_buffers = request->num_output_buffers; + result.frame_number = request->frame_number; + result.partial_result = 0; + + std::vector<camera3_stream_buffer_t> resultBuffers(result.num_output_buffers); + for (auto [i, buffer] : utils::enumerate(resultBuffers)) { + buffer = request->output_buffers[i]; + buffer.release_fence = request->output_buffers[i].acquire_fence; + buffer.acquire_fence = -1; + buffer.status = CAMERA3_BUFFER_STATUS_ERROR; + } + result.output_buffers = resultBuffers.data(); + + callbacks_->process_capture_result(callbacks_, &result); +} + +bool CameraDevice::isValidRequest(camera3_capture_request_t *camera3Request) const { - if (!camera3Request->num_output_buffers) { + if (!camera3Request) { + LOG(HAL, Error) << "No capture request provided"; + return false; + } + + if (!camera3Request->num_output_buffers || + !camera3Request->output_buffers) { LOG(HAL, Error) << "No output buffers provided"; - return -EINVAL; + return false; } - /* Start the camera if that's the first request we handle. */ - if (!running_) { - int ret = camera_->start(); - if (ret) { - LOG(HAL, Error) << "Failed to start camera"; - return ret; + /* configureStreams() has not been called or has failed. 
*/ + if (streams_.empty() || !config_) { + LOG(HAL, Error) << "No stream is configured"; + return false; + } + + for (uint32_t i = 0; i < camera3Request->num_output_buffers; i++) { + const camera3_stream_buffer_t &outputBuffer = + camera3Request->output_buffers[i]; + if (!outputBuffer.buffer || !(*outputBuffer.buffer)) { + LOG(HAL, Error) << "Invalid native handle"; + return false; + } + + const native_handle_t *handle = *outputBuffer.buffer; + constexpr int kNativeHandleMaxFds = 1024; + if (handle->numFds < 0 || handle->numFds > kNativeHandleMaxFds) { + LOG(HAL, Error) + << "Invalid number of fds (" << handle->numFds + << ") in buffer " << i; + return false; + } + + constexpr int kNativeHandleMaxInts = 1024; + if (handle->numInts < 0 || handle->numInts > kNativeHandleMaxInts) { + LOG(HAL, Error) + << "Invalid number of ints (" << handle->numInts + << ") in buffer " << i; + return false; } - running_ = true; + const camera3_stream *camera3Stream = outputBuffer.stream; + if (!camera3Stream) + return false; + + const CameraStream *cameraStream = + static_cast<CameraStream *>(camera3Stream->priv); + + auto found = std::find_if(streams_.begin(), streams_.end(), + [cameraStream](const CameraStream &stream) { + return &stream == cameraStream; + }); + if (found == streams_.end()) { + LOG(HAL, Error) + << "No corresponding configured stream found"; + return false; + } } - /* - * Queue a request for the Camera with the provided dmabuf file - * descriptors. - */ - const camera3_stream_buffer_t *camera3Buffers = - camera3Request->output_buffers; + return true; +} + +int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Request) +{ + if (!isValidRequest(camera3Request)) + return -EINVAL; /* * Save the request descriptors for use at completion time. * The descriptor and the associated memory reserved here are freed * at request complete time. */ - Camera3RequestDescriptor *descriptor = - new Camera3RequestDescriptor(camera3Request->frame_number, - camera3Request->num_output_buffers); + Camera3RequestDescriptor descriptor(camera_.get(), camera3Request); - Request *request = - camera_->createRequest(reinterpret_cast<uint64_t>(descriptor)); - - for (unsigned int i = 0; i < descriptor->numBuffers; ++i) { - CameraStream *cameraStream = - static_cast<CameraStream *>(camera3Buffers[i].stream->priv); + /* + * \todo The Android request model is incremental, settings passed in + * previous requests are to be effective until overridden explicitly in + * a new request. Do we need to cache settings incrementally here, or is + * it handled by the Android camera service ? + */ + if (camera3Request->settings) + lastSettings_ = camera3Request->settings; + else + descriptor.settings_ = lastSettings_; + + LOG(HAL, Debug) << "Queueing request " << descriptor.request_->cookie() + << " with " << descriptor.buffers_.size() << " streams"; + for (unsigned int i = 0; i < descriptor.buffers_.size(); ++i) { + const camera3_stream_buffer_t &camera3Buffer = descriptor.buffers_[i]; + camera3_stream *camera3Stream = camera3Buffer.stream; + CameraStream *cameraStream = static_cast<CameraStream *>(camera3Stream->priv); + + std::stringstream ss; + ss << i << " - (" << camera3Stream->width << "x" + << camera3Stream->height << ")" + << "[" << utils::hex(camera3Stream->format) << "] -> " + << "(" << cameraStream->configuration().size.toString() << ")[" + << cameraStream->configuration().pixelFormat.toString() << "]"; /* - * Keep track of which stream the request belongs to and store - * the native buffer handles. 
+
+	LOG(HAL, Debug) << "Queueing request " << descriptor.request_->cookie()
+			<< " with " << descriptor.buffers_.size() << " streams";
+	for (unsigned int i = 0; i < descriptor.buffers_.size(); ++i) {
+		const camera3_stream_buffer_t &camera3Buffer = descriptor.buffers_[i];
+		camera3_stream *camera3Stream = camera3Buffer.stream;
+		CameraStream *cameraStream = static_cast<CameraStream *>(camera3Stream->priv);
+
+		std::stringstream ss;
+		ss << i << " - (" << camera3Stream->width << "x"
+		   << camera3Stream->height << ")"
+		   << "[" << utils::hex(camera3Stream->format) << "] -> "
+		   << "(" << cameraStream->configuration().size.toString() << ")["
+		   << cameraStream->configuration().pixelFormat.toString() << "]";
 
 		/*
-		 * Keep track of which stream the request belongs to and store
-		 * the native buffer handles.
+		 * Inspect the camera stream type, create buffers as needed
+		 * and add them to the Request if required.
 		 */
-		descriptor->buffers[i].stream = camera3Buffers[i].stream;
-		descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
-
-		/* Software streams are handled after hardware streams complete. */
-		if (cameraStream->format() == formats::MJPEG)
+		FrameBuffer *buffer = nullptr;
+		switch (cameraStream->type()) {
+		case CameraStream::Type::Mapped:
+			/*
+			 * Mapped streams don't need buffers added to the
+			 * Request.
+			 */
+			LOG(HAL, Debug) << ss.str() << " (mapped)";
 			continue;
 
-		/*
-		 * Create a libcamera buffer using the dmabuf descriptors of
-		 * the camera3Buffer for each stream. The FrameBuffer is
-		 * directly associated with the Camera3RequestDescriptor for
-		 * lifetime management only.
-		 */
-		FrameBuffer *buffer = createFrameBuffer(*camera3Buffers[i].buffer);
+		case CameraStream::Type::Direct:
+			/*
+			 * Create a libcamera buffer using the dmabuf
+			 * descriptors of the camera3Buffer for each stream and
+			 * associate it with the Camera3RequestDescriptor for
+			 * lifetime management only.
+			 */
+			buffer = createFrameBuffer(*camera3Buffer.buffer);
+			descriptor.frameBuffers_.emplace_back(buffer);
+			LOG(HAL, Debug) << ss.str() << " (direct)";
+			break;
+
+		case CameraStream::Type::Internal:
+			/*
+			 * Get the frame buffer from the CameraStream internal
+			 * buffer pool.
+			 *
+			 * The buffer has to be returned to the CameraStream
+			 * once it has been processed.
+			 */
+			buffer = cameraStream->getBuffer();
+			LOG(HAL, Debug) << ss.str() << " (internal)";
+			break;
+		}
+
 		if (!buffer) {
 			LOG(HAL, Error) << "Failed to create buffer";
-			delete request;
-			delete descriptor;
 			return -ENOMEM;
 		}
 
-		descriptor->frameBuffers.emplace_back(buffer);
-
-		StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index());
-		Stream *stream = streamConfiguration->stream();
-		request->addBuffer(stream, buffer);
+		descriptor.request_->addBuffer(cameraStream->stream(), buffer,
+					       camera3Buffer.acquire_fence);
 	}
 
-	int ret = camera_->queueRequest(request);
-	if (ret) {
-		LOG(HAL, Error) << "Failed to queue request";
-		delete request;
-		delete descriptor;
+	/*
+	 * Translate controls from Android to libcamera and queue the request
+	 * to the CameraWorker thread.
+	 */
+	int ret = processControls(&descriptor);
+	if (ret)
 		return ret;
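For readers without camera_stream.h at hand, the switch above implies roughly the following stream types; the enum shape is inferred from its use here, not quoted from the header:

	/* Assumed shape of CameraStream::Type (see camera_stream.h). */
	enum class Type {
		Direct,   /* the Android buffer is queued to the Camera as-is */
		Internal, /* HAL-owned buffer; the result is post-processed
			   * into the Android buffer */
		Mapped,   /* derived from another stream's output; nothing is
			   * added to the Request */
	};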
+
+	/*
+	 * If a flush is in progress, abort the request. If the camera has been
+	 * stopped, we have to restart it to be able to process the request.
+	 */
+	MutexLocker stateLock(stateMutex_);
+
+	if (state_ == State::Flushing) {
+		abortRequest(camera3Request);
+		return 0;
+	}
+
+	if (state_ == State::Stopped) {
+		worker_.start();
+
+		ret = camera_->start();
+		if (ret) {
+			LOG(HAL, Error) << "Failed to start camera";
+			worker_.stop();
+			return ret;
+		}
+
+		state_ = State::Running;
+	}
+
+	worker_.queueRequest(descriptor.request_.get());
+
+	{
+		MutexLocker descriptorsLock(descriptorsMutex_);
+		descriptors_[descriptor.request_->cookie()] = std::move(descriptor);
 	}
 
 	return 0;
@@ -1447,152 +996,127 @@ int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Reques
 
 void CameraDevice::requestComplete(Request *request)
 {
-	const Request::BufferMap &buffers = request->buffers();
-	camera3_buffer_status status = CAMERA3_BUFFER_STATUS_OK;
-	std::unique_ptr<CameraMetadata> resultMetadata;
-	Camera3RequestDescriptor *descriptor =
-		reinterpret_cast<Camera3RequestDescriptor *>(request->cookie());
+	decltype(descriptors_)::node_type node;
+	{
+		MutexLocker descriptorsLock(descriptorsMutex_);
+		auto it = descriptors_.find(request->cookie());
+		if (it == descriptors_.end()) {
+			/*
+			 * \todo Clarify if the Camera has to be closed on
+			 * ERROR_DEVICE and possibly demote the Fatal to simple
+			 * Error.
+			 */
+			notifyError(0, nullptr, CAMERA3_MSG_ERROR_DEVICE);
+			LOG(HAL, Fatal)
+				<< "Unknown request: " << request->cookie();
+
+			return;
+		}
+
+		node = descriptors_.extract(it);
+	}
+	Camera3RequestDescriptor &descriptor = node.mapped();
+
+	/*
+	 * Prepare the capture result for the Android camera stack.
+	 *
+	 * The buffer status is set to OK and later changed to ERROR if
+	 * post-processing/compression fails.
+	 */
+	camera3_capture_result_t captureResult = {};
+	captureResult.frame_number = descriptor.frameNumber_;
+	captureResult.num_output_buffers = descriptor.buffers_.size();
+	for (camera3_stream_buffer_t &buffer : descriptor.buffers_) {
+		buffer.acquire_fence = -1;
+		buffer.release_fence = -1;
+		buffer.status = CAMERA3_BUFFER_STATUS_OK;
+	}
+	captureResult.output_buffers = descriptor.buffers_.data();
+	captureResult.partial_result = 1;
 
+	/*
+	 * If the Request has failed, abort the request by notifying the error
+	 * and complete the request with all buffers in error state.
+	 */
 	if (request->status() != Request::RequestComplete) {
-		LOG(HAL, Error) << "Request not successfully completed: "
+		LOG(HAL, Error) << "Request " << request->cookie()
+				<< " not successfully completed: "
 				<< request->status();
-		status = CAMERA3_BUFFER_STATUS_ERROR;
+
+		notifyError(descriptor.frameNumber_, nullptr,
+			    CAMERA3_MSG_ERROR_REQUEST);
+
+		captureResult.partial_result = 0;
+		for (camera3_stream_buffer_t &buffer : descriptor.buffers_)
+			buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+		callbacks_->process_capture_result(callbacks_, &captureResult);
+
+		return;
 	}
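The Stopped/Flushing/Running checks above imply a small state machine guarded by stateMutex_. A sketch of the assumed State declaration, inferred from the transitions in this hunk rather than quoted from camera_device.h:

	/* Assumed declaration, inferred from the transitions above. */
	enum class State {
		Stopped,  /* camera idle; started on the next request */
		Flushing, /* flush in progress; new requests are aborted */
		Running,  /* camera started and processing requests */
	};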
 
 	/*
-	 * \todo The timestamp used for the metadata is currently always taken
-	 * from the first buffer (which may be the first stream) in the Request.
-	 * It might be appropriate to return a 'correct' (as determined by
-	 * pipeline handlers) timestamp in the Request itself.
+	 * Notify shutter as soon as we have verified we have a valid request.
+	 *
+	 * \todo The shutter event notification should be sent to the framework
+	 * as soon as possible, earlier than request completion time.
 	 */
+	uint64_t sensorTimestamp = static_cast<uint64_t>(request->metadata()
							 .get(controls::SensorTimestamp));
+	notifyShutter(descriptor.frameNumber_, sensorTimestamp);
+
+	LOG(HAL, Debug) << "Request " << request->cookie() << " completed with "
+			<< descriptor.buffers_.size() << " streams";
+
+	/*
+	 * Generate the metadata associated with the captured buffers.
+	 *
+	 * Notify if the metadata generation has failed, but continue processing
+	 * buffers and return an empty metadata pack.
+	 */
-	FrameBuffer *buffer = buffers.begin()->second;
-	resultMetadata = getResultMetadata(descriptor->frameNumber,
-					   buffer->metadata().timestamp);
+	std::unique_ptr<CameraMetadata> resultMetadata = getResultMetadata(descriptor);
+	if (!resultMetadata) {
+		notifyError(descriptor.frameNumber_, nullptr, CAMERA3_MSG_ERROR_RESULT);
+
+		/* The camera framework expects an empty metadata pack on error. */
+		resultMetadata = std::make_unique<CameraMetadata>(0, 0);
+	}
 
 	/* Handle any JPEG compression. */
-	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
+	for (camera3_stream_buffer_t &buffer : descriptor.buffers_) {
 		CameraStream *cameraStream =
-			static_cast<CameraStream *>(descriptor->buffers[i].stream->priv);
-
-		if (cameraStream->format() != formats::MJPEG)
-			continue;
+			static_cast<CameraStream *>(buffer.stream->priv);
 
-		Encoder *encoder = cameraStream->encoder();
-		if (!encoder) {
-			LOG(HAL, Error) << "Failed to identify encoder";
+		if (cameraStream->camera3Stream().format != HAL_PIXEL_FORMAT_BLOB)
 			continue;
-		}
 
-		StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index());
-		Stream *stream = streamConfiguration->stream();
-		FrameBuffer *buffer = request->findBuffer(stream);
-		if (!buffer) {
+		FrameBuffer *src = request->findBuffer(cameraStream->stream());
+		if (!src) {
 			LOG(HAL, Error) << "Failed to find a source stream buffer";
+			buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+			notifyError(descriptor.frameNumber_, buffer.stream,
+				    CAMERA3_MSG_ERROR_BUFFER);
 			continue;
 		}
 
+		int ret = cameraStream->process(*src, *buffer.buffer,
+						descriptor.settings_,
+						resultMetadata.get());
 		/*
-		 * \todo Buffer mapping and compression should be moved to a
-		 * separate thread.
+		 * Return the FrameBuffer to the CameraStream now that we're
+		 * done processing it.
 		 */
+		if (cameraStream->type() == CameraStream::Type::Internal)
+			cameraStream->putBuffer(src);
 
-		MappedCamera3Buffer mapped(*descriptor->buffers[i].buffer,
-					   PROT_READ | PROT_WRITE);
-		if (!mapped.isValid()) {
-			LOG(HAL, Error) << "Failed to mmap android blob buffer";
-			continue;
-		}
-
-		/* Set EXIF metadata for various tags. */
-		Exif exif;
-		/* \todo Set Make and Model from external vendor tags. */
-		exif.setMake("libcamera");
-		exif.setModel("cameraModel");
-		exif.setOrientation(orientation_);
-		exif.setSize(cameraStream->size());
-		/*
-		 * We set the frame's EXIF timestamp as the time of encode.
-		 * Since the precision we need for EXIF timestamp is only one
-		 * second, it is good enough.
-		 */
-		exif.setTimestamp(std::time(nullptr));
-		if (exif.generate() != 0)
-			LOG(HAL, Error) << "Failed to generate valid EXIF data";
-
-		int jpeg_size = encoder->encode(buffer, mapped.maps()[0], exif.data());
-		if (jpeg_size < 0) {
-			LOG(HAL, Error) << "Failed to encode stream image";
-			status = CAMERA3_BUFFER_STATUS_ERROR;
-			continue;
+		if (ret) {
+			buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+			notifyError(descriptor.frameNumber_, buffer.stream,
+				    CAMERA3_MSG_ERROR_BUFFER);
 		}
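All BLOB post-processing now funnels through CameraStream::process(), replacing the inline mapping, EXIF and libjpeg encoding removed below. The contract, as inferred from the call site (the exact signature lives in camera_stream.h): encode or convert the source FrameBuffer into the Android destination buffer, use the request settings for EXIF-style tags, and append the JPEG result tags to resultMetadata, returning 0 on success:

	/* Inferred from the call above; illustrative only. */
	int ret = cameraStream->process(*src,           /* libcamera source buffer */
					*buffer.buffer, /* Android destination     */
					descriptor.settings_,
					resultMetadata.get());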
-
-		/*
-		 * Fill in the JPEG blob header.
-		 *
-		 * The mapped size of the buffer is being returned as
-		 * substantially larger than the requested JPEG_MAX_SIZE
-		 * (which is referenced from maxJpegBufferSize_). Utilise
-		 * this static size to ensure the correct offset of the blob is
-		 * determined.
-		 *
-		 * \todo Investigate if the buffer size mismatch is an issue or
-		 * expected behaviour.
-		 */
-		uint8_t *resultPtr = mapped.maps()[0].data() +
-				     maxJpegBufferSize_ -
-				     sizeof(struct camera3_jpeg_blob);
-		auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
-		blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
-		blob->jpeg_size = jpeg_size;
-
-		/* Update the JPEG result Metadata. */
-		resultMetadata->addEntry(ANDROID_JPEG_SIZE,
-					 &jpeg_size, 1);
-
-		const uint32_t jpeg_quality = 95;
-		resultMetadata->addEntry(ANDROID_JPEG_QUALITY,
-					 &jpeg_quality, 1);
-
-		const uint32_t jpeg_orientation = 0;
-		resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION,
-					 &jpeg_orientation, 1);
-	}
-
-	/* Prepare to call back the Android camera stack. */
-	camera3_capture_result_t captureResult = {};
-	captureResult.frame_number = descriptor->frameNumber;
-	captureResult.num_output_buffers = descriptor->numBuffers;
-	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
-		descriptor->buffers[i].acquire_fence = -1;
-		descriptor->buffers[i].release_fence = -1;
-		descriptor->buffers[i].status = status;
-	}
-	captureResult.output_buffers =
-		const_cast<const camera3_stream_buffer_t *>(descriptor->buffers);
-
-
-	if (status == CAMERA3_BUFFER_STATUS_OK) {
-		notifyShutter(descriptor->frameNumber,
-			      buffer->metadata().timestamp);
-
-		captureResult.partial_result = 1;
-		captureResult.result = resultMetadata->get();
-	}
-
-	if (status == CAMERA3_BUFFER_STATUS_ERROR || !captureResult.result) {
-		/* \todo Improve error handling. In case we notify an error
-		 * because the metadata generation fails, a shutter event has
-		 * already been notified for this frame number before the error
-		 * is here signalled. Make sure the error path plays well with
-		 * the camera stack state machine.
-		 */
-		notifyError(descriptor->frameNumber,
-			    descriptor->buffers[0].stream);
-	}
 
+	captureResult.result = resultMetadata->get();
 	callbacks_->process_capture_result(callbacks_, &captureResult);
-
-	delete descriptor;
 }
 
 std::string CameraDevice::logPrefix() const
@@ -1611,21 +1135,15 @@ void CameraDevice::notifyShutter(uint32_t frameNumber, uint64_t timestamp)
 	callbacks_->notify(callbacks_, &notify);
 }
 
-void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream)
+void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream,
+			       camera3_error_msg_code code)
 {
 	camera3_notify_msg_t notify = {};
 
-	/*
-	 * \todo Report and identify the stream number or configuration to
-	 * clarify the stream that failed.
-	 */
-	LOG(HAL, Error) << "Error occurred on frame " << frameNumber << " ("
-			<< toPixelFormat(stream->format).toString() << ")";
-
 	notify.type = CAMERA3_MSG_ERROR;
 	notify.message.error.error_stream = stream;
 	notify.message.error.frame_number = frameNumber;
-	notify.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+	notify.message.error.error_code = code;
 
 	callbacks_->notify(callbacks_, &notify);
 }
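notifyError() now forwards a caller-supplied code instead of hard-coding CAMERA3_MSG_ERROR_REQUEST. As a reading aid, the camera3 error codes used in this file and their rough meaning per Android's camera3.h (summarized, so treat as indicative rather than normative):

	notifyError(frame, nullptr, CAMERA3_MSG_ERROR_DEVICE);  /* fatal, device unusable  */
	notifyError(frame, nullptr, CAMERA3_MSG_ERROR_REQUEST); /* whole request dropped   */
	notifyError(frame, nullptr, CAMERA3_MSG_ERROR_RESULT);  /* result metadata missing */
	notifyError(frame, stream,  CAMERA3_MSG_ERROR_BUFFER);  /* single buffer failed    */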
@@ -1634,63 +1152,182 @@ void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream)
  * Produce a set of fixed result metadata.
  */
 std::unique_ptr<CameraMetadata>
-CameraDevice::getResultMetadata([[maybe_unused]] int frame_number,
-				int64_t timestamp)
+CameraDevice::getResultMetadata(const Camera3RequestDescriptor &descriptor) const
 {
+	const ControlList &metadata = descriptor.request_->metadata();
+	const CameraMetadata &settings = descriptor.settings_;
+	camera_metadata_ro_entry_t entry;
+	bool found;
+
 	/*
 	 * \todo Keep this in sync with the actual number of entries.
-	 * Currently: 18 entries, 62 bytes
+	 * Currently: 40 entries, 156 bytes
+	 *
+	 * Reserve more space for the JPEG metadata set by the post-processor.
+	 * Currently:
+	 * ANDROID_JPEG_GPS_COORDINATES (double x 3) = 24 bytes
+	 * ANDROID_JPEG_GPS_PROCESSING_METHOD (byte x 32) = 32 bytes
+	 * ANDROID_JPEG_GPS_TIMESTAMP (int64) = 8 bytes
+	 * ANDROID_JPEG_SIZE (int32_t) = 4 bytes
+	 * ANDROID_JPEG_QUALITY (byte) = 1 byte
+	 * ANDROID_JPEG_ORIENTATION (int32_t) = 4 bytes
+	 * ANDROID_JPEG_THUMBNAIL_QUALITY (byte) = 1 byte
+	 * ANDROID_JPEG_THUMBNAIL_SIZE (int32 x 2) = 8 bytes
+	 * Total bytes for JPEG metadata: 82
 	 */
 	std::unique_ptr<CameraMetadata> resultMetadata =
-		std::make_unique<CameraMetadata>(18, 62);
+		std::make_unique<CameraMetadata>(44, 166);
 	if (!resultMetadata->isValid()) {
-		LOG(HAL, Error) << "Failed to allocate static metadata";
+		LOG(HAL, Error) << "Failed to allocate result metadata";
 		return nullptr;
 	}
 
-	const uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
-	resultMetadata->addEntry(ANDROID_CONTROL_AE_STATE, &ae_state, 1);
+	/*
+	 * \todo The values of the result metadata copied from the settings
+	 * will have to be passed to the libcamera::Camera and extracted
+	 * from libcamera::Request::metadata.
+	 */
+
+	uint8_t value = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
+				 value);
+
+	value = ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE, value);
+
+	int32_t value32 = 0;
+	resultMetadata->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+				 value32);
 
-	const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
-	resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK, &ae_lock, 1);
+	value = ANDROID_CONTROL_AE_LOCK_OFF;
+	resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK, value);
 
-	uint8_t af_state = ANDROID_CONTROL_AF_STATE_INACTIVE;
-	resultMetadata->addEntry(ANDROID_CONTROL_AF_STATE, &af_state, 1);
+	value = ANDROID_CONTROL_AE_MODE_ON;
+	resultMetadata->addEntry(ANDROID_CONTROL_AE_MODE, value);
 
-	const uint8_t awb_state = ANDROID_CONTROL_AWB_STATE_CONVERGED;
-	resultMetadata->addEntry(ANDROID_CONTROL_AWB_STATE, &awb_state, 1);
+	if (settings.getEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, &entry))
+		/*
+		 * \todo Retrieve the AE FPS range from the libcamera metadata.
+		 * As libcamera does not yet support that control, return what
+		 * the framework asked for as a temporary workaround.
+		 */
+		resultMetadata->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+					 entry.data.i32, 2);
 
-	const uint8_t awb_lock = ANDROID_CONTROL_AWB_LOCK_OFF;
-	resultMetadata->addEntry(ANDROID_CONTROL_AWB_LOCK, &awb_lock, 1);
+	found = settings.getEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &entry);
+	value = found ? *entry.data.u8 :
+			(uint8_t)ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
+	resultMetadata->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, value);
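Several of the tags below follow the echo-back pattern used for the precapture trigger above: return the value the framework asked for when the settings provide one, else a fixed default. Written out once as a hypothetical helper (echoOrDefault is illustrative, not part of the change):

	/* Hypothetical helper generalizing the pattern above. */
	static uint8_t echoOrDefault(const CameraMetadata &settings,
				     uint32_t tag, uint8_t fallback)
	{
		camera_metadata_ro_entry_t entry;
		return settings.getEntry(tag, &entry) ? entry.data.u8[0]
						      : fallback;
	}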
 
-	const uint8_t lens_state = ANDROID_LENS_STATE_STATIONARY;
-	resultMetadata->addEntry(ANDROID_LENS_STATE, &lens_state, 1);
+	value = ANDROID_CONTROL_AE_STATE_CONVERGED;
+	resultMetadata->addEntry(ANDROID_CONTROL_AE_STATE, value);
 
-	int32_t sensorSizes[] = {
-		0, 0, 2560, 1920,
-	};
-	resultMetadata->addEntry(ANDROID_SCALER_CROP_REGION, sensorSizes, 4);
+	value = ANDROID_CONTROL_AF_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_CONTROL_AF_MODE, value);
+
+	value = ANDROID_CONTROL_AF_STATE_INACTIVE;
+	resultMetadata->addEntry(ANDROID_CONTROL_AF_STATE, value);
 
-	resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
+	value = ANDROID_CONTROL_AF_TRIGGER_IDLE;
+	resultMetadata->addEntry(ANDROID_CONTROL_AF_TRIGGER, value);
+
+	value = ANDROID_CONTROL_AWB_MODE_AUTO;
+	resultMetadata->addEntry(ANDROID_CONTROL_AWB_MODE, value);
+
+	value = ANDROID_CONTROL_AWB_LOCK_OFF;
+	resultMetadata->addEntry(ANDROID_CONTROL_AWB_LOCK, value);
+
+	value = ANDROID_CONTROL_AWB_STATE_CONVERGED;
+	resultMetadata->addEntry(ANDROID_CONTROL_AWB_STATE, value);
+
+	value = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+	resultMetadata->addEntry(ANDROID_CONTROL_CAPTURE_INTENT, value);
+
+	value = ANDROID_CONTROL_EFFECT_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_CONTROL_EFFECT_MODE, value);
+
+	value = ANDROID_CONTROL_MODE_AUTO;
+	resultMetadata->addEntry(ANDROID_CONTROL_MODE, value);
+
+	value = ANDROID_CONTROL_SCENE_MODE_DISABLED;
+	resultMetadata->addEntry(ANDROID_CONTROL_SCENE_MODE, value);
+
+	value = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, value);
+
+	value = ANDROID_FLASH_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_FLASH_MODE, value);
+
+	value = ANDROID_FLASH_STATE_UNAVAILABLE;
+	resultMetadata->addEntry(ANDROID_FLASH_STATE, value);
+
+	if (settings.getEntry(ANDROID_LENS_APERTURE, &entry))
+		resultMetadata->addEntry(ANDROID_LENS_APERTURE, entry.data.f, 1);
+
+	float focal_length = 1.0;
+	resultMetadata->addEntry(ANDROID_LENS_FOCAL_LENGTH, focal_length);
+
+	value = ANDROID_LENS_STATE_STATIONARY;
+	resultMetadata->addEntry(ANDROID_LENS_STATE, value);
+
+	value = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+				 value);
+
+	value32 = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_SENSOR_TEST_PATTERN_MODE, value32);
+
+	value = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE, value);
+
+	value = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+				 value);
+
+	value = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, value);
+
+	value = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
+	resultMetadata->addEntry(ANDROID_STATISTICS_SCENE_FLICKER, value);
+
+	value = ANDROID_NOISE_REDUCTION_MODE_OFF;
+	resultMetadata->addEntry(ANDROID_NOISE_REDUCTION_MODE, value);
 
 	/* 33.3 msec */
 	const int64_t rolling_shutter_skew = 33300000;
 	resultMetadata->addEntry(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
-				 &rolling_shutter_skew, 1);
+				 rolling_shutter_skew);
 
-	/* 16.6 msec */
-	const int64_t exposure_time = 16600000;
-	resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME,
-				 &exposure_time, 1);
+	/* Add metadata tags reported by libcamera. */
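Note that the hunk above also migrates every call from the pointer-and-count form addEntry(tag, &value, 1) to value and array forms. The CameraMetadata overloads this relies on are presumably along these lines (a sketch based on the call sites, not the actual camera_metadata.h):

	/* Assumed overload shapes. */
	template<typename T>
	bool addEntry(uint32_t tag, const T &value);       /* single value      */
	template<typename T, size_t N>
	bool addEntry(uint32_t tag, const T (&data)[N]);   /* fixed-size array  */
	bool addEntry(uint32_t tag, const void *data,
		      size_t count);                       /* pointer and count */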
+	const int64_t timestamp = metadata.get(controls::SensorTimestamp);
+	resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, timestamp);
 
-	const uint8_t lens_shading_map_mode =
-		ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
-	resultMetadata->addEntry(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
-				 &lens_shading_map_mode, 1);
+	if (metadata.contains(controls::draft::PipelineDepth)) {
+		uint8_t pipeline_depth =
+			metadata.get<int32_t>(controls::draft::PipelineDepth);
+		resultMetadata->addEntry(ANDROID_REQUEST_PIPELINE_DEPTH,
+					 pipeline_depth);
+	}
 
-	const uint8_t scene_flicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
-	resultMetadata->addEntry(ANDROID_STATISTICS_SCENE_FLICKER,
-				 &scene_flicker, 1);
+	if (metadata.contains(controls::ExposureTime)) {
+		int64_t exposure = metadata.get(controls::ExposureTime) * 1000ULL;
+		resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME, exposure);
+	}
+
+	if (metadata.contains(controls::FrameDuration)) {
+		int64_t duration = metadata.get(controls::FrameDuration) * 1000;
+		resultMetadata->addEntry(ANDROID_SENSOR_FRAME_DURATION,
+					 duration);
+	}
+
+	if (metadata.contains(controls::ScalerCrop)) {
+		Rectangle crop = metadata.get(controls::ScalerCrop);
+		int32_t cropRect[] = {
+			crop.x, crop.y, static_cast<int32_t>(crop.width),
+			static_cast<int32_t>(crop.height),
+		};
+		resultMetadata->addEntry(ANDROID_SCALER_CROP_REGION, cropRect);
+	}
 
 	/*
 	 * Return the result metadata pack even if it is not valid: get() will return
@@ -1700,5 +1337,12 @@ CameraDevice::getResultMetadata([[maybe_unused]] int frame_number,
 		LOG(HAL, Error) << "Failed to construct result metadata";
 	}
 
+	if (resultMetadata->resized()) {
+		auto [entryCount, dataCount] = resultMetadata->usage();
+		LOG(HAL, Info)
+			<< "Result metadata resized: " << entryCount
+			<< " entries and " << dataCount << " bytes used";
+	}
+
 	return resultMetadata;
 }
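On units: libcamera expresses ExposureTime and FrameDuration in microseconds while the corresponding Android tags are in nanoseconds, hence the *1000 factors above; SensorTimestamp is already in nanoseconds and is copied through unscaled. Summarized (units assumed from the conversions above and libcamera's control documentation):

	int64_t exposureNs = exposureUs * 1000;  /* ANDROID_SENSOR_EXPOSURE_TIME  */
	int64_t durationNs = durationUs * 1000;  /* ANDROID_SENSOR_FRAME_DURATION */
	int64_t timestampNs = sensorTimestampNs; /* ANDROID_SENSOR_TIMESTAMP      */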