/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * control_serializer.cpp - Control (de)serializer
 */

#include "libcamera/internal/control_serializer.h"

#include <algorithm>
#include <memory>
#include <string.h>
#include <vector>

#include <ipa/ipa_controls.h>
#include <libcamera/control_ids.h>
#include <libcamera/controls.h>
#include <libcamera/span.h>

#include "libcamera/internal/byte_stream_buffer.h"
#include "libcamera/internal/log.h"

/**
 * \file control_serializer.h
 * \brief Serialization and deserialization helpers for controls
 */

namespace libcamera {

LOG_DEFINE_CATEGORY(Serializer)

/**
 * \class ControlSerializer
 * \brief Serializer and deserializer for control-related classes
 *
 * The control serializer is a helper to serialize and deserialize
 * ControlInfoMap and ControlValue instances for the purpose of communication
 * with IPA modules.
 *
 * Neither the ControlInfoMap nor the ControlList are self-contained data
 * containers. ControlInfoMap references an external ControlId in each of its
 * entries, and ControlList references a ControlInfoMap for the purpose of
 * validation. Serializing and deserializing those objects thus requires a
 * context that maintains the associations between them. The control serializer
 * fulfils this task.
 *
 * ControlInfoMap instances can be serialized on their own, but require
 * ControlId instances to be provided at deserialization time. The serializer
 * recreates those ControlId instances and stores them in an internal cache,
 * from which the ControlInfoMap is populated.
 *
 * ControlList instances need to be associated with a ControlInfoMap when
 * deserialized. To make this possible, the control lists are serialized with a
 * handle to their ControlInfoMap, and the map is looked up from the handle at
 * deserialization time. To that end, the serializer assigns a numerical handle
 * to ControlInfoMap instances when they are serialized, and stores the mapping
 * between handle and ControlInfoMap both when serializing (for the pipeline
 * handler side) and deserializing (for the IPA side) them. This mapping is
 * used when serializing a ControlList to include the corresponding
 * ControlInfoMap handle in the binary data, and when deserializing to retrieve
 * the corresponding ControlInfoMap.
 *
 * In order to perform those tasks, the serializer keeps an internal state that
 * needs to be properly populated. This mechanism requires the ControlInfoMap
 * corresponding to a ControlList to have been serialized or deserialized
 * before the ControlList is serialized or deserialized. Failure to comply with
 * that constraint results in serialization or deserialization failure of the
 * ControlList.
 *
 * The serializer can be reset() to clear its internal state. This may be
 * performed when reconfiguring an IPA to avoid constant growth of the internal
 * state, especially if the contents of the ControlInfoMap instances change at
 * that time. A reset of the serializer invalidates all ControlList and
 * ControlInfoMap that have been previously deserialized. The caller shall thus
 * proceed with care to avoid stale references.
 */
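/*
 * The example below is an illustrative sketch, not part of the libcamera API
 * documentation. It shows the ordering constraint described above on the
 * serialization side: the ControlInfoMap shall pass through the serializer
 * before any ControlList that refers to it. The variable names and the use of
 * std::vector as backing storage are assumptions made for the example only.
 *
 *   ControlSerializer serializer;
 *
 *   std::vector<uint8_t> infoData(serializer.binarySize(infoMap));
 *   ByteStreamBuffer infoBuffer(infoData.data(), infoData.size());
 *   serializer.serialize(infoMap, infoBuffer);   // Assigns a handle to infoMap
 *
 *   std::vector<uint8_t> listData(serializer.binarySize(list));
 *   ByteStreamBuffer listBuffer(listData.data(), listData.size());
 *   serializer.serialize(list, listBuffer);      // Records the infoMap handle
 */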
ControlSerializer::ControlSerializer()
	: serial_(0)
{
}

/**
 * \brief Reset the serializer
 *
 * Reset the internal state of the serializer. This invalidates all the
 * ControlList and ControlInfoMap that have been previously deserialized.
 */
void ControlSerializer::reset()
{
	serial_ = 0;

	infoMapHandles_.clear();
	infoMaps_.clear();
	controlIds_.clear();
}

size_t ControlSerializer::binarySize(const ControlValue &value)
{
	return value.data().size_bytes();
}

size_t ControlSerializer::binarySize(const ControlInfo &info)
{
	return binarySize(info.min()) + binarySize(info.max());
}

/**
 * \brief Retrieve the size in bytes required to serialize a ControlInfoMap
 * \param[in] infoMap The control info map
 *
 * Compute and return the size in bytes required to store the serialized
 * ControlInfoMap.
 *
 * \return The size in bytes required to store the serialized ControlInfoMap
 */
size_t ControlSerializer::binarySize(const ControlInfoMap &infoMap)
{
	size_t size = sizeof(struct ipa_controls_header)
		    + infoMap.size() * sizeof(struct ipa_control_info_entry);

	for (const auto &ctrl : infoMap)
		size += binarySize(ctrl.second);

	return size;
}

/**
 * \brief Retrieve the size in bytes required to serialize a ControlList
 * \param[in] list The control list
 *
 * Compute and return the size in bytes required to store the serialized
 * ControlList.
 *
 * \return The size in bytes required to store the serialized ControlList
 */
size_t ControlSerializer::binarySize(const ControlList &list)
{
	size_t size = sizeof(struct ipa_controls_header)
		    + list.size() * sizeof(struct ipa_control_value_entry);

	for (const auto &ctrl : list)
		size += binarySize(ctrl.second);

	return size;
}

void ControlSerializer::store(const ControlValue &value,
			      ByteStreamBuffer &buffer)
{
	buffer.write(value.data());
}

void ControlSerializer::store(const ControlInfo &info, ByteStreamBuffer &buffer)
{
	store(info.min(), buffer);
	store(info.max(), buffer);
}

/**
 * \brief Serialize a ControlInfoMap in a buffer
 * \param[in] infoMap The control info map to serialize
 * \param[in] buffer The memory buffer where to serialize the ControlInfoMap
 *
 * Serialize the \a infoMap into the \a buffer using the serialization format
 * defined by the IPA context interface in ipa_controls.h.
 *
 * The serializer stores a reference to the \a infoMap internally. The caller
 * shall ensure that \a infoMap stays valid until the serializer is reset().
 *
 * \return 0 on success, a negative error code otherwise
 * \retval -ENOSPC Not enough space is available in the buffer
 */
int ControlSerializer::serialize(const ControlInfoMap &infoMap,
				 ByteStreamBuffer &buffer)
{
	/* Compute entries and data required sizes. */
	size_t entriesSize = infoMap.size()
			   * sizeof(struct ipa_control_info_entry);
	size_t valuesSize = 0;
	for (const auto &ctrl : infoMap)
		valuesSize += binarySize(ctrl.second);

	/* Prepare the packet header, assign a handle to the ControlInfoMap. */
	struct ipa_controls_header hdr;
	hdr.version = IPA_CONTROLS_FORMAT_VERSION;
	hdr.handle = ++serial_;
	hdr.entries = infoMap.size();
	hdr.size = sizeof(hdr) + entriesSize + valuesSize;
	hdr.data_offset = sizeof(hdr) + entriesSize;

	buffer.write(&hdr);

	/*
	 * Serialize all entries.
	 * \todo Serialize the control name too
	 */
	ByteStreamBuffer entries = buffer.carveOut(entriesSize);
	ByteStreamBuffer values = buffer.carveOut(valuesSize);

	for (const auto &ctrl : infoMap) {
		const ControlId *id = ctrl.first;
		const ControlInfo &info = ctrl.second;

		struct ipa_control_info_entry entry;
		entry.id = id->id();
		entry.type = id->type();
		entry.offset = values.offset();
		entries.write(&entry);

		store(info, values);
	}

	if (buffer.overflow())
		return -ENOSPC;

	/*
	 * Store the map to handle association, to be used to serialize and
	 * deserialize control lists.
	 */
	infoMapHandles_[&infoMap] = hdr.handle;

	return 0;
}
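/*
 * For illustration only: the packet produced by serialize() above starts with
 * the ipa_controls_header, followed by one ipa_control_info_entry per control,
 * followed by the packed min/max values, with hdr.data_offset pointing at the
 * start of the values section. A map of three int32_t controls thus occupies
 *
 *   sizeof(struct ipa_controls_header)
 *       + 3 * sizeof(struct ipa_control_info_entry)   // fixed-size entries
 *       + 3 * 2 * sizeof(int32_t)                     // min and max values
 *
 * bytes, which is what binarySize(const ControlInfoMap &) computes.
 */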
/**
 * \brief Serialize a ControlList in a buffer
 * \param[in] list The control list to serialize
 * \param[in] buffer The memory buffer where to serialize the ControlList
 *
 * Serialize the \a list into the \a buffer using the serialization format
 * defined by the IPA context interface in ipa_controls.h.
 *
 * \return 0 on success, a negative error code otherwise
 * \retval -ENOENT The ControlList is related to an unknown ControlInfoMap
 * \retval -ENOSPC Not enough space is available in the buffer
 */
int ControlSerializer::serialize(const ControlList &list,
				 ByteStreamBuffer &buffer)
{
	/*
	 * Find the ControlInfoMap handle for the ControlList if it has one, or
	 * use 0 for a ControlList without a ControlInfoMap.
	 */
	unsigned int infoMapHandle;
	if (list.infoMap()) {
		auto iter = infoMapHandles_.find(list.infoMap());
		if (iter == infoMapHandles_.end()) {
			LOG(Serializer, Error)
				<< "Can't serialize ControlList: unknown ControlInfoMap";
			return -ENOENT;
		}

		infoMapHandle = iter->second;
	} else {
		infoMapHandle = 0;
	}

	size_t entriesSize = list.size()
			   * sizeof(struct ipa_control_value_entry);
	size_t valuesSize = 0;
	for (const auto &ctrl : list)
		valuesSize += binarySize(ctrl.second);

	/* Prepare the packet header. */
	struct ipa_controls_header hdr;
	hdr.version = IPA_CONTROLS_FORMAT_VERSION;
	hdr.handle = infoMapHandle;
	hdr.entries = list.size();
	hdr.size = sizeof(hdr) + entriesSize + valuesSize;
	hdr.data_offset = sizeof(hdr) + entriesSize;

	buffer.write(&hdr);

	ByteStreamBuffer entries = buffer.carveOut(entriesSize);
	ByteStreamBuffer values = buffer.carveOut(valuesSize);

	/* Serialize all entries. */
	for (const auto &ctrl : list) {
		unsigned int id = ctrl.first;
		const ControlValue &value = ctrl.second;

		struct ipa_control_value_entry entry;
		entry.id = id;
		entry.type = value.type();
		entry.is_array = value.isArray();
		entry.count = value.numElements();
		entry.offset = values.offset();
		entries.write(&entry);

		store(value, values);
	}

	if (buffer.overflow())
		return -ENOSPC;

	return 0;
}
ControlValue ControlSerializer::loadControlValue(ControlType type,
						 ByteStreamBuffer &buffer,
						 bool isArray,
						 unsigned int count)
{
	ControlValue value;

	value.reserve(type, isArray, count);
	buffer.read(value.data());

	return value;
}

ControlInfo ControlSerializer::loadControlInfo(ControlType type,
					       ByteStreamBuffer &b)
{
	if (type == ControlTypeString)
		type = ControlTypeInteger32;

	ControlValue min = loadControlValue(type, b);
	ControlValue max = loadControlValue(type, b);

	return ControlInfo(min, max);
}

/**
 * \fn template<typename T> T ControlSerializer::deserialize(ByteStreamBuffer &buffer)
 * \brief Deserialize an object from a binary buffer
 * \param[in] buffer The memory buffer that contains the object
 *
 * This method is only valid when specialized for ControlInfoMap or
 * ControlList. Any other typename \a T is not supported.
 */

/**
 * \brief Deserialize a ControlInfoMap from a binary buffer
 * \param[in] buffer The memory buffer that contains the serialized map
 *
 * Re-construct a ControlInfoMap from a binary \a buffer containing data
 * serialized using the serialize() method.
 *
 * \return The deserialized ControlInfoMap
 */
template<>
ControlInfoMap ControlSerializer::deserialize<ControlInfoMap>(ByteStreamBuffer &buffer)
{
	const struct ipa_controls_header *hdr = buffer.read<struct ipa_controls_header>();
	if (!hdr) {
		LOG(Serializer, Error) << "Out of data";
		return {};
	}

	if (hdr->version != IPA_CONTROLS_FORMAT_VERSION) {
		LOG(Serializer, Error)
			<< "Unsupported controls format version "
			<< hdr->version;
		return {};
	}

	ByteStreamBuffer entries = buffer.carveOut(hdr->data_offset - sizeof(*hdr));
	ByteStreamBuffer values = buffer.carveOut(hdr->size - hdr->data_offset);
	if (buffer.overflow()) {
		LOG(Serializer, Error) << "Out of data";
		return {};
	}

	ControlInfoMap::Map ctrls;

	for (unsigned int i = 0; i < hdr->entries; ++i) {
		const struct ipa_control_info_entry *entry =
			entries.read<struct ipa_control_info_entry>();
		if (!entry) {
			LOG(Serializer, Error) << "Out of data";
			return {};
		}

		/* Create and cache the individual ControlId. */
		ControlType type = static_cast<ControlType>(entry->type);
		/**
		 * \todo Find a way to preserve the control name for debugging
		 * purpose.
		 */
		controlIds_.emplace_back(std::make_unique<ControlId>(entry->id, "", type));

		if (entry->offset != values.offset()) {
			LOG(Serializer, Error)
				<< "Bad data, entry offset mismatch (entry "
				<< i << ")";
			return {};
		}

		/* Create and store the ControlInfo. */
		ctrls.emplace(controlIds_.back().get(),
			      loadControlInfo(type, values));
	}

	/*
	 * Create the ControlInfoMap in the cache, and store the map to handle
	 * association.
	 */
	ControlInfoMap &map = infoMaps_[hdr->handle] = std::move(ctrls);
	infoMapHandles_[&map] = hdr->handle;

	return map;
}
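/*
 * Illustrative sketch of the receiving side, mirroring the serialization
 * example near the top of this file; not part of the libcamera API
 * documentation. The buffers are assumed to wrap binary data received over
 * the IPC channel, with the ControlInfoMap deserialized before any
 * ControlList that references it.
 *
 *   ByteStreamBuffer infoBuffer(infoData.data(), infoData.size());
 *   ControlInfoMap infoMap = serializer.deserialize<ControlInfoMap>(infoBuffer);
 *
 *   ByteStreamBuffer listBuffer(listData.data(), listData.size());
 *   ControlList list = serializer.deserialize<ControlList>(listBuffer);
 */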
/**
 * \brief Deserialize a ControlList from a binary buffer
 * \param[in] buffer The memory buffer that contains the serialized list
 *
 * Re-construct a ControlList from a binary \a buffer containing data
 * serialized using the serialize() method.
 *
 * \return The deserialized ControlList
 */
template<>
ControlList ControlSerializer::deserialize<ControlList>(ByteStreamBuffer &buffer)
{
	const struct ipa_controls_header *hdr = buffer.read<struct ipa_controls_header>();
	if (!hdr) {
		LOG(Serializer, Error) << "Out of data";
		return {};
	}

	if (hdr->version != IPA_CONTROLS_FORMAT_VERSION) {
		LOG(Serializer, Error)
			<< "Unsupported controls format version "
			<< hdr->version;
		return {};
	}

	ByteStreamBuffer entries = buffer.carveOut(hdr->data_offset - sizeof(*hdr));
	ByteStreamBuffer values = buffer.carveOut(hdr->size - hdr->data_offset);
	if (buffer.overflow()) {
		LOG(Serializer, Error) << "Out of data";
		return {};
	}

	/*
	 * Retrieve the ControlInfoMap associated with the ControlList based on
	 * its ID. The mapping between infoMap and ID is set up when serializing
	 * or deserializing ControlInfoMap. If no mapping is found (which is
	 * currently the case for ControlLists related to libcamera controls),
	 * use the global controls::controls idmap.
	 */
	const ControlInfoMap *infoMap;
	if (hdr->handle) {
		auto iter = std::find_if(infoMapHandles_.begin(), infoMapHandles_.end(),
					 [&](decltype(infoMapHandles_)::value_type &entry) {
						 return entry.second == hdr->handle;
					 });
		if (iter == infoMapHandles_.end()) {
			LOG(Serializer, Error)
				<< "Can't deserialize ControlList: unknown ControlInfoMap";
			return {};
		}

		infoMap = iter->first;
	} else {
		infoMap = nullptr;
	}

	ControlList ctrls(infoMap ? infoMap->idmap() : controls::controls);

	for (unsigned int i = 0; i < hdr->entries; ++i) {
		const struct ipa_control_value_entry *entry =
			entries.read<struct ipa_control_value_entry>();
		if (!entry) {
			LOG(Serializer, Error) << "Out of data";
			return {};
		}

		if (entry->offset != values.offset()) {
			LOG(Serializer, Error)
				<< "Bad data, entry offset mismatch (entry "
				<< i << ")";
			return {};
		}

		ControlType type = static_cast<ControlType>(entry->type);
		ctrls.set(entry->id,
			  loadControlValue(type, values, entry->is_array,
					   entry->count));
	}

	return ctrls;
}

} /* namespace libcamera */
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2019, Google Inc.
 *
 * camera_device.cpp - libcamera Android Camera Device
 */

#include "camera_device.h"
#include "camera_ops.h"

#include <libcamera/controls.h>
#include <libcamera/property_ids.h>

#include "log.h"
#include "utils.h"

#include "camera_metadata.h"

using namespace libcamera;

LOG_DECLARE_CATEGORY(HAL);

/*
 * \struct Camera3RequestDescriptor
 *
 * A utility structure that groups information about a capture request to be
 * later re-used at request complete time to notify the framework.
 */

CameraDevice::Camera3RequestDescriptor::Camera3RequestDescriptor(
		unsigned int frameNumber, unsigned int numBuffers)
	: frameNumber(frameNumber), numBuffers(numBuffers)
{
	buffers = new camera3_stream_buffer_t[numBuffers];
}

CameraDevice::Camera3RequestDescriptor::~Camera3RequestDescriptor()
{
	delete[] buffers;
}

/*
 * \class CameraDevice
 *
 * The CameraDevice class wraps a libcamera::Camera instance, and implements
 * the camera3_device_t interface, bridging calls received from the Android
 * camera service to the CameraDevice.
 *
 * The class translates parameters and operations from the Camera HALv3 API to
 * the libcamera API to provide static information for a Camera, create request
 * templates for it, process capture requests and then deliver capture results
 * back to the framework using the designated callbacks.
 */
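
/*
 * A simplified sketch, for illustration only, of how the static operations
 * installed in hal_dev_ops (camera_ops.cpp) can recover the CameraDevice
 * instance stored in camera3Device_.priv and forward a call to it. The
 * function below is an assumption made for this example, not necessarily the
 * actual implementation:
 *
 *   static int hal_dev_configure_streams(const struct camera3_device *dev,
 *                                        camera3_stream_configuration_t *stream_list)
 *   {
 *           CameraDevice *camera = reinterpret_cast<CameraDevice *>(dev->priv);
 *           return camera->configureStreams(stream_list);
 *   }
 */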

CameraDevice::CameraDevice(unsigned int id, const std::shared_ptr<Camera> &camera)
	: running_(false), camera_(camera), staticMetadata_(nullptr)
{
	camera_->requestCompleted.connect(this, &CameraDevice::requestComplete);
}

CameraDevice::~CameraDevice()
{
	if (staticMetadata_)
		delete staticMetadata_;

	for (auto &it : requestTemplates_)
		delete it.second;
}

int CameraDevice::open(const hw_module_t *hardwareModule)
{
	int ret = camera_->acquire();
	if (ret) {
		LOG(HAL, Error) << "Failed to acquire the camera";
		return ret;
	}

	/* Initialize the hw_device_t in the instance camera3_module_t. */
	camera3Device_.common.tag = HARDWARE_DEVICE_TAG;
	camera3Device_.common.version = CAMERA_DEVICE_API_VERSION_3_3;
	camera3Device_.common.module = (hw_module_t *)hardwareModule;
	camera3Device_.common.close = hal_dev_close;

	/*
	 * The camera device operations. These actually implement
	 * the Android Camera HALv3 interface.
	 */
	camera3Device_.ops = &hal_dev_ops;
	camera3Device_.priv = this;

	return 0;
}

void CameraDevice::close()
{
	camera_->stop();
	camera_->release();

	running_ = false;
}

void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)
{
	callbacks_ = callbacks;
}

/*
 * Return static information for the camera.
 */
const camera_metadata_t *CameraDevice::getStaticMetadata()
{
	if (staticMetadata_)
		return staticMetadata_->get();

	const ControlList &properties = camera_->properties();

	/*
	 * The metadata reported here is enough to implement a basic capture
	 * example application, but a real camera implementation will require
	 * more.
	 */

	/*
	 * \todo Keep this in sync with the actual number of entries.
	 * Currently: 50 entries, 666 bytes
	 */
	staticMetadata_ = new CameraMetadata(50, 700);
	if (!staticMetadata_->isValid()) {
		LOG(HAL, Error) << "Failed to allocate static metadata";
		delete staticMetadata_;
		staticMetadata_ = nullptr;
		return nullptr;
	}

	/* Color correction static metadata. */
	std::vector<uint8_t> aberrationModes = {
		ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
	};
	staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
				  aberrationModes.data(),
				  aberrationModes.size());

	/* Control static metadata. */
	std::vector<uint8_t> aeAvailableAntiBandingModes = {
		ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
		ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
		ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
		ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
				  aeAvailableAntiBandingModes.data(),
				  aeAvailableAntiBandingModes.size());

	std::vector<uint8_t> aeAvailableModes = {
		ANDROID_CONTROL_AE_MODE_ON,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,
				  aeAvailableModes.data(),
				  aeAvailableModes.size());

	std::vector<int32_t> availableAeFpsTarget = {
		15, 30,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
				  availableAeFpsTarget.data(),
				  availableAeFpsTarget.size());

	std::vector<int32_t> aeCompensationRange = {
		0, 0,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
				  aeCompensationRange.data(),
				  aeCompensationRange.size());

	const camera_metadata_rational_t aeCompensationStep[] = {
		{ 0, 1 }
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,
				  aeCompensationStep, 1);

	std::vector<uint8_t> availableAfModes = {
		ANDROID_CONTROL_AF_MODE_OFF,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,
				  availableAfModes.data(),
				  availableAfModes.size());

	std::vector<uint8_t> availableEffects = {
		ANDROID_CONTROL_EFFECT_MODE_OFF,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,
				  availableEffects.data(),
				  availableEffects.size());

	std::vector<uint8_t> availableSceneModes = {
		ANDROID_CONTROL_SCENE_MODE_DISABLED,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
				  availableSceneModes.data(),
				  availableSceneModes.size());

	std::vector<uint8_t> availableStabilizationModes = {
		ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
				  availableStabilizationModes.data(),
				  availableStabilizationModes.size());

	std::vector<uint8_t> availableAwbModes = {
		ANDROID_CONTROL_AWB_MODE_OFF,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
				  availableAwbModes.data(),
				  availableAwbModes.size());

	std::vector<int32_t> availableMaxRegions = {
		0, 0, 0,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,
				  availableMaxRegions.data(),
				  availableMaxRegions.size());

	std::vector<uint8_t> sceneModesOverride = {
		ANDROID_CONTROL_AE_MODE_ON,
		ANDROID_CONTROL_AWB_MODE_AUTO,
		ANDROID_CONTROL_AF_MODE_AUTO,
	};
	staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
				  sceneModesOverride.data(),
				  sceneModesOverride.size());

	uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
	staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
				  &aeLockAvailable, 1);

	uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
	staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
				  &awbLockAvailable, 1);

	char availableControlModes = ANDROID_CONTROL_MODE_AUTO;
	staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,
				  &availableControlModes, 1);

	/* JPEG static metadata. */
	std::vector<int32_t> availableThumbnailSizes = {
		0, 0,
	};
	staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
				  availableThumbnailSizes.data(),
				  availableThumbnailSizes.size());

	/* Sensor static metadata. */
	int32_t pixelArraySize[] = {
		2592, 1944,
	};
	staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
				  &pixelArraySize, 2);

	int32_t sensorSizes[] = {
		0, 0, 2560, 1920,
	};
	staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
				  &sensorSizes, 4);

	int32_t sensitivityRange[] = {
		32, 2400,
	};
	staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
				  &sensitivityRange, 2);

	uint8_t filterArr = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG;
	staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
				  &filterArr, 1);

	int64_t exposureTimeRange[] = {
		100000, 200000000,
	};
	staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
				  &exposureTimeRange, 2);

	/*
	 * The Android orientation metadata and libcamera rotation property are
	 * defined differently but have identical numerical values for Android
	 * devices such as phones and tablets.
	 */
	int32_t orientation = 0;
	if (properties.contains(properties::Rotation))
		orientation = properties.get(properties::Rotation);
	staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, &orientation, 1);

	std::vector<int32_t> testPatternModes = {
		ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,
	};
	staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
				  testPatternModes.data(),
				  testPatternModes.size());

	std::vector<float> physicalSize = {
		2592, 1944,
	};
	staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
				  physicalSize.data(),
				  physicalSize.size());

	uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
	staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
				  &timestampSource, 1);

	/* Statistics static metadata. */
	uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
	staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
				  &faceDetectMode, 1);

	int32_t maxFaceCount = 0;
	staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
				  &maxFaceCount, 1);

	/* Sync static metadata. */
	int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
	staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);

	/* Flash static metadata. */
	char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
	staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,
				  &flashAvailable, 1);

	/* Lens static metadata. */
	std::vector<float> lensApertures = {
		2.53 / 100,
	};
	staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
				  lensApertures.data(),
				  lensApertures.size());

	uint8_t lensFacing = ANDROID_LENS_FACING_FRONT;
	if (properties.contains(properties::Location)) {
		int32_t location = properties.get(properties::Location);
		switch (location) {
		case properties::CameraLocationFront:
			lensFacing = ANDROID_LENS_FACING_FRONT;
			break;
		case properties::CameraLocationBack:
			lensFacing = ANDROID_LENS_FACING_BACK;
			break;
		case properties::CameraLocationExternal:
			lensFacing = ANDROID_LENS_FACING_EXTERNAL;
			break;
		}
	}
	staticMetadata_->addEntry(ANDROID_LENS_FACING, &lensFacing, 1);

	std::vector<float> lensFocalLengths = {
		1,
	};
	staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
				  lensFocalLengths.data(),
				  lensFocalLengths.size());

	std::vector<uint8_t> opticalStabilizations = {
		ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
	};
	staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
				  opticalStabilizations.data(),
				  opticalStabilizations.size());

	float hyperFocalDistance = 0;
	staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
				  &hyperFocalDistance, 1);

	float minFocusDistance = 0;
	staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
				  &minFocusDistance, 1);

	/* Noise reduction modes. */
	uint8_t noiseReductionModes = ANDROID_NOISE_REDUCTION_MODE_OFF;
	staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
				  &noiseReductionModes, 1);

	/* Scaler static metadata. */
	float maxDigitalZoom = 1;
	staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
				  &maxDigitalZoom, 1);

	std::vector<uint32_t> availableStreamFormats = {
		ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
		ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
		ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED,
	};
	staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_FORMATS,
				  availableStreamFormats.data(),
				  availableStreamFormats.size());

	std::vector<uint32_t> availableStreamConfigurations = {
		ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920,
		ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
		ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888, 2560, 1920,
		ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
		ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED, 2560, 1920,
		ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
	};
	staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
				  availableStreamConfigurations.data(),
				  availableStreamConfigurations.size());

	std::vector<int64_t> availableStallDurations = {
		ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
	};
	staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
				  availableStallDurations.data(),
				  availableStallDurations.size());

	std::vector<int64_t> minFrameDurations = {
		ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
		ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED, 2560, 1920, 33333333,
		ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888, 2560, 1920, 33333333,
	};
	staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
				  minFrameDurations.data(),
				  minFrameDurations.size());

	uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
	staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);

	/* Info static metadata. */
	uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
	staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
				  &supportedHWLevel, 1);

	/* Request static metadata. */
	int32_t partialResultCount = 1;
	staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
				  &partialResultCount, 1);

	uint8_t maxPipelineDepth = 2;
	staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
				  &maxPipelineDepth, 1);

	std::vector<uint8_t> availableCapabilities = {
		ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
	};
	staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
				  availableCapabilities.data(),
				  availableCapabilities.size());

	std::vector<int32_t> availableCharacteristicsKeys = {
		ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
		ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
		ANDROID_CONTROL_AE_AVAILABLE_MODES,
		ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
		ANDROID_CONTROL_AE_COMPENSATION_RANGE,
		ANDROID_CONTROL_AE_COMPENSATION_STEP,
		ANDROID_CONTROL_AF_AVAILABLE_MODES,
		ANDROID_CONTROL_AVAILABLE_EFFECTS,
		ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
		ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
		ANDROID_CONTROL_AWB_AVAILABLE_MODES,
		ANDROID_CONTROL_MAX_REGIONS,
		ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
		ANDROID_CONTROL_AE_LOCK_AVAILABLE,
		ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
		ANDROID_CONTROL_AVAILABLE_MODES,
		ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
		ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
		ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
		ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
		ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
		ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
		ANDROID_SENSOR_ORIENTATION,
		ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
		ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
		ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
		ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
		ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
		ANDROID_SYNC_MAX_LATENCY,
		ANDROID_FLASH_INFO_AVAILABLE,
		ANDROID_LENS_INFO_AVAILABLE_APERTURES,
		ANDROID_LENS_FACING,
		ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
		ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
		ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
		ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
		ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
		ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
		ANDROID_SCALER_AVAILABLE_FORMATS,
		ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
		ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
		ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
		ANDROID_SCALER_CROPPING_TYPE,
		ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
		ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
		ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
		ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
	};
	staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
				  availableCharacteristicsKeys.data(),
				  availableCharacteristicsKeys.size());

	std::vector<int32_t> availableRequestKeys = {
		ANDROID_CONTROL_AE_MODE,
		ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
		ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
		ANDROID_CONTROL_AE_LOCK,
		ANDROID_CONTROL_AF_TRIGGER,
		ANDROID_CONTROL_AWB_MODE,
		ANDROID_CONTROL_AWB_LOCK,
		ANDROID_FLASH_MODE,
		ANDROID_STATISTICS_FACE_DETECT_MODE,
		ANDROID_NOISE_REDUCTION_MODE,
		ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
		ANDROID_CONTROL_CAPTURE_INTENT,
	};
	staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
				  availableRequestKeys.data(),
				  availableRequestKeys.size());

	std::vector<int32_t> availableResultKeys = {
		ANDROID_CONTROL_AE_STATE,
		ANDROID_CONTROL_AE_LOCK,
		ANDROID_CONTROL_AF_STATE,
		ANDROID_CONTROL_AWB_STATE,
		ANDROID_CONTROL_AWB_LOCK,
		ANDROID_LENS_STATE,
		ANDROID_SCALER_CROP_REGION,
		ANDROID_SENSOR_TIMESTAMP,
		ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
		ANDROID_SENSOR_EXPOSURE_TIME,
		ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
		ANDROID_STATISTICS_SCENE_FLICKER,
	};
	staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
				  availableResultKeys.data(),
				  availableResultKeys.size());

	if (!staticMetadata_->isValid()) {
		LOG(HAL, Error) << "Failed to construct static metadata";
		delete staticMetadata_;
		staticMetadata_ = nullptr;
		return nullptr;
	}

	return staticMetadata_->get();
}

/*
 * Produce a metadata pack to be used as template for a capture request.
 */
const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)
{
	auto it = requestTemplates_.find(type);
	if (it != requestTemplates_.end())
		return it->second->get();

	/* Use the capture intent matching the requested template type. */
	uint8_t captureIntent;
	switch (type) {
	case CAMERA3_TEMPLATE_PREVIEW:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
		break;
	case CAMERA3_TEMPLATE_STILL_CAPTURE:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
		break;
	case CAMERA3_TEMPLATE_VIDEO_RECORD:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
		break;
	case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
		break;
	case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
		break;
	case CAMERA3_TEMPLATE_MANUAL:
		captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
		break;
	default:
		LOG(HAL, Error) << "Invalid template request type: " << type;
		return nullptr;
	}

	/*
	 * \todo Keep this in sync with the actual number of entries.
	 * Currently: 12 entries, 15 bytes
	 */
	CameraMetadata *requestTemplate = new CameraMetadata(15, 20);
	if (!requestTemplate->isValid()) {
		LOG(HAL, Error) << "Failed to allocate template metadata";
		delete requestTemplate;
		return nullptr;
	}

	uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
	requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE,
				  &aeMode, 1);

	int32_t aeExposureCompensation = 0;
	requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
				  &aeExposureCompensation, 1);

	uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
	requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
				  &aePrecaptureTrigger, 1);

	uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
	requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK,
				  &aeLock, 1);

	uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
	requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER,
				  &afTrigger, 1);

	uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
	requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE,
				  &awbMode, 1);

	uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
	requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK,
				  &awbLock, 1);

	uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
	requestTemplate->addEntry(ANDROID_FLASH_MODE,
				  &flashMode, 1);

	uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
	requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,
				  &faceDetectMode, 1);

	uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF;
	requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE,
				  &noiseReduction, 1);

	uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
	requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
				  &aberrationMode, 1);

	requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,
				  &captureIntent, 1);

	if (!requestTemplate->isValid()) {
		LOG(HAL, Error) << "Failed to construct request template";
		delete requestTemplate;
		return nullptr;
	}

	requestTemplates_[type] = requestTemplate;
	return requestTemplate->get();
}

/*
 * Inspect the stream_list to produce a list of StreamConfiguration to
 * be used to configure the Camera.
 */
int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
{
	for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
		camera3_stream_t *stream = stream_list->streams[i];

		LOG(HAL, Info) << "Stream #" << i
			       << ", direction: " << stream->stream_type
			       << ", width: " << stream->width
			       << ", height: " << stream->height
			       << ", format: " << utils::hex(stream->format);
	}

	/* Hardcode viewfinder role, collecting sizes from the stream config. */
	if (stream_list->num_streams != 1) {
		LOG(HAL, Error) << "Only one stream supported";
		return -EINVAL;
	}

	StreamRoles roles = { StreamRole::Viewfinder };
	config_ = camera_->generateConfiguration(roles);
	if (!config_ || config_->empty()) {
		LOG(HAL, Error) << "Failed to generate camera configuration";
		return -EINVAL;
	}

	/* Only one stream is supported. */
	camera3_stream_t *camera3Stream = stream_list->streams[0];
	StreamConfiguration *streamConfiguration = &config_->at(0);
	streamConfiguration->size.width = camera3Stream->width;
	streamConfiguration->size.height = camera3Stream->height;

	/*
	 * \todo We'll need to translate from Android defined pixel format codes
	 * to the libcamera image format codes. For now, do not change the
	 * format returned from Camera::generateConfiguration().
	 */

	switch (config_->validate()) {
	case CameraConfiguration::Valid:
		break;
	case CameraConfiguration::Adjusted:
		LOG(HAL, Info) << "Camera configuration adjusted";
		config_.reset();
		return -EINVAL;
	case CameraConfiguration::Invalid:
		LOG(HAL, Info) << "Camera configuration invalid";
		config_.reset();
		return -EINVAL;
	}

	camera3Stream->max_buffers = streamConfiguration->bufferCount;

	/*
	 * Once the CameraConfiguration has been adjusted/validated
	 * it can be applied to the camera.
	 */
	int ret = camera_->configure(config_.get());
	if (ret) {
		LOG(HAL, Error) << "Failed to configure camera '"
				<< camera_->name() << "'";
		return ret;
	}

	return 0;
}

int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Request)
{
	StreamConfiguration *streamConfiguration = &config_->at(0);
	Stream *stream = streamConfiguration->stream();

	if (camera3Request->num_output_buffers != 1) {
		LOG(HAL, Error) << "Invalid number of output buffers: "
				<< camera3Request->num_output_buffers;
		return -EINVAL;
	}

	/* Start the camera if that's the first request we handle. */
	if (!running_) {
		int ret = camera_->start();
		if (ret) {
			LOG(HAL, Error) << "Failed to start camera";
			return ret;
		}

		running_ = true;
	}

	/*
	 * Queue a request for the Camera with the provided dmabuf file
	 * descriptors.
	 */
	const camera3_stream_buffer_t *camera3Buffers =
					camera3Request->output_buffers;

	/*
	 * Save the request descriptors for use at completion time.
	 * The descriptor and the associated memory reserved here are freed
	 * at request complete time.
	 */
	Camera3RequestDescriptor *descriptor =
		new Camera3RequestDescriptor(camera3Request->frame_number,
					     camera3Request->num_output_buffers);
	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
		/*
		 * Keep track of which stream the request belongs to and store
		 * the native buffer handles.
		 *
		 * \todo Currently we only support one capture buffer. Copy
		 * all of them to be ready once we support more.
		 */
		descriptor->buffers[i].stream = camera3Buffers[i].stream;
		descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
	}

	/*
	 * Create a libcamera buffer using the dmabuf descriptors of the first
	 * and (currently) only supported request buffer.
	 */
	const buffer_handle_t camera3Handle = *camera3Buffers[0].buffer;

	std::vector<FrameBuffer::Plane> planes;
	for (int i = 0; i < 3; i++) {
		FrameBuffer::Plane plane;
		plane.fd = FileDescriptor(camera3Handle->data[i]);
		/*
		 * Setting length to zero here is OK as the length is only used
		 * to map the memory of the plane. libcamera does not need to
		 * poke at the memory content queued by the HAL.
		 */
		plane.length = 0;
		planes.push_back(std::move(plane));
	}

	FrameBuffer *buffer = new FrameBuffer(std::move(planes));
	if (!buffer) {
		LOG(HAL, Error) << "Failed to create buffer";
		delete descriptor;
		return -ENOMEM;
	}

	Request *request =
		camera_->createRequest(reinterpret_cast<uint64_t>(descriptor));
	request->addBuffer(stream, buffer);

	int ret = camera_->queueRequest(request);
	if (ret) {
		LOG(HAL, Error) << "Failed to queue request";
		delete request;
		delete descriptor;
		return ret;
	}

	return 0;
}

void CameraDevice::requestComplete(Request *request)
{
	const std::map<Stream *, FrameBuffer *> &buffers = request->buffers();
	FrameBuffer *buffer = buffers.begin()->second;
	camera3_buffer_status status = CAMERA3_BUFFER_STATUS_OK;
	std::unique_ptr<CameraMetadata> resultMetadata;

	if (request->status() != Request::RequestComplete) {
		LOG(HAL, Error) << "Request not succesfully completed: "
				<< request->status();
		status = CAMERA3_BUFFER_STATUS_ERROR;
	}

	/* Prepare to call back the Android camera stack. */
	Camera3RequestDescriptor *descriptor =
		reinterpret_cast<Camera3RequestDescriptor *>(request->cookie());

	camera3_capture_result_t captureResult = {};
	captureResult.frame_number = descriptor->frameNumber;
	captureResult.num_output_buffers = descriptor->numBuffers;
	for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
		/*
		 * \todo Currently we only support one capture buffer. Prepare
		 * all of them to be ready once we support more.
		 */
		descriptor->buffers[i].acquire_fence = -1;
		descriptor->buffers[i].release_fence = -1;
		descriptor->buffers[i].status = status;
	}
	captureResult.output_buffers =
		const_cast<const camera3_stream_buffer_t *>(descriptor->buffers);

	if (status == CAMERA3_BUFFER_STATUS_OK) {
		notifyShutter(descriptor->frameNumber,
			      buffer->metadata().timestamp);

		captureResult.partial_result = 1;
		resultMetadata = getResultMetadata(descriptor->frameNumber,
						   buffer->metadata().timestamp);
		captureResult.result = resultMetadata->get();
	}

	if (status == CAMERA3_BUFFER_STATUS_ERROR || !captureResult.result) {
		/*
		 * \todo Improve error handling. In case we notify an error
		 * because the metadata generation fails, a shutter event has
		 * already been notified for this frame number before the error
		 * is signalled here. Make sure the error path plays well with
		 * the camera stack state machine.
		 */
		notifyError(descriptor->frameNumber,
			    descriptor->buffers[0].stream);
	}

	callbacks_->process_capture_result(callbacks_, &captureResult);

	delete descriptor;
	delete buffer;
}

void CameraDevice::notifyShutter(uint32_t frameNumber, uint64_t timestamp)
{
	camera3_notify_msg_t notify = {};

	notify.type = CAMERA3_MSG_SHUTTER;
	notify.message.shutter.frame_number = frameNumber;
	notify.message.shutter.timestamp = timestamp;

	callbacks_->notify(callbacks_, &notify);
}

void CameraDevice::notifyError(uint32_t frameNumber, camera3_stream_t *stream)
{
	camera3_notify_msg_t notify = {};

	notify.type = CAMERA3_MSG_ERROR;
	notify.message.error.error_stream = stream;
	notify.message.error.frame_number = frameNumber;
	notify.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;

	callbacks_->notify(callbacks_, &notify);
}

/*
 * Produce a set of fixed result metadata.
 */
std::unique_ptr<CameraMetadata> CameraDevice::getResultMetadata(int frame_number,
								int64_t timestamp)
{
	/*
	 * \todo Keep this in sync with the actual number of entries.
	 * Currently: 12 entries, 36 bytes
	 */
	std::unique_ptr<CameraMetadata> resultMetadata =
		std::make_unique<CameraMetadata>(15, 50);
	if (!resultMetadata->isValid()) {
		LOG(HAL, Error) << "Failed to allocate static metadata";
		return nullptr;
	}

	const uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED;
	resultMetadata->addEntry(ANDROID_CONTROL_AE_STATE, &ae_state, 1);

	const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
	resultMetadata->addEntry(ANDROID_CONTROL_AE_LOCK, &ae_lock, 1);

	uint8_t af_state = ANDROID_CONTROL_AF_STATE_INACTIVE;
	resultMetadata->addEntry(ANDROID_CONTROL_AF_STATE, &af_state, 1);

	const uint8_t awb_state = ANDROID_CONTROL_AWB_STATE_CONVERGED;
	resultMetadata->addEntry(ANDROID_CONTROL_AWB_STATE, &awb_state, 1);

	const uint8_t awb_lock = ANDROID_CONTROL_AWB_LOCK_OFF;
	resultMetadata->addEntry(ANDROID_CONTROL_AWB_LOCK, &awb_lock, 1);

	const uint8_t lens_state = ANDROID_LENS_STATE_STATIONARY;
	resultMetadata->addEntry(ANDROID_LENS_STATE, &lens_state, 1);

	int32_t sensorSizes[] = {
		0, 0, 2560, 1920,
	};
	resultMetadata->addEntry(ANDROID_SCALER_CROP_REGION, sensorSizes, 4);

	resultMetadata->addEntry(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);

	/* 33.3 msec */
	const int64_t rolling_shutter_skew = 33300000;
	resultMetadata->addEntry(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
				 &rolling_shutter_skew, 1);

	/* 16.6 msec */
	const int64_t exposure_time = 16600000;
	resultMetadata->addEntry(ANDROID_SENSOR_EXPOSURE_TIME,
				 &exposure_time, 1);

	const uint8_t lens_shading_map_mode =
				ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
	resultMetadata->addEntry(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
				 &lens_shading_map_mode, 1);

	const uint8_t scene_flicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
	resultMetadata->addEntry(ANDROID_STATISTICS_SCENE_FLICKER,
				 &scene_flicker, 1);

	/*
	 * Return the result metadata pack even if it is not valid: get() will
	 * return nullptr.
	 */
	if (!resultMetadata->isValid()) {
		LOG(HAL, Error) << "Failed to construct result metadata";
	}

	return resultMetadata;
}