/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2020, Collabora Ltd.
 *     Author: Nicolas Dufresne
 *
 * gstlibcamera-utils.c - GStreamer libcamera Utility Function
 */

#include "gstlibcamera-utils.h"

#include <libcamera/control_ids.h>
#include <libcamera/formats.h>

using namespace libcamera;

static struct {
	GstVideoFormat gst_format;
	PixelFormat format;
} format_map[] = {
	/* Compressed */
	{ GST_VIDEO_FORMAT_ENCODED, formats::MJPEG },

	/* RGB16 */
	{ GST_VIDEO_FORMAT_RGB16, formats::RGB565 },

	/* RGB24 */
	{ GST_VIDEO_FORMAT_RGB, formats::BGR888 },
	{ GST_VIDEO_FORMAT_BGR, formats::RGB888 },

	/* RGB32 */
	{ GST_VIDEO_FORMAT_BGRx, formats::XRGB8888 },
	{ GST_VIDEO_FORMAT_RGBx, formats::XBGR8888 },
	{ GST_VIDEO_FORMAT_xBGR, formats::RGBX8888 },
	{ GST_VIDEO_FORMAT_xRGB, formats::BGRX8888 },
	{ GST_VIDEO_FORMAT_BGRA, formats::ARGB8888 },
	{ GST_VIDEO_FORMAT_RGBA, formats::ABGR8888 },
	{ GST_VIDEO_FORMAT_ABGR, formats::RGBA8888 },
	{ GST_VIDEO_FORMAT_ARGB, formats::BGRA8888 },

	/* YUV Semiplanar */
	{ GST_VIDEO_FORMAT_NV12, formats::NV12 },
	{ GST_VIDEO_FORMAT_NV21, formats::NV21 },
	{ GST_VIDEO_FORMAT_NV16, formats::NV16 },
	{ GST_VIDEO_FORMAT_NV61, formats::NV61 },
	{ GST_VIDEO_FORMAT_NV24, formats::NV24 },

	/* YUV Planar */
	{ GST_VIDEO_FORMAT_I420, formats::YUV420 },
	{ GST_VIDEO_FORMAT_YV12, formats::YVU420 },
	{ GST_VIDEO_FORMAT_Y42B, formats::YUV422 },

	/* YUV Packed */
	{ GST_VIDEO_FORMAT_UYVY, formats::UYVY },
	{ GST_VIDEO_FORMAT_VYUY, formats::VYUY },
	{ GST_VIDEO_FORMAT_YUY2, formats::YUYV },
	{ GST_VIDEO_FORMAT_YVYU, formats::YVYU },

	/* \todo NV42 is used in libcamera but is not mapped in GStreamer yet. */
};

static GstVideoColorimetry
colorimetry_from_colorspace(const ColorSpace &colorSpace)
{
	GstVideoColorimetry colorimetry;

	switch (colorSpace.primaries) {
	case ColorSpace::Primaries::Raw:
		colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
		break;
	case ColorSpace::Primaries::Smpte170m:
		colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
		break;
	case ColorSpace::Primaries::Rec709:
		colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
		break;
	case ColorSpace::Primaries::Rec2020:
		colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
		break;
	}

	switch (colorSpace.transferFunction) {
	case ColorSpace::TransferFunction::Linear:
		colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA10;
		break;
	case ColorSpace::TransferFunction::Srgb:
		colorimetry.transfer = GST_VIDEO_TRANSFER_SRGB;
		break;
	case ColorSpace::TransferFunction::Rec709:
		colorimetry.transfer = GST_VIDEO_TRANSFER_BT709;
		break;
	}

	switch (colorSpace.ycbcrEncoding) {
	case ColorSpace::YcbcrEncoding::None:
		colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
		break;
	case ColorSpace::YcbcrEncoding::Rec601:
		colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
		break;
	case ColorSpace::YcbcrEncoding::Rec709:
		colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT709;
		break;
	case ColorSpace::YcbcrEncoding::Rec2020:
		colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
		break;
	}

	switch (colorSpace.range) {
	case ColorSpace::Range::Full:
		colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
		break;
	case ColorSpace::Range::Limited:
		colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235;
		break;
	}

	return colorimetry;
}

static std::optional<ColorSpace>
colorspace_from_colorimetry(const GstVideoColorimetry &colorimetry)
{
	std::optional<ColorSpace> colorspace = ColorSpace::Raw;

	switch (colorimetry.primaries) {
	case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
		/* Unknown primaries map to the raw colorspace in GStreamer */
		return ColorSpace::Raw;
	case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
		colorspace->primaries = ColorSpace::Primaries::Smpte170m;
		break;
	case GST_VIDEO_COLOR_PRIMARIES_BT709:
		colorspace->primaries = ColorSpace::Primaries::Rec709;
		break;
	case GST_VIDEO_COLOR_PRIMARIES_BT2020:
		colorspace->primaries = ColorSpace::Primaries::Rec2020;
		break;
	default:
		GST_WARNING("Colorimetry primaries %d not mapped in gstlibcamera",
			    colorimetry.primaries);
		return std::nullopt;
	}

	switch (colorimetry.transfer) {
	/* Transfer function mappings inspired by the v4l2src plugin */
	case GST_VIDEO_TRANSFER_GAMMA18:
	case GST_VIDEO_TRANSFER_GAMMA20:
	case GST_VIDEO_TRANSFER_GAMMA22:
	case GST_VIDEO_TRANSFER_GAMMA28:
		GST_WARNING("GAMMA 18, 20, 22, 28 transfer functions not supported");
		/* fallthrough */
	case GST_VIDEO_TRANSFER_GAMMA10:
		colorspace->transferFunction = ColorSpace::TransferFunction::Linear;
		break;
	case GST_VIDEO_TRANSFER_SRGB:
		colorspace->transferFunction = ColorSpace::TransferFunction::Srgb;
		break;
#if GST_CHECK_VERSION(1, 18, 0)
	case GST_VIDEO_TRANSFER_BT601:
	case GST_VIDEO_TRANSFER_BT2020_10:
#endif
	case GST_VIDEO_TRANSFER_BT2020_12:
	case GST_VIDEO_TRANSFER_BT709:
		colorspace->transferFunction = ColorSpace::TransferFunction::Rec709;
		break;
	default:
		GST_WARNING("Colorimetry transfer function %d not mapped in gstlibcamera",
			    colorimetry.transfer);
		return std::nullopt;
	}

	switch (colorimetry.matrix) {
	case GST_VIDEO_COLOR_MATRIX_RGB:
		colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::None;
		break;
	/* FCC is essentially the same as BT601, with coefficients rounded to fewer digits */
	case GST_VIDEO_COLOR_MATRIX_FCC:
	case GST_VIDEO_COLOR_MATRIX_BT601:
		colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::Rec601;
		break;
	case GST_VIDEO_COLOR_MATRIX_BT709:
		colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::Rec709;
		break;
	case GST_VIDEO_COLOR_MATRIX_BT2020:
		colorspace->ycbcrEncoding = ColorSpace::YcbcrEncoding::Rec2020;
		break;
	default:
		GST_WARNING("Colorimetry matrix %d not mapped in gstlibcamera",
			    colorimetry.matrix);
		return std::nullopt;
	}

	switch (colorimetry.range) {
	case GST_VIDEO_COLOR_RANGE_0_255:
		colorspace->range = ColorSpace::Range::Full;
		break;
	case GST_VIDEO_COLOR_RANGE_16_235:
		colorspace->range = ColorSpace::Range::Limited;
		break;
	default:
		GST_WARNING("Colorimetry range %d not mapped in gstlibcamera",
			    colorimetry.range);
		return std::nullopt;
	}

	return colorspace;
}

static GstVideoFormat
pixel_format_to_gst_format(const PixelFormat &format)
{
	for (const auto &item : format_map) {
		if (item.format == format)
			return item.gst_format;
	}

	return GST_VIDEO_FORMAT_UNKNOWN;
}

static PixelFormat
gst_format_to_pixel_format(GstVideoFormat gst_format)
{
	if (gst_format == GST_VIDEO_FORMAT_ENCODED)
		return PixelFormat{};

	for (const auto &item : format_map)
		if (item.gst_format == gst_format)
			return item.format;

	return PixelFormat{};
}

static GstStructure *
bare_structure_from_format(const PixelFormat &format)
{
	GstVideoFormat gst_format = pixel_format_to_gst_format(format);

	if (gst_format == GST_VIDEO_FORMAT_UNKNOWN)
		return nullptr;

	if (gst_format != GST_VIDEO_FORMAT_ENCODED)
		return gst_structure_new("video/x-raw", "format", G_TYPE_STRING,
					 gst_video_format_to_string(gst_format),
					 nullptr);

	switch (format) {
	case formats::MJPEG:
		return gst_structure_new_empty("image/jpeg");
	default:
		return nullptr;
	}
}

GstCaps *
gst_libcamera_stream_formats_to_caps(const StreamFormats &formats)
{
	GstCaps *caps = gst_caps_new_empty();

	for (PixelFormat pixelformat : formats.pixelformats()) {
		g_autoptr(GstStructure) bare_s = bare_structure_from_format(pixelformat);

		if (!bare_s) {
			GST_WARNING("Unsupported DRM format %" GST_FOURCC_FORMAT,
				    GST_FOURCC_ARGS(pixelformat));
			continue;
		}

		for (const Size &size : formats.sizes(pixelformat)) {
			GstStructure *s = gst_structure_copy(bare_s);
			gst_structure_set(s,
					  "width", G_TYPE_INT, size.width,
					  "height", G_TYPE_INT, size.height,
					  nullptr);
			gst_caps_append_structure(caps, s);
		}

		const SizeRange &range = formats.range(pixelformat);
		if (range.hStep && range.vStep) {
			GstStructure *s = gst_structure_copy(bare_s);
			GValue val = G_VALUE_INIT;

			g_value_init(&val, GST_TYPE_INT_RANGE);
			gst_value_set_int_range_step(&val, range.min.width,
						     range.max.width, range.hStep);
			gst_structure_set_value(s, "width", &val);
			gst_value_set_int_range_step(&val, range.min.height,
						     range.max.height, range.vStep);
			gst_structure_set_value(s, "height", &val);
			g_value_unset(&val);

			gst_caps_append_structure(caps, s);
		}
	}

	return caps;
}

GstCaps *
gst_libcamera_stream_configuration_to_caps(const StreamConfiguration &stream_cfg)
{
	GstCaps *caps = gst_caps_new_empty();
	GstStructure *s = bare_structure_from_format(stream_cfg.pixelFormat);

	gst_structure_set(s,
			  "width", G_TYPE_INT, stream_cfg.size.width,
			  "height", G_TYPE_INT, stream_cfg.size.height,
			  nullptr);

	if (stream_cfg.colorSpace) {
		GstVideoColorimetry colorimetry =
			colorimetry_from_colorspace(stream_cfg.colorSpace.value());
		gchar *colorimetry_str = gst_video_colorimetry_to_string(&colorimetry);

		if (colorimetry_str)
			gst_structure_set(s, "colorimetry", G_TYPE_STRING,
					  colorimetry_str, nullptr);
		else
			g_error("Got invalid colorimetry from ColorSpace: %s",
				ColorSpace::toString(stream_cfg.colorSpace).c_str());
	}

	gst_caps_append_structure(caps, s);

	return caps;
}

void
gst_libcamera_configure_stream_from_caps(StreamConfiguration &stream_cfg,
					 GstCaps *caps)
{
	GstVideoFormat gst_format = pixel_format_to_gst_format(stream_cfg.pixelFormat);
	guint i;
	gint best_fixed = -1, best_in_range = -1;
	GstStructure *s;

	/*
	 * Delta weights are computed as:
	 *   ABS(width - stream_cfg.size.width) * ABS(height - stream_cfg.size.height)
	 */
	guint best_fixed_delta = G_MAXUINT;
	guint best_in_range_delta = G_MAXUINT;

	/* First fixate the caps using default configuration value. */
	g_assert(gst_caps_is_writable(caps));

	/* Lookup the structure for a close match to the stream_cfg.size */
	for (i = 0; i < gst_caps_get_size(caps); i++) {
		s = gst_caps_get_structure(caps, i);

		gint width, height;
		guint delta;

		if (gst_structure_has_field_typed(s, "width", G_TYPE_INT) &&
		    gst_structure_has_field_typed(s, "height", G_TYPE_INT)) {
			gst_structure_get_int(s, "width", &width);
			gst_structure_get_int(s, "height", &height);

			delta = ABS(width - (gint)stream_cfg.size.width) *
				ABS(height - (gint)stream_cfg.size.height);

			if (delta < best_fixed_delta) {
				best_fixed_delta = delta;
				best_fixed = i;
			}
		} else {
			gst_structure_fixate_field_nearest_int(s, "width",
							       stream_cfg.size.width);
			gst_structure_fixate_field_nearest_int(s, "height",
							       stream_cfg.size.height);
			gst_structure_get_int(s, "width", &width);
			gst_structure_get_int(s, "height", &height);

			delta = ABS(width - (gint)stream_cfg.size.width) *
				ABS(height - (gint)stream_cfg.size.height);

			if (delta < best_in_range_delta) {
				best_in_range_delta = delta;
				best_in_range = i;
			}
		}
	}

	/* Prefer reliable fixed value over ranges */
	if (best_fixed >= 0)
		s = gst_caps_get_structure(caps, best_fixed);
	else
		s = gst_caps_get_structure(caps, best_in_range);

	if (gst_structure_has_name(s, "video/x-raw")) {
		const gchar *format = gst_video_format_to_string(gst_format);
		gst_structure_fixate_field_string(s, "format", format);
	}

	/* Then configure the stream with the result. */
	if (gst_structure_has_name(s, "video/x-raw")) {
		const gchar *format = gst_structure_get_string(s, "format");
		gst_format = gst_video_format_from_string(format);
		stream_cfg.pixelFormat = gst_format_to_pixel_format(gst_format);
	} else if (gst_structure_has_name(s, "image/jpeg")) {
		stream_cfg.pixelFormat = formats::MJPEG;
	} else {
		g_critical("Unsupported media type: %s", gst_structure_get_name(s));
	}

	gint width, height;
	gst_structure_get_int(s, "width", &width);
	gst_structure_get_int(s, "height", &height);
	stream_cfg.size.width = width;
	stream_cfg.size.height = height;

	/* Configure colorimetry */
	if (gst_structure_has_field(s, "colorimetry")) {
		const gchar *colorimetry_str = gst_structure_get_string(s, "colorimetry");
		GstVideoColorimetry colorimetry;

		if (!gst_video_colorimetry_from_string(&colorimetry, colorimetry_str))
			g_critical("Invalid colorimetry %s", colorimetry_str);

		stream_cfg.colorSpace = colorspace_from_colorimetry(colorimetry);
	}
}

void
gst_libcamera_get_framerate_from_caps(GstCaps *caps, GstStructure *element_caps)
{
	GstStructure *s = gst_caps_get_structure(caps, 0);
	/*
	 * Default to 30 fps. If the "framerate" fraction is invalid below,
	 * libcamerasrc will set 30 fps as the framerate.
	 */
	gint fps_n = 30, fps_d = 1;

	if (gst_structure_has_field_typed(s, "framerate", GST_TYPE_FRACTION)) {
		if (!gst_structure_get_fraction(s, "framerate", &fps_n, &fps_d))
			GST_WARNING("Invalid framerate in the caps");
	}

	gst_structure_set(element_caps, "framerate", GST_TYPE_FRACTION,
			  fps_n, fps_d, nullptr);
}

void
gst_libcamera_clamp_and_set_frameduration(ControlList &initCtrls,
					  const ControlInfoMap &cam_ctrls,
					  GstStructure *element_caps)
{
	gint fps_caps_n, fps_caps_d;

	if (!gst_structure_has_field_typed(element_caps, "framerate", GST_TYPE_FRACTION))
		return;

	auto iterFrameDuration = cam_ctrls.find(&controls::FrameDurationLimits);
	if (iterFrameDuration == cam_ctrls.end()) {
		GST_WARNING("FrameDurationLimits not found in camera controls.");
		return;
	}

	const GValue *framerate = gst_structure_get_value(element_caps, "framerate");

	fps_caps_n = gst_value_get_fraction_numerator(framerate);
	fps_caps_d = gst_value_get_fraction_denominator(framerate);

	int64_t target_duration = (fps_caps_d * 1000000.0) / fps_caps_n;
	int64_t min_frame_duration = iterFrameDuration->second.min().get<int64_t>();
	int64_t max_frame_duration = iterFrameDuration->second.max().get<int64_t>();
	int64_t frame_duration = std::clamp(target_duration,
					    min_frame_duration,
					    max_frame_duration);

	if (frame_duration != target_duration) {
		gint framerate_clamped = 1000000 / frame_duration;

		/*
		 * Update the clamped framerate which then will be exposed in
		 * downstream caps.
		 */
		gst_structure_set(element_caps, "framerate", GST_TYPE_FRACTION,
				  framerate_clamped, 1, nullptr);
	}

	initCtrls.set(controls::FrameDurationLimits,
		      { frame_duration, frame_duration });
}

void
gst_libcamera_framerate_to_caps(GstCaps *caps, const GstStructure *element_caps)
{
	const GValue *framerate = gst_structure_get_value(element_caps, "framerate");
	if (!GST_VALUE_HOLDS_FRACTION(framerate))
		return;

	GstStructure *s = gst_caps_get_structure(caps, 0);
	gint fps_caps_n, fps_caps_d;

	fps_caps_n = gst_value_get_fraction_numerator(framerate);
	fps_caps_d = gst_value_get_fraction_denominator(framerate);

	gst_structure_set(s, "framerate", GST_TYPE_FRACTION,
			  fps_caps_n, fps_caps_d, nullptr);
}

#if !GST_CHECK_VERSION(1, 17, 1)
gboolean
gst_task_resume(GstTask *task)
{
	/* We only want to resume the task if it's paused. */
	GLibLocker lock(GST_OBJECT(task));
	if (GST_TASK_STATE(task) != GST_TASK_PAUSED)
		return FALSE;

	GST_TASK_STATE(task) = GST_TASK_STARTED;
	GST_TASK_SIGNAL(task);

	return TRUE;
}
#endif

G_LOCK_DEFINE_STATIC(cm_singleton_lock);
static std::weak_ptr<CameraManager> cm_singleton_ptr;

std::shared_ptr<CameraManager>
gst_libcamera_get_camera_manager(int &ret)
{
	std::shared_ptr<CameraManager> cm;

	G_LOCK(cm_singleton_lock);

	cm = cm_singleton_ptr.lock();
	if (!cm) {
		cm = std::make_shared<CameraManager>();
		cm_singleton_ptr = cm;
		ret = cm->start();
	} else {
		ret = 0;
	}

	G_UNLOCK(cm_singleton_lock);

	return cm;
}
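
/*
 * Usage sketch (comment only, not compiled): how a caller such as
 * libcamerasrc might combine the helpers above. The camera selection, the
 * Viewfinder role and the "negotiated_caps" variable are assumptions made
 * purely for illustration, not part of this file's API.
 *
 *	int ret;
 *	std::shared_ptr<CameraManager> cm = gst_libcamera_get_camera_manager(ret);
 *	if (ret || cm->cameras().empty())
 *		return;
 *
 *	std::shared_ptr<Camera> camera = cm->cameras()[0];
 *	std::unique_ptr<CameraConfiguration> config =
 *		camera->generateConfiguration({ StreamRole::Viewfinder });
 *	StreamConfiguration &stream_cfg = config->at(0);
 *
 *	g_autoptr(GstCaps) supported =
 *		gst_libcamera_stream_formats_to_caps(stream_cfg.formats());
 *	if (gst_caps_can_intersect(supported, negotiated_caps))
 *		gst_libcamera_configure_stream_from_caps(stream_cfg,
 *							 negotiated_caps);
 *	config->validate();
 */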
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright (C) 2021, Ideas on Board Oy
 *
 * drm.cpp - DRM/KMS Helpers
 */

#include "drm.h"

#include <algorithm>
#include <dirent.h>
#include <errno.h>
#include <fcntl.h>
#include <iostream>
#include <set>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <sys/types.h>

#include <libcamera/framebuffer.h>
#include <libcamera/geometry.h>
#include <libcamera/pixel_format.h>

#include <libdrm/drm_mode.h>

#include "event_loop.h"

namespace DRM {

Object::Object(Device *dev, uint32_t id, Type type)
	: id_(id), dev_(dev), type_(type)
{
	/* Retrieve properties from the objects that support them. */
	if (type != TypeConnector && type != TypeCrtc &&
	    type != TypeEncoder && type != TypePlane)
		return;

	/*
	 * We can't distinguish between failures due to the object having no
	 * properties and failures due to other conditions. Assume we use the
	 * API correctly and treat the object as having no properties.
	 */
	drmModeObjectProperties *properties = drmModeObjectGetProperties(dev->fd(), id, type);
	if (!properties)
		return;

	properties_.reserve(properties->count_props);
	for (uint32_t i = 0; i < properties->count_props; ++i)
		properties_.emplace_back(properties->props[i],
					 properties->prop_values[i]);

	drmModeFreeObjectProperties(properties);
}

Object::~Object()
{
}

const Property *Object::property(const std::string &name) const
{
	for (const PropertyValue &pv : properties_) {
		const Property *property = static_cast<const Property *>(dev_->object(pv.id()));
		if (property && property->name() == name)
			return property;
	}

	return nullptr;
}

const PropertyValue *Object::propertyValue(const std::string &name) const
{
	for (const PropertyValue &pv : properties_) {
		const Property *property = static_cast<const Property *>(dev_->object(pv.id()));
		if (property && property->name() == name)
			return &pv;
	}

	return nullptr;
}

Property::Property(Device *dev, drmModePropertyRes *property)
	: Object(dev, property->prop_id, TypeProperty),
	  name_(property->name), flags_(property->flags),
	  values_(property->values, property->values + property->count_values),
	  blobs_(property->blob_ids, property->blob_ids + property->count_blobs)
{
	if (drm_property_type_is(property, DRM_MODE_PROP_RANGE))
		type_ = TypeRange;
	else if (drm_property_type_is(property, DRM_MODE_PROP_ENUM))
		type_ = TypeEnum;
	else if (drm_property_type_is(property, DRM_MODE_PROP_BLOB))
		type_ = TypeBlob;
	else if (drm_property_type_is(property, DRM_MODE_PROP_BITMASK))
		type_ = TypeBitmask;
	else if (drm_property_type_is(property, DRM_MODE_PROP_OBJECT))
		type_ = TypeObject;
	else if (drm_property_type_is(property, DRM_MODE_PROP_SIGNED_RANGE))
		type_ = TypeSignedRange;
	else
		type_ = TypeUnknown;

	for (int i = 0; i < property->count_enums; ++i)
		enums_[property->enums[i].value] = property->enums[i].name;
}

Blob::Blob(Device *dev, const libcamera::Span<const uint8_t> &data)
	: Object(dev, 0, Object::TypeBlob)
{
	drmModeCreatePropertyBlob(dev->fd(), data.data(), data.size(), &id_);
}

Blob::~Blob()
{
	if (isValid())
		drmModeDestroyPropertyBlob(device()->fd(), id());
}

Mode::Mode(const drmModeModeInfo &mode)
	: drmModeModeInfo(mode)
{
}

std::unique_ptr<Blob> Mode::toBlob(Device *dev) const
{
	libcamera::Span<const uint8_t> data{ reinterpret_cast<const uint8_t *>(this),
					     sizeof(*this) };
	return std::make_unique<Blob>(dev, data);
}

Crtc::Crtc(Device *dev, const drmModeCrtc *crtc, unsigned int index)
	: Object(dev, crtc->crtc_id, Object::TypeCrtc), index_(index)
{
}

Encoder::Encoder(Device *dev, const drmModeEncoder *encoder)
	: Object(dev, encoder->encoder_id, Object::TypeEncoder),
	  type_(encoder->encoder_type)
{
	const std::list<Crtc> &crtcs = dev->crtcs();
	possibleCrtcs_.reserve(crtcs.size());

	for (const Crtc &crtc : crtcs) {
		if (encoder->possible_crtcs & (1 << crtc.index()))
			possibleCrtcs_.push_back(&crtc);
	}

	possibleCrtcs_.shrink_to_fit();
}

namespace {

const std::map<uint32_t, const char *> connectorTypeNames{
	{ DRM_MODE_CONNECTOR_Unknown, "Unknown" },
	{ DRM_MODE_CONNECTOR_VGA, "VGA" },
	{ DRM_MODE_CONNECTOR_DVII, "DVI-I" },
	{ DRM_MODE_CONNECTOR_DVID, "DVI-D" },
	{ DRM_MODE_CONNECTOR_DVIA, "DVI-A" },
	{ DRM_MODE_CONNECTOR_Composite, "Composite" },
	{ DRM_MODE_CONNECTOR_SVIDEO, "S-Video" },
	{ DRM_MODE_CONNECTOR_LVDS, "LVDS" },
	{ DRM_MODE_CONNECTOR_Component, "Component" },
	{ DRM_MODE_CONNECTOR_9PinDIN, "9-Pin-DIN" },
	{ DRM_MODE_CONNECTOR_DisplayPort, "DP" },
	{ DRM_MODE_CONNECTOR_HDMIA, "HDMI-A" },
	{ DRM_MODE_CONNECTOR_HDMIB, "HDMI-B" },
	{ DRM_MODE_CONNECTOR_TV, "TV" },
	{ DRM_MODE_CONNECTOR_eDP, "eDP" },
	{ DRM_MODE_CONNECTOR_VIRTUAL, "Virtual" },
	{ DRM_MODE_CONNECTOR_DSI, "DSI" },
	{ DRM_MODE_CONNECTOR_DPI, "DPI" },
};

} /* namespace */

Connector::Connector(Device *dev, const drmModeConnector *connector)
	: Object(dev, connector->connector_id, Object::TypeConnector),
	  type_(connector->connector_type)
{
	auto typeName = connectorTypeNames.find(connector->connector_type);
	if (typeName == connectorTypeNames.end()) {
		std::cerr
			<< "Invalid connector type "
			<< connector->connector_type << std::endl;
		typeName = connectorTypeNames.find(DRM_MODE_CONNECTOR_Unknown);
	}

	name_ = std::string(typeName->second) + "-"
	      + std::to_string(connector->connector_type_id);

	switch (connector->connection) {
	case DRM_MODE_CONNECTED:
		status_ = Status::Connected;
		break;

	case DRM_MODE_DISCONNECTED:
		status_ = Status::Disconnected;
		break;

	case DRM_MODE_UNKNOWNCONNECTION:
	default:
		status_ = Status::Unknown;
		break;
	}

	const std::list<Encoder> &encoders = dev->encoders();

	encoders_.reserve(connector->count_encoders);

	for (int i = 0; i < connector->count_encoders; ++i) {
		uint32_t encoderId = connector->encoders[i];
		auto encoder = std::find_if(encoders.begin(), encoders.end(),
					    [=](const Encoder &e) {
						    return e.id() == encoderId;
					    });
		if (encoder == encoders.end()) {
			std::cerr
				<< "Encoder " << encoderId << " not found"
				<< std::endl;
			continue;
		}

		encoders_.push_back(&*encoder);
	}

	encoders_.shrink_to_fit();

	modes_ = { connector->modes, connector->modes + connector->count_modes };
}

Plane::Plane(Device *dev, const drmModePlane *plane)
	: Object(dev, plane->plane_id, Object::TypePlane),
	  possibleCrtcsMask_(plane->possible_crtcs)
{
	formats_ = { plane->formats, plane->formats + plane->count_formats };

	const std::list<Crtc> &crtcs = dev->crtcs();
	possibleCrtcs_.reserve(crtcs.size());

	for (const Crtc &crtc : crtcs) {
		if (plane->possible_crtcs & (1 << crtc.index()))
			possibleCrtcs_.push_back(&crtc);
	}

	possibleCrtcs_.shrink_to_fit();
}

bool Plane::supportsFormat(const libcamera::PixelFormat &format) const
{
	return std::find(formats_.begin(), formats_.end(), format.fourcc())
		!= formats_.end();
}
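
/*
 * Selection sketch (comment only): picking a plane able to scan out a given
 * libcamera format on a given CRTC. This assumes the planes() and
 * possibleCrtcs() accessors declared in drm.h; the "device" and "crtc"
 * variables and the XRGB8888 format are illustrative.
 *
 *	const Plane *candidate = nullptr;
 *
 *	for (const Plane &plane : device.planes()) {
 *		const auto &crtcs = plane.possibleCrtcs();
 *		bool onCrtc = std::find(crtcs.begin(), crtcs.end(), &crtc)
 *			    != crtcs.end();
 *
 *		if (onCrtc && plane.supportsFormat(libcamera::formats::XRGB8888)) {
 *			candidate = &plane;
 *			break;
 *		}
 *	}
 */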

int Plane::setup()
{
	const PropertyValue *pv = propertyValue("type");
	if (!pv)
		return -EINVAL;

	switch (pv->value()) {
	case DRM_PLANE_TYPE_OVERLAY:
		type_ = TypeOverlay;
		break;

	case DRM_PLANE_TYPE_PRIMARY:
		type_ = TypePrimary;
		break;

	case DRM_PLANE_TYPE_CURSOR:
		type_ = TypeCursor;
		break;

	default:
		return -EINVAL;
	}

	return 0;
}

FrameBuffer::FrameBuffer(Device *dev)
	: Object(dev, 0, Object::TypeFb)
{
}

FrameBuffer::~FrameBuffer()
{
	for (const auto &plane : planes_) {
		struct drm_gem_close gem_close = {
			.handle = plane.second.handle,
			.pad = 0,
		};
		int ret;

		do {
			ret = ioctl(device()->fd(), DRM_IOCTL_GEM_CLOSE, &gem_close);
		} while (ret == -1 && (errno == EINTR || errno == EAGAIN));

		if (ret == -1) {
			ret = -errno;
			std::cerr
				<< "Failed to close GEM object: "
				<< strerror(-ret) << std::endl;
		}
	}

	drmModeRmFB(device()->fd(), id());
}

AtomicRequest::AtomicRequest(Device *dev)
	: dev_(dev), valid_(true)
{
	request_ = drmModeAtomicAlloc();
	if (!request_)
		valid_ = false;
}

AtomicRequest::~AtomicRequest()
{
	if (request_)
		drmModeAtomicFree(request_);
}

int AtomicRequest::addProperty(const Object *object, const std::string &property,
			       uint64_t value)
{
	if (!valid_)
		return -EINVAL;

	const Property *prop = object->property(property);
	if (!prop) {
		valid_ = false;
		return -EINVAL;
	}

	return addProperty(object->id(), prop->id(), value);
}

int AtomicRequest::addProperty(const Object *object, const std::string &property,
			       std::unique_ptr<Blob> blob)
{
	if (!valid_)
		return -EINVAL;

	const Property *prop = object->property(property);
	if (!prop) {
		valid_ = false;
		return -EINVAL;
	}

	int ret = addProperty(object->id(), prop->id(), blob->id());
	if (ret < 0)
		return ret;

	blobs_.emplace_back(std::move(blob));

	return 0;
}

int AtomicRequest::addProperty(uint32_t object, uint32_t property, uint64_t value)
{
	int ret = drmModeAtomicAddProperty(request_, object, property, value);
	if (ret < 0) {
		valid_ = false;
		return ret;
	}

	return 0;
}

int AtomicRequest::commit(unsigned int flags)
{
	if (!valid_)
		return -EINVAL;

	uint32_t drmFlags = 0;
	if (flags & FlagAllowModeset)
		drmFlags |= DRM_MODE_ATOMIC_ALLOW_MODESET;
	if (flags & FlagAsync)
		drmFlags |= DRM_MODE_PAGE_FLIP_EVENT | DRM_MODE_ATOMIC_NONBLOCK;
	if (flags & FlagTestOnly)
		drmFlags |= DRM_MODE_ATOMIC_TEST_ONLY;

	return drmModeAtomicCommit(dev_->fd(), request_, drmFlags, this);
}
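
/*
 * Commit sketch (comment only): a minimal atomic modeset built with the
 * helpers above, using the standard KMS property names. The connector,
 * crtc, plane, fb, mode, width and height values are assumed to have been
 * selected by the caller; SRC_* coordinates are in 16.16 fixed point.
 *
 *	AtomicRequest request(&device);
 *
 *	request.addProperty(&connector, "CRTC_ID", crtc.id());
 *	request.addProperty(&crtc, "ACTIVE", 1);
 *	request.addProperty(&crtc, "MODE_ID", mode.toBlob(&device));
 *
 *	request.addProperty(&plane, "CRTC_ID", crtc.id());
 *	request.addProperty(&plane, "FB_ID", fb.id());
 *	request.addProperty(&plane, "SRC_X", 0 << 16);
 *	request.addProperty(&plane, "SRC_Y", 0 << 16);
 *	request.addProperty(&plane, "SRC_W", width << 16);
 *	request.addProperty(&plane, "SRC_H", height << 16);
 *	request.addProperty(&plane, "CRTC_X", 0);
 *	request.addProperty(&plane, "CRTC_Y", 0);
 *	request.addProperty(&plane, "CRTC_W", width);
 *	request.addProperty(&plane, "CRTC_H", height);
 *
 *	int ret = request.commit(AtomicRequest::FlagAllowModeset);
 */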

Device::Device()
	: fd_(-1)
{
}

Device::~Device()
{
	if (fd_ != -1)
		drmClose(fd_);
}

int Device::init()
{
	int ret = openCard();
	if (ret < 0) {
		std::cerr << "Failed to open any DRM/KMS device: "
			  << strerror(-ret) << std::endl;
		return ret;
	}

	/*
	 * Enable the atomic APIs. This also automatically enables the
	 * universal planes API.
	 */
	ret = drmSetClientCap(fd_, DRM_CLIENT_CAP_ATOMIC, 1);
	if (ret < 0) {
		ret = -errno;
		std::cerr
			<< "Failed to enable atomic capability: "
			<< strerror(-ret) << std::endl;
		return ret;
	}

	/* List all the resources. */
	ret = getResources();
	if (ret < 0)
		return ret;

	EventLoop::instance()->addFdEvent(fd_, EventLoop::Read,
					  std::bind(&Device::drmEvent, this));

	return 0;
}
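
/*
 * Bring-up sketch (comment only): typical use of Device before building an
 * AtomicRequest. This assumes the connectors(), status() and modes()
 * accessors declared in drm.h; error handling is abbreviated.
 *
 *	Device device;
 *
 *	int ret = device.init();
 *	if (ret < 0)
 *		return ret;
 *
 *	const Connector *connector = nullptr;
 *	for (const Connector &conn : device.connectors()) {
 *		if (conn.status() == Connector::Status::Connected &&
 *		    !conn.modes().empty()) {
 *			connector = &conn;
 *			break;
 *		}
 *	}
 */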

int Device::openCard()
{
	const std::string dirName = "/dev/dri/";
	int ret = -ENOENT;

	/*
	 * Open the first DRM/KMS device beginning with /dev/dri/card. The
	 * libdrm drmOpen*() functions require either a module name or a bus ID,
	 * which we don't have, so bypass them. The automatic module loading and
 * device node creation from drmOpen() are of no practical use, as any
	 * modern system will handle that through udev or an equivalent
	 * component.
	 */
	DIR *folder = opendir(dirName.c_str());
	if (!folder) {
		ret = -errno;
		std::cerr << "Failed to open " << dirName
			  << " directory: " << strerror(-ret) << std::endl;
		return ret;
	}

	for (struct dirent *res; (res = readdir(folder));) {
		if (strncmp(res->d_name, "card", 4))
			continue;

		const std::string devName = dirName + res->d_name;
		fd_ = open(devName.c_str(), O_RDWR | O_CLOEXEC);
		if (fd_ >= 0) {
			ret = 0;
			break;
		}

		ret = -errno;
		std::cerr << "Failed to open DRM/KMS device " << devName << ": "
			  << strerror(-ret) << std::endl;
	}

	closedir(folder);

	return ret;
}

int Device::getResources()
{
	int ret;

	std::unique_ptr<drmModeRes, decltype(&drmModeFreeResources)> resources{
		drmModeGetResources(fd_),
		&drmModeFreeResources
	};
	if (!resources) {
		ret = -errno;
		std::cerr
			<< "Failed to get DRM/KMS resources: "
			<< strerror(-ret) << std::endl;
		return ret;
	}

	for (int i = 0; i < resources->count_crtcs; ++i) {
		drmModeCrtc *crtc = drmModeGetCrtc(fd_, resources->crtcs[i]);
		if (!crtc) {
			ret = -errno;
			std::cerr
				<< "Failed to get CRTC: " << strerror(-ret)
				<< std::endl;
			return ret;
		}

		crtcs_.emplace_back(this, crtc, i);
		drmModeFreeCrtc(crtc);

		Crtc &obj = crtcs_.back();
		objects_[obj.id()] = &obj;
	}

	for (int i = 0; i < resources->count_encoders; ++i) {
		drmModeEncoder *encoder =
			drmModeGetEncoder(fd_, resources->encoders[i]);
		if (!encoder) {
			ret = -errno;
			std::cerr
				<< "Failed to get encoder: " << strerror(-ret)
				<< std::endl;
			return ret;
		}

		encoders_.emplace_back(this, encoder);
		drmModeFreeEncoder(encoder);

		Encoder &obj = encoders_.back();
		objects_[obj.id()] = &obj;
	}

	for (int i = 0; i < resources->count_connectors; ++i) {
		drmModeConnector *connector =
			drmModeGetConnector(fd_, resources->connectors[i]);
		if (!connector) {
			ret = -errno;
			std::cerr
				<< "Failed to get connector: " << strerror(-ret)
				<< std::endl;
			return ret;
		}

		connectors_.emplace_back(this, connector);
		drmModeFreeConnector(connector);

		Connector &obj = connectors_.back();
		objects_[obj.id()] = &obj;
	}

	std::unique_ptr<drmModePlaneRes, decltype(&drmModeFreePlaneResources)> planes{
		drmModeGetPlaneResources(fd_),
		&drmModeFreePlaneResources
	};
	if (!planes) {
		ret = -errno;
		std::cerr
			<< "Failed to get DRM/KMS planes: "
			<< strerror(-ret) << std::endl;
		return ret;
	}

	for (uint32_t i = 0; i < planes->count_planes; ++i) {
		drmModePlane *plane =
			drmModeGetPlane(fd_, planes->planes[i]);
		if (!plane) {
			ret = -errno;
			std::cerr
				<< "Failed to get plane: " << strerror(-ret)
				<< std::endl;
			return ret;
		}

		planes_.emplace_back(this, plane);
		drmModeFreePlane(plane);

		Plane &obj = planes_.back();
		objects_[obj.id()] = &obj;
	}

	/* Set the possible planes for each CRTC. */
	for (Crtc &crtc : crtcs_) {
		for (const Plane &plane : planes_) {
			if (plane.possibleCrtcsMask_ & (1 << crtc.index()))
				crtc.planes_.push_back(&plane);
		}
	}