/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Based on cam_helper_imx477.cpp
 * Copyright (C) 2020, Raspberry Pi Ltd
 *
 * cam_helper_imx519.cpp - camera helper for imx519 sensor
 * Copyright (C) 2021, Arducam Technology co., Ltd.
 */

#include <assert.h>
#include <cmath>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

#include <libcamera/base/log.h>

#include "cam_helper.h"
#include "md_parser.h"

using namespace RPiController;
using namespace libcamera;
using libcamera::utils::Duration;

namespace libcamera {
LOG_DECLARE_CATEGORY(IPARPI)
}

/*
 * We care about the exposure and analogue gain register pairs, as well as
 * the frame length and line length register pairs. Their I2C addresses from
 * the Sony IMX519 datasheet:
 */
constexpr uint32_t expHiReg = 0x0202;
constexpr uint32_t expLoReg = 0x0203;
constexpr uint32_t gainHiReg = 0x0204;
constexpr uint32_t gainLoReg = 0x0205;
constexpr uint32_t frameLengthHiReg = 0x0340;
constexpr uint32_t frameLengthLoReg = 0x0341;
constexpr uint32_t lineLengthHiReg = 0x0342;
constexpr uint32_t lineLengthLoReg = 0x0343;
constexpr std::initializer_list<uint32_t> registerList =
	{ expHiReg, expLoReg, gainHiReg, gainLoReg, frameLengthHiReg, frameLengthLoReg,
	  lineLengthHiReg, lineLengthLoReg };

class CamHelperImx519 : public CamHelper
{
public:
	CamHelperImx519();
	uint32_t gainCode(double gain) const override;
	double gain(uint32_t gainCode) const override;
	void prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata) override;
	std::pair<uint32_t, uint32_t> getBlanking(Duration &exposure, Duration minFrameDuration,
						  Duration maxFrameDuration) const override;
	void getDelays(int &exposureDelay, int &gainDelay,
		       int &vblankDelay, int &hblankDelay) const override;
	bool sensorEmbeddedDataPresent() const override;

private:
	/*
	 * Smallest difference between the frame length and integration time,
	 * in units of lines.
	 */
	static constexpr int frameIntegrationDiff = 32;
	/* Maximum frame length allowable for long exposure calculations. */
	static constexpr int frameLengthMax = 0xffdc;
	/* Largest long exposure scale factor given as a left shift on the frame length. */
	static constexpr int longExposureShiftMax = 7;

	void populateMetadata(const MdParser::RegisterMap &registers,
			      Metadata &metadata) const override;
};

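/*
 * The base class is given an SMIA embedded-data parser configured with the
 * register list above, together with the minimum frame length / integration
 * time difference for this sensor.
 */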
CamHelperImx519::CamHelperImx519()
	: CamHelper(std::make_unique<MdParserSmia>(registerList), frameIntegrationDiff)
{
}

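/*
 * Analogue gain model used below: gain = 1024 / (1024 - code), the scheme
 * commonly used on Sony IMX sensors. For example, a requested gain of 2.0
 * encodes to a code of 512, and a code of 512 decodes back to 2.0. The
 * encode direction truncates to an integer register value.
 */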
uint32_t CamHelperImx519::gainCode(double gain) const
{
	return static_cast<uint32_t>(1024 - 1024 / gain);
}

double CamHelperImx519::gain(uint32_t gainCode) const
{
	return 1024.0 / (1024 - gainCode);
}

void CamHelperImx519::prepare(libcamera::Span<const uint8_t> buffer, Metadata &metadata)
{
	MdParser::RegisterMap registers;
	DeviceStatus deviceStatus;

	if (metadata.get("device.status", deviceStatus)) {
		LOG(IPARPI, Error) << "DeviceStatus not found from DelayedControls";
		return;
	}

	parseEmbeddedData(buffer, metadata);

	/*
	 * The DeviceStatus struct is first populated with values obtained from
	 * DelayedControls. If this reports a frame length greater than
	 * frameLengthMax, we are using a long exposure mode. Since the long
	 * exposure scale factor is not returned through the embedded data, we
	 * must rely on the exposure lines and frame length values returned by
	 * DelayedControls.
	 *
	 * Otherwise, all values are updated with what is reported in the
	 * embedded data.
	 */
	if (deviceStatus.frameLength > frameLengthMax) {
		DeviceStatus parsedDeviceStatus;

		metadata.get("device.status", parsedDeviceStatus);
		parsedDeviceStatus.shutterSpeed = deviceStatus.shutterSpeed;
		parsedDeviceStatus.frameLength = deviceStatus.frameLength;
		metadata.set("device.status", parsedDeviceStatus);

		LOG(IPARPI, Debug) << "Metadata updated for long exposure: "
				   << parsedDeviceStatus;
	}
}

std::pair<uint32_t, uint32_t> CamHelperImx519::getBlanking(Duration &exposure,
							   Duration minFrameDuration,
							   Duration maxFrameDuration) const
{
	uint32_t frameLength, exposureLines;
	unsigned int shift = 0;

	auto [vblank, hblank] = CamHelper::getBlanking(exposure, minFrameDuration,
						       maxFrameDuration);

	frameLength = mode_.height + vblank;
	Duration lineLength = hblankToLineLength(hblank);

	/*
	 * Check if the calculated frame length needs to be set up for long
	 * exposure mode. This requires a long exposure scale factor, provided
	 * by a shift operation in the sensor.
	 */
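	/*
	 * Worked example (illustrative numbers only): a computed frame length
	 * of about four times frameLengthMax is halved twice before it fits,
	 * giving shift = 2; any shift beyond longExposureShiftMax is clamped,
	 * along with the frame length itself.
	 */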
	while (frameLength > frameLengthMax) {
		if (++shift > longExposureShiftMax) {
			shift = longExposureShiftMax;
			frameLength = frameLengthMax;
			break;
		}
		frameLength >>= 1;
	}

	if (shift) {
		/* Account for any rounding in the scaled frame length value. */
		frameLength <<= shift;
		exposureLines = CamHelperImx519::exposureLines(exposure, lineLength);
		exposureLines = std::min(exposureLines, frameLength - frameIntegrationDiff);
		exposure = CamHelperImx519::exposure(exposureLines, lineLength);
	}

	return { frameLength - mode_.height, hblank };
}

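/*
 * Control delays, expressed in frames: the number of frames after a control
 * is queued through DelayedControls before the sensor actually applies it.
 */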
void CamHelperImx519::getDelays(int &exposureDelay, int &gainDelay,
				int &vblankDelay, int &hblankDelay) const
{
	exposureDelay = 2;
	gainDelay = 2;
	vblankDelay = 3;
	hblankDelay = 3;
}

bool CamHelperImx519::sensorEmbeddedDataPresent() const
{
	return true;
}

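/*
 * Each 16-bit sensor value arrives as a high/low register byte pair in the
 * parsed embedded data, so it is reassembled as hi * 256 + lo before being
 * converted into the units that DeviceStatus expects.
 */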
void CamHelperImx519::populateMetadata(const MdParser::RegisterMap &registers,
				       Metadata &metadata) const
{
	DeviceStatus deviceStatus;

	deviceStatus.lineLength = lineLengthPckToDuration(registers.at(lineLengthHiReg) * 256 +
							  registers.at(lineLengthLoReg));
	deviceStatus.shutterSpeed = exposure(registers.at(expHiReg) * 256 + registers.at(expLoReg),
					     deviceStatus.lineLength);
	deviceStatus.analogueGain = gain(registers.at(gainHiReg) * 256 + registers.at(gainLoReg));
	deviceStatus.frameLength = registers.at(frameLengthHiReg) * 256 + registers.at(frameLengthLoReg);

	metadata.set("device.status", deviceStatus);
}

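/*
 * Register this helper under the "imx519" sensor name so the Raspberry Pi
 * IPA can construct it when that sensor is in use.
 */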
static CamHelper *create()
{
	return new CamHelperImx519();
}

static RegisterCamHelper reg("imx519", &create);