/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2020, Collabora Ltd.
 *     Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
 *
 * gstlibcamerapool.cpp - GStreamer Buffer Pool
 */

#include "gstlibcamerapool.h"

#include <libcamera/stream.h>

#include "gstlibcamera-utils.h"

using namespace libcamera;

enum {
	SIGNAL_BUFFER_NOTIFY,
	N_SIGNALS
};

static guint signals[N_SIGNALS];

struct _GstLibcameraPool {
	GstBufferPool parent;

	GstAtomicQueue *queue;
	GstLibcameraAllocator *allocator;
	Stream *stream;
};

G_DEFINE_TYPE(GstLibcameraPool, gst_libcamera_pool, GST_TYPE_BUFFER_POOL)

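/*
 * Pop a GstBuffer from the pool's internal queue and ask the allocator to
 * prepare it with memory for the stream. Return GST_FLOW_ERROR if either
 * step fails.
 */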
static GstFlowReturn
gst_libcamera_pool_acquire_buffer(GstBufferPool *pool, GstBuffer **buffer,
				  [[maybe_unused]] GstBufferPoolAcquireParams *params)
{
	GstLibcameraPool *self = GST_LIBCAMERA_POOL(pool);
	GstBuffer *buf = GST_BUFFER(gst_atomic_queue_pop(self->queue));
	if (!buf)
		return GST_FLOW_ERROR;

	if (!gst_libcamera_allocator_prepare_buffer(self->allocator, self->stream, buf))
		return GST_FLOW_ERROR;

	*buffer = buf;
	return GST_FLOW_OK;
}

static void
gst_libcamera_pool_reset_buffer(GstBufferPool *pool, GstBuffer *buffer)
{
	GstBufferPoolClass *klass = GST_BUFFER_POOL_CLASS(gst_libcamera_pool_parent_class);

	/* Clear all the memories and pool only the GstBuffer objects. */
	gst_buffer_remove_all_memory(buffer);
	klass->reset_buffer(pool, buffer);
	GST_BUFFER_FLAGS(buffer) = 0;
}

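/*
 * Push the buffer back on the internal queue, and emit the buffer-notify
 * signal when the queue was empty to signal that a buffer is available
 * again.
 */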
static void
gst_libcamera_pool_release_buffer(GstBufferPool *pool, GstBuffer *buffer)
{
	GstLibcameraPool *self = GST_LIBCAMERA_POOL(pool);
	bool do_notify = gst_atomic_queue_length(self->queue) == 0;

	gst_atomic_queue_push(self->queue, buffer);

	if (do_notify)
		g_signal_emit(self, signals[SIGNAL_BUFFER_NOTIFY], 0);
}

static void
gst_libcamera_pool_init(GstLibcameraPool *self)
{
	self->queue = gst_atomic_queue_new(4);
}

static void
gst_libcamera_pool_finalize(GObject *object)
{
	GstLibcameraPool *self = GST_LIBCAMERA_POOL(object);
	GstBuffer *buf;

	while ((buf = GST_BUFFER(gst_atomic_queue_pop(self->queue))))
		gst_buffer_unref(buf);

	gst_atomic_queue_unref(self->queue);
	g_object_unref(self->allocator);

	G_OBJECT_CLASS(gst_libcamera_pool_parent_class)->finalize(object);
}

static void
gst_libcamera_pool_class_init(GstLibcameraPoolClass *klass)
{
	auto *object_class = G_OBJECT_CLASS(klass);
	auto *pool_class = GST_BUFFER_POOL_CLASS(klass);

	object_class->finalize = gst_libcamera_pool_finalize;
	pool_class->start = nullptr;
	pool_class->acquire_buffer = gst_libcamera_pool_acquire_buffer;
	pool_class->reset_buffer = gst_libcamera_pool_reset_buffer;
	pool_class->release_buffer = gst_libcamera_pool_release_buffer;

	signals[SIGNAL_BUFFER_NOTIFY] = g_signal_new("buffer-notify",
						     G_OBJECT_CLASS_TYPE(klass), G_SIGNAL_RUN_LAST,
						     0, nullptr, nullptr, nullptr,
						     G_TYPE_NONE, 0);
}

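/*
 * Create a pool for the given allocator and stream, pre-filled with as many
 * empty GstBuffer objects as the allocator reports for the stream.
 */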
GstLibcameraPool *
gst_libcamera_pool_new(GstLibcameraAllocator *allocator, Stream *stream)
{
	auto *pool = GST_LIBCAMERA_POOL(g_object_new(GST_TYPE_LIBCAMERA_POOL, nullptr));

	pool->allocator = GST_LIBCAMERA_ALLOCATOR(g_object_ref(allocator));
	pool->stream = stream;

	gsize pool_size = gst_libcamera_allocator_get_pool_size(allocator, stream);
	for (gsize i = 0; i < pool_size; i++) {
		GstBuffer *buffer = gst_buffer_new();
		gst_atomic_queue_push(pool->queue, buffer);
	}

	return pool;
}

Stream *
gst_libcamera_pool_get_stream(GstLibcameraPool *self)
{
	return self->stream;
}

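/* The buffer must have been allocated from a GstLibcameraPool. */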
Stream *
gst_libcamera_buffer_get_stream(GstBuffer *buffer)
{
	auto *self = (GstLibcameraPool *)buffer->pool;
	return self->stream;
}

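/*
 * Retrieve the FrameBuffer associated with the first GstMemory of the
 * buffer.
 */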
FrameBuffer *
gst_libcamera_buffer_get_frame_buffer(GstBuffer *buffer)
{
	GstMemory *mem = gst_buffer_peek_memory(buffer, 0);
	return gst_libcamera_memory_get_frame_buffer(mem);
}
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2020, Linaro
 *
 * viewfinder_gl.cpp - OpenGL Viewfinder for rendering with OpenGL shaders
 */

#include "viewfinder_gl.h"

#include <QByteArray>
#include <QFile>
#include <QImage>

#include <libcamera/formats.h>

static const QList<libcamera::PixelFormat> supportedFormats{
	/* YUV - packed (single plane) */
	libcamera::formats::UYVY,
	libcamera::formats::VYUY,
	libcamera::formats::YUYV,
	libcamera::formats::YVYU,
	/* YUV - semi planar (two planes) */
	libcamera::formats::NV12,
	libcamera::formats::NV21,
	libcamera::formats::NV16,
	libcamera::formats::NV61,
	libcamera::formats::NV24,
	libcamera::formats::NV42,
	/* YUV - fully planar (three planes) */
	libcamera::formats::YUV420,
	libcamera::formats::YVU420,
	/* RGB */
	libcamera::formats::ABGR8888,
	libcamera::formats::ARGB8888,
	libcamera::formats::BGRA8888,
	libcamera::formats::RGBA8888,
	libcamera::formats::BGR888,
	libcamera::formats::RGB888,
	/* Raw Bayer 8-bit */
	libcamera::formats::SBGGR8,
	libcamera::formats::SGBRG8,
	libcamera::formats::SGRBG8,
	libcamera::formats::SRGGB8,
	/* Raw Bayer 10-bit packed */
	libcamera::formats::SBGGR10_CSI2P,
	libcamera::formats::SGBRG10_CSI2P,
	libcamera::formats::SGRBG10_CSI2P,
	libcamera::formats::SRGGB10_CSI2P,
	/* Raw Bayer 12-bit packed */
	libcamera::formats::SBGGR12_CSI2P,
	libcamera::formats::SGBRG12_CSI2P,
	libcamera::formats::SGRBG12_CSI2P,
	libcamera::formats::SRGGB12_CSI2P,
};

ViewFinderGL::ViewFinderGL(QWidget *parent)
	: QOpenGLWidget(parent), buffer_(nullptr), data_(nullptr),
	  vertexBuffer_(QOpenGLBuffer::VertexBuffer)
{
}

ViewFinderGL::~ViewFinderGL()
{
	removeShader();
}

const QList<libcamera::PixelFormat> &ViewFinderGL::nativeFormats() const
{
	return supportedFormats;
}

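/*
 * Configure the viewfinder for a new pixel format and frame size. When the
 * format changes, the existing fragment shader is removed and new shader
 * sources are selected for the format.
 */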
int ViewFinderGL::setFormat(const libcamera::PixelFormat &format,
			    const QSize &size)
{
	if (format != format_) {
		/*
		 * If the fragment shader already exists, remove it and create
		 * a new one for the new format.
		 */
		if (shaderProgram_.isLinked()) {
			shaderProgram_.release();
			shaderProgram_.removeShader(fragmentShader_.get());
			fragmentShader_.reset();
		}

		if (!selectFormat(format))
			return -1;

		format_ = format;
	}

	size_ = size;

	updateGeometry();
	return 0;
}

void ViewFinderGL::stop()
{
	if (buffer_) {
		renderComplete(buffer_);
		buffer_ = nullptr;
	}
}

QImage ViewFinderGL::getCurrentImage()
{
	QMutexLocker locker(&mutex_);

	return grabFramebuffer();
}

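/*
 * Record the buffer to be rendered on the next paint event. The previously
 * queued buffer, if any, is marked as render complete.
 */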
void ViewFinderGL::render(libcamera::FrameBuffer *buffer, MappedBuffer *map)
{
	if (buffer->planes().size() != 1) {
		qWarning() << "Multi-planar buffers are not supported";
		return;
	}

	if (buffer_)
		renderComplete(buffer_);

	data_ = static_cast<unsigned char *>(map->memory);
	/*
	 * \todo Get the stride from the buffer instead of computing it naively
	 */
	stride_ = buffer->metadata().planes[0].bytesused / size_.height();
	update();
	buffer_ = buffer;
}

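/*
 * Select the shader sources, defines, texture filters and sub-sampling
 * factors used to render the given pixel format. Return false if the format
 * is not supported.
 */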
bool ViewFinderGL::selectFormat(const libcamera::PixelFormat &format)
{
	bool ret = true;

	/* Set min/mag filters to GL_LINEAR by default. */
	textureMinMagFilters_ = GL_LINEAR;

	/* Use identity.vert as the default vertex shader. */
	vertexShaderFile_ = ":identity.vert";

	fragmentShaderDefines_.clear();

	switch (format) {
	case libcamera::formats::NV12:
		horzSubSample_ = 2;
		vertSubSample_ = 2;
		fragmentShaderDefines_.append("#define YUV_PATTERN_UV");
		fragmentShaderFile_ = ":YUV_2_planes.frag";
		break;
	case libcamera::formats::NV21:
		horzSubSample_ = 2;
		vertSubSample_ = 2;
		fragmentShaderDefines_.append("#define YUV_PATTERN_VU");
		fragmentShaderFile_ = ":YUV_2_planes.frag";
		break;
	case libcamera::formats::NV16:
		horzSubSample_ = 2;
		vertSubSample_ = 1;
		fragmentShaderDefines_.append("#define YUV_PATTERN_UV");
		fragmentShaderFile_ = ":YUV_2_planes.frag";
		break;
	case libcamera::formats::NV61:
		horzSubSample_ = 2;
		vertSubSample_ = 1;
		fragmentShaderDefines_.append("#define YUV_PATTERN_VU");
		fragmentShaderFile_ = ":YUV_2_planes.frag";
		break;
	case libcamera::formats::NV24:
		horzSubSample_ = 1;
		vertSubSample_ = 1;
		fragmentShaderDefines_.append("#define YUV_PATTERN_UV");
		fragmentShaderFile_ = ":YUV_2_planes.frag";
		break;
	case libcamera::formats::NV42:
		horzSubSample_ = 1;
		vertSubSample_ = 1;
		fragmentShaderDefines_.append("#define YUV_PATTERN_VU");
		fragmentShaderFile_ = ":YUV_2_planes.frag";
		break;
	case libcamera::formats::YUV420:
		horzSubSample_ = 2;
		vertSubSample_ = 2;
		fragmentShaderFile_ = ":YUV_3_planes.frag";
		break;
	case libcamera::formats::YVU420:
		horzSubSample_ = 2;
		vertSubSample_ = 2;
		fragmentShaderFile_ = ":YUV_3_planes.frag";
		break;
	case libcamera::formats::UYVY:
		fragmentShaderDefines_.append("#define YUV_PATTERN_UYVY");
		fragmentShaderFile_ = ":YUV_packed.frag";
		break;
	case libcamera::formats::VYUY:
		fragmentShaderDefines_.append("#define YUV_PATTERN_VYUY");
		fragmentShaderFile_ = ":YUV_packed.frag";
		break;
	case libcamera::formats::YUYV:
		fragmentShaderDefines_.append("#define YUV_PATTERN_YUYV");
		fragmentShaderFile_ = ":YUV_packed.frag";
		break;
	case libcamera::formats::YVYU:
		fragmentShaderDefines_.append("#define YUV_PATTERN_YVYU");
		fragmentShaderFile_ = ":YUV_packed.frag";
		break;
	case libcamera::formats::ABGR8888:
		fragmentShaderDefines_.append("#define RGB_PATTERN rgb");
		fragmentShaderFile_ = ":RGB.frag";
		break;
	case libcamera::formats::ARGB8888:
		fragmentShaderDefines_.append("#define RGB_PATTERN bgr");
		fragmentShaderFile_ = ":RGB.frag";
		break;
	case libcamera::formats::BGRA8888:
		fragmentShaderDefines_.append("#define RGB_PATTERN gba");
		fragmentShaderFile_ = ":RGB.frag";
		break;
	case libcamera::formats::RGBA8888:
		fragmentShaderDefines_.append("#define RGB_PATTERN abg");
		fragmentShaderFile_ = ":RGB.frag";
		break;
	case libcamera::formats::BGR888:
		fragmentShaderDefines_.append("#define RGB_PATTERN rgb");
		fragmentShaderFile_ = ":RGB.frag";
		break;
	case libcamera::formats::RGB888:
		fragmentShaderDefines_.append("#define RGB_PATTERN bgr");
		fragmentShaderFile_ = ":RGB.frag";
		break;
	case libcamera::formats::SBGGR8:
		firstRed_.setX(1.0);
		firstRed_.setY(1.0);
		vertexShaderFile_ = ":bayer_8.vert";
		fragmentShaderFile_ = ":bayer_8.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SGBRG8:
		firstRed_.setX(0.0);
		firstRed_.setY(1.0);
		vertexShaderFile_ = ":bayer_8.vert";
		fragmentShaderFile_ = ":bayer_8.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SGRBG8:
		firstRed_.setX(1.0);
		firstRed_.setY(0.0);
		vertexShaderFile_ = ":bayer_8.vert";
		fragmentShaderFile_ = ":bayer_8.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SRGGB8:
		firstRed_.setX(0.0);
		firstRed_.setY(0.0);
		vertexShaderFile_ = ":bayer_8.vert";
		fragmentShaderFile_ = ":bayer_8.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SBGGR10_CSI2P:
		firstRed_.setX(1.0);
		firstRed_.setY(1.0);
		fragmentShaderDefines_.append("#define RAW10P");
		fragmentShaderFile_ = ":bayer_1x_packed.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SGBRG10_CSI2P:
		firstRed_.setX(0.0);
		firstRed_.setY(1.0);
		fragmentShaderDefines_.append("#define RAW10P");
		fragmentShaderFile_ = ":bayer_1x_packed.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SGRBG10_CSI2P:
		firstRed_.setX(1.0);
		firstRed_.setY(0.0);
		fragmentShaderDefines_.append("#define RAW10P");
		fragmentShaderFile_ = ":bayer_1x_packed.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SRGGB10_CSI2P:
		firstRed_.setX(0.0);
		firstRed_.setY(0.0);
		fragmentShaderDefines_.append("#define RAW10P");
		fragmentShaderFile_ = ":bayer_1x_packed.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SBGGR12_CSI2P:
		firstRed_.setX(1.0);
		firstRed_.setY(1.0);
		fragmentShaderDefines_.append("#define RAW12P");
		fragmentShaderFile_ = ":bayer_1x_packed.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SGBRG12_CSI2P:
		firstRed_.setX(0.0);
		firstRed_.setY(1.0);
		fragmentShaderDefines_.append("#define RAW12P");
		fragmentShaderFile_ = ":bayer_1x_packed.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SGRBG12_CSI2P:
		firstRed_.setX(1.0);
		firstRed_.setY(0.0);
		fragmentShaderDefines_.append("#define RAW12P");
		fragmentShaderFile_ = ":bayer_1x_packed.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	case libcamera::formats::SRGGB12_CSI2P:
		firstRed_.setX(0.0);
		firstRed_.setY(0.0);
		fragmentShaderDefines_.append("#define RAW12P");
		fragmentShaderFile_ = ":bayer_1x_packed.frag";
		textureMinMagFilters_ = GL_NEAREST;
		break;
	default:
		ret = false;
		qWarning() << "[ViewFinderGL]:"
			   << "format not supported.";
		break;
	}

	return ret;
}

bool ViewFinderGL::createVertexShader()
{
	/* Create Vertex Shader */
	vertexShader_ = std::make_unique<QOpenGLShader>(QOpenGLShader::Vertex, this);

	/* Compile the vertex shader */
	if (!vertexShader_->compileSourceFile(vertexShaderFile_)) {
		qWarning() << "[ViewFinderGL]:" << vertexShader_->log();
		return false;
	}

	shaderProgram_.addShader(vertexShader_.get());
	return true;
}

bool ViewFinderGL::createFragmentShader()
{
	int attributeVertex;
	int attributeTexture;

	/*
	 * Create the fragment shader, compile it, and add it to the shader
	 * program. The #define macros stored in fragmentShaderDefines_, if
	 * any, are prepended to the source code.
	 */
	fragmentShader_ = std::make_unique<QOpenGLShader>(QOpenGLShader::Fragment, this);

	QFile file(fragmentShaderFile_);
	if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
		qWarning() << "Shader" << fragmentShaderFile_ << "not found";
		return false;
	}

	QString defines = fragmentShaderDefines_.join('\n') + "\n";
	QByteArray src = file.readAll();
	src.prepend(defines.toUtf8());

	if (!fragmentShader_->compileSourceCode(src)) {
		qWarning() << "[ViewFinderGL]:" << fragmentShader_->log();
		return false;
	}

	shaderProgram_.addShader(fragmentShader_.get());

	/* Link shader pipeline */
	if (!shaderProgram_.link()) {
		qWarning() << "[ViewFinderGL]:" << shaderProgram_.log();
		close();
	}

	/* Bind shader pipeline for use */
	if (!shaderProgram_.bind()) {
		qWarning() << "[ViewFinderGL]:" << shaderProgram_.log();
		close();
	}

	attributeVertex = shaderProgram_.attributeLocation("vertexIn");
	attributeTexture = shaderProgram_.attributeLocation("textureIn");

	shaderProgram_.enableAttributeArray(attributeVertex);
	shaderProgram_.setAttributeBuffer(attributeVertex,
					  GL_FLOAT,
					  0,
					  2,
					  2 * sizeof(GLfloat));

	shaderProgram_.enableAttributeArray(attributeTexture);
	shaderProgram_.setAttributeBuffer(attributeTexture,
					  GL_FLOAT,
					  8 * sizeof(GLfloat),
					  2,
					  2 * sizeof(GLfloat));

	textureUniformY_ = shaderProgram_.uniformLocation("tex_y");
	textureUniformU_ = shaderProgram_.uniformLocation("tex_u");
	textureUniformV_ = shaderProgram_.uniformLocation("tex_v");
	textureUniformStep_ = shaderProgram_.uniformLocation("tex_step");
	textureUniformSize_ = shaderProgram_.uniformLocation("tex_size");
	textureUniformBayerFirstRed_ = shaderProgram_.uniformLocation("tex_bayer_first_red");

	/* Create the textures. */
	for (std::unique_ptr<QOpenGLTexture> &texture : textures_) {
		if (texture)
			continue;

		texture = std::make_unique<QOpenGLTexture>(QOpenGLTexture::Target2D);
		texture->create();
	}

	return true;
}

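/* Apply the per-format min/mag filters and clamp-to-edge wrapping. */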
void ViewFinderGL::configureTexture(QOpenGLTexture &texture)
{
	glBindTexture(GL_TEXTURE_2D, texture.textureId());
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,
			textureMinMagFilters_);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
			textureMinMagFilters_);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}

void ViewFinderGL::removeShader()
{
	if (shaderProgram_.isLinked()) {
		shaderProgram_.release();
		shaderProgram_.removeAllShaders();
	}
}

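/*
 * One-time GL setup: upload the quad vertex and texture coordinates and
 * compile the vertex shader. The format-specific fragment shader is created
 * later, in paintGL().
 */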
void ViewFinderGL::initializeGL()
{
	initializeOpenGLFunctions();
	glEnable(GL_TEXTURE_2D);
	glDisable(GL_DEPTH_TEST);

	static const GLfloat coordinates[2][4][2]{
		{
			/* Vertex coordinates */
			{ -1.0f, -1.0f },
			{ -1.0f, +1.0f },
			{ +1.0f, +1.0f },
			{ +1.0f, -1.0f },
		},
		{
			/* Texture coordinates */
			{ 0.0f, 1.0f },
			{ 0.0f, 0.0f },
			{ 1.0f, 0.0f },
			{ 1.0f, 1.0f },
		},
	};

	vertexBuffer_.create();
	vertexBuffer_.bind();
	vertexBuffer_.allocate(coordinates, sizeof(coordinates));

	/* Create Vertex Shader */
	if (!createVertexShader())
		qWarning() << "[ViewFinderGL]: create vertex shader failed.";

	glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
}

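/*
 * Upload the current frame to the textures required by the active format
 * and set the corresponding shader uniforms.
 */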
void ViewFinderGL::doRender()
{
	switch (format_) {
	case libcamera::formats::NV12:
	case libcamera::formats::NV21:
	case libcamera::formats::NV16:
	case libcamera::formats::NV61:
	case libcamera::formats::NV24:
	case libcamera::formats::NV42:
		/* Activate texture Y */
		glActiveTexture(GL_TEXTURE0);
		configureTexture(*textures_[0]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RED,
			     size_.width(),
			     size_.height(),
			     0,
			     GL_RED,
			     GL_UNSIGNED_BYTE,
			     data_);
		shaderProgram_.setUniformValue(textureUniformY_, 0);

		/* Activate texture UV/VU */
		glActiveTexture(GL_TEXTURE1);
		configureTexture(*textures_[1]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RG,
			     size_.width() / horzSubSample_,
			     size_.height() / vertSubSample_,
			     0,
			     GL_RG,
			     GL_UNSIGNED_BYTE,
			     data_ + size_.width() * size_.height());
		shaderProgram_.setUniformValue(textureUniformU_, 1);
		break;

	case libcamera::formats::YUV420:
		/* Activate texture Y */
		glActiveTexture(GL_TEXTURE0);
		configureTexture(*textures_[0]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RED,
			     size_.width(),
			     size_.height(),
			     0,
			     GL_RED,
			     GL_UNSIGNED_BYTE,
			     data_);
		shaderProgram_.setUniformValue(textureUniformY_, 0);

		/* Activate texture U */
		glActiveTexture(GL_TEXTURE1);
		configureTexture(*textures_[1]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RED,
			     size_.width() / horzSubSample_,
			     size_.height() / vertSubSample_,
			     0,
			     GL_RED,
			     GL_UNSIGNED_BYTE,
			     data_ + size_.width() * size_.height());
		shaderProgram_.setUniformValue(textureUniformU_, 1);

		/* Activate texture V */
		glActiveTexture(GL_TEXTURE2);
		configureTexture(*textures_[2]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RED,
			     size_.width() / horzSubSample_,
			     size_.height() / vertSubSample_,
			     0,
			     GL_RED,
			     GL_UNSIGNED_BYTE,
			     data_ + size_.width() * size_.height() * 5 / 4);
		shaderProgram_.setUniformValue(textureUniformV_, 2);
		break;

	case libcamera::formats::YVU420:
		/* Activate texture Y */
		glActiveTexture(GL_TEXTURE0);
		configureTexture(*textures_[0]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RED,
			     size_.width(),
			     size_.height(),
			     0,
			     GL_RED,
			     GL_UNSIGNED_BYTE,
			     data_);
		shaderProgram_.setUniformValue(textureUniformY_, 0);

		/* Activate texture V */
		glActiveTexture(GL_TEXTURE2);
		configureTexture(*textures_[2]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RED,
			     size_.width() / horzSubSample_,
			     size_.height() / vertSubSample_,
			     0,
			     GL_RED,
			     GL_UNSIGNED_BYTE,
			     data_ + size_.width() * size_.height());
		shaderProgram_.setUniformValue(textureUniformV_, 2);

		/* Activate texture U */
		glActiveTexture(GL_TEXTURE1);
		configureTexture(*textures_[1]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RED,
			     size_.width() / horzSubSample_,
			     size_.height() / vertSubSample_,
			     0,
			     GL_RED,
			     GL_UNSIGNED_BYTE,
			     data_ + size_.width() * size_.height() * 5 / 4);
		shaderProgram_.setUniformValue(textureUniformU_, 1);
		break;

	case libcamera::formats::UYVY:
	case libcamera::formats::VYUY:
	case libcamera::formats::YUYV:
	case libcamera::formats::YVYU:
		/*
		 * Packed YUV formats are stored in an RGBA texture to match
		 * the OpenGL texel size with the 4-byte repeating pattern in
		 * YUV. The texture width is thus half of the image width.
		 */
		glActiveTexture(GL_TEXTURE0);
		configureTexture(*textures_[0]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RGBA,
			     size_.width() / 2,
			     size_.height(),
			     0,
			     GL_RGBA,
			     GL_UNSIGNED_BYTE,
			     data_);
		shaderProgram_.setUniformValue(textureUniformY_, 0);

		/*
		 * The shader needs the step between two texture pixels in the
		 * horizontal direction, expressed in texture coordinate units
		 * ([0, 1]). There are exactly width - 1 steps between the
		 * leftmost and rightmost texels.
		 */
		shaderProgram_.setUniformValue(textureUniformStep_,
					       1.0f / (size_.width() / 2 - 1),
					       1.0f /* not used */);
		break;

	case libcamera::formats::ABGR8888:
	case libcamera::formats::ARGB8888:
	case libcamera::formats::BGRA8888:
	case libcamera::formats::RGBA8888:
		glActiveTexture(GL_TEXTURE0);
		configureTexture(*textures_[0]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RGBA,
			     size_.width(),
			     size_.height(),
			     0,
			     GL_RGBA,
			     GL_UNSIGNED_BYTE,
			     data_);
		shaderProgram_.setUniformValue(textureUniformY_, 0);
		break;

	case libcamera::formats::BGR888:
	case libcamera::formats::RGB888:
		glActiveTexture(GL_TEXTURE0);
		configureTexture(*textures_[0]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RGB,
			     size_.width(),
			     size_.height(),
			     0,
			     GL_RGB,
			     GL_UNSIGNED_BYTE,
			     data_);
		shaderProgram_.setUniformValue(textureUniformY_, 0);
		break;

	case libcamera::formats::SBGGR8:
	case libcamera::formats::SGBRG8:
	case libcamera::formats::SGRBG8:
	case libcamera::formats::SRGGB8:
	case libcamera::formats::SBGGR10_CSI2P:
	case libcamera::formats::SGBRG10_CSI2P:
	case libcamera::formats::SGRBG10_CSI2P:
	case libcamera::formats::SRGGB10_CSI2P:
	case libcamera::formats::SBGGR12_CSI2P:
	case libcamera::formats::SGBRG12_CSI2P:
	case libcamera::formats::SGRBG12_CSI2P:
	case libcamera::formats::SRGGB12_CSI2P:
		/*
		 * Raw Bayer 8-bit and packed raw Bayer 10-bit/12-bit formats
		 * are stored in a GL_RED texture. The texture width is equal
		 * to the stride.
		 */
		glActiveTexture(GL_TEXTURE0);
		configureTexture(*textures_[0]);
		glTexImage2D(GL_TEXTURE_2D,
			     0,
			     GL_RED,
			     stride_,
			     size_.height(),
			     0,
			     GL_RED,
			     GL_UNSIGNED_BYTE,
			     data_);
		shaderProgram_.setUniformValue(textureUniformY_, 0);
		shaderProgram_.setUniformValue(textureUniformBayerFirstRed_,
					       firstRed_);
		shaderProgram_.setUniformValue(textureUniformSize_,
					       size_.width(), /* in pixels */
					       size_.height());
		shaderProgram_.setUniformValue(textureUniformStep_,
					       1.0f / (stride_ - 1),
					       1.0f / (size_.height() - 1));
		break;

	default:
		break;
	}
}

void ViewFinderGL::paintGL()
{
	if (!fragmentShader_ && !createFragmentShader())
		qWarning() << "[ViewFinderGL]:"
			   << "create fragment shader failed.";

	if (data_) {
		glClearColor(0.0, 0.0, 0.0, 1.0);
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

		doRender();
		glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
	}
}

void ViewFinderGL::resizeGL(int w, int h)
{
	glViewport(0, 0, w, h);
}

QSize ViewFinderGL::sizeHint() const
{
	return size_.isValid() ? size_ : QSize(640, 480);
}