path: root/utils/raspberrypi/ctt/ctt_geq.py
# SPDX-License-Identifier: BSD-2-Clause
#
# Copyright (C) 2019, Raspberry Pi (Trading) Limited
#
# ctt_geq.py - camera tuning tool for GEQ (green equalisation)

from ctt_tools import *
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize as optimize


"""
Uses green differences in macbeth patches to fit green equalisation threshold
model. Ideally, all macbeth chart centres would fall below the threshold as
these should be corrected by geq.
"""
def geq_fit(Cam, plot):
    imgs = Cam.imgs
    """
    green equalisation to mitigate mazing.
    Fits geq model by looking at difference
    between greens in macbeth patches
    """
    geqs = np.array([geq(Cam, Img)*Img.againQ8_norm for Img in imgs])
    Cam.log += '\nProcessed all images'
    geqs = geqs.reshape((-1, 2))
    """
    data is sorted by green difference and top half is selected since higher
    green difference data define the decision boundary.
    """
    geqs = np.array(sorted(geqs, key=lambda r: np.abs((r[1]-r[0])/r[0])))

    length = len(geqs)
    g0 = geqs[length//2:, 0]
    g1 = geqs[length//2:, 1]
    gdiff = np.abs(g0-g1)
    """
    find linear fit by minimising asymmetric least square errors
    in order to cover most of the macbeth images.
    the philosophy here is that every macbeth patch should fall within the
    threshold, hence the upper bound approach
    """
    def f(params):
        m, c = params
        a = gdiff - (m*g0+c)
        """
        asymmetric square error returns:
            1.95 * a**2 if a is positive
            0.05 * a**2 if a is negative
        """
        return np.sum(a**2 + 0.95*np.abs(a)*a)

    initial_guess = [0.01, 500]
    """
    Nelder-Mead is usually not the most desirable optimisation method
    but has been chosen here due to its robustness to undifferentiability
    (is that a word?)
    """
    result = optimize.minimize(f, initial_guess, method='Nelder-Mead')
    """
    need to check if the fit worked correectly
    """
    if result.success:
        slope, offset = result.x
        Cam.log += '\nFit result: slope = {:.5f} '.format(slope)
        Cam.log += 'offset = {}'.format(int(offset))
        """
        optional plotting code
        """
        if plot:
            x = np.linspace(0, max(g0)*1.1, 100)
            y = slope*x + offset
            plt.title('GEQ Asymmetric \'Upper Bound\' Fit')
            plt.plot(x, y, color='red', ls='--', label='fit')
            plt.scatter(g0, gdiff, color='b', label='data')
            plt.ylabel('Difference in green channels')
            plt.xlabel('Green value')

        """
        This upper bound asymmetric gives correct order of magnitude values.
        The pipeline approximates a 1st derivative of a gaussian with some
        linear piecewise functions, introducing arbitrary cutoffs. For
        pessimistic geq, the model parameters have been increased by a
        scaling factor/constant.

        Feel free to tune these or edit the json files directly if you
        belive there are still mazing effects left (threshold too low) or if you
        think it is being overcorrected (threshold too high).
        We have gone for a one size fits most approach that will produce
        acceptable results in most applications.
        """
        slope *= 1.5
        offset += 201
        Cam.log += '\nFit after correction factors: slope = {:.5f}'.format(slope)
        Cam.log += ' offset = {}'.format(int(offset))
        """
        clamp offset at 0 due to pipeline considerations
        """
        if offset < 0:
            Cam.log += '\nOffset raised to 0'
            offset = 0
        """
        optional plotting code
        """
        if plot:
            y2 = slope*x + offset
            plt.plot(x, y2, color='green', ls='--', label='scaled fit')
            plt.grid()
            plt.legend()
            plt.show()

        """
    the case where for some reason the fit didn't work correctly

    Transpose data and then least squares linear fit. Transposing data
    makes it robust to many patches where green difference is the same
    since they only contribute to one error minimisation, instead of dragging
    the entire linear fit down.
    """

    else:
        print('\nError! Couldn\'t fit asymmetric lest squares')
        print(result.message)
        Cam.log += '\nWARNING: Asymmetric least squares fit failed! '
        Cam.log += 'Standard fit used could possibly lead to worse results'
        fit = np.polyfit(gdiff, g0, 1)
        offset, slope = -fit[1]/fit[0], 1/fit[0]
        Cam.log += '\nFit result: slope = {:.5f} '.format(slope)
        Cam.log += 'offset = {}'.format(int(offset))
        """
        optional plotting code
        """
        if plot:
            x = np.linspace(0, max(g0)*1.1, 100)
            y = slope*x + offset
            plt.title('GEQ Linear Fit')
            plt.plot(x, y, color='red', ls='--', label='fit')
            plt.scatter(g0, gdiff, color='b', label='data')
            plt.ylabel('Difference in green channels')
            plt.xlabel('Green value')
        """
        Scaling factors (see previous justification)
        The model here will not be an upper bound so scaling factors have
        been increased.
        This method of deriving geq model parameters is extremely arbitrary
        and undesirable.
        """
        slope *= 2.5
        offset += 301
        Cam.log += '\nFit after correction factors: slope = {:.5f}'.format(slope)
        Cam.log += ' offset = {}'.format(int(offset))

        if offset < 0:
            Cam.log += '\nOffset raised to 0'
            offset = 0

        """
        optional plotting code
        """
        if plot:
            y2 = slope*x + offset
            plt.plot(x, y2, color='green', ls='--', label='scaled fit')
            plt.legend()
            plt.grid()
            plt.show()

    return round(slope, 5), int(offset)


""""
Return green channels of macbeth patches
returns g0, g1 where
> g0 is green next to red
> g1 is green next to blue
"""
def geq(Cam, Img):
    Cam.log += '\nProcessing image {}'.format(Img.name)
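    # Select the two green channels from the Bayer-ordered patch data, then
    # average each of the 24 macbeth patches in both.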
    patches = [Img.patches[i] for i in Img.order][1:3]
    g_patches = np.array([(np.mean(patches[0][i]), np.mean(patches[1][i])) for i in range(24)])
    Cam.log += '\n'
    return g_patches
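

"""
Illustrative sketch only, not part of the tuning tool: running this module
directly fits the same asymmetric 'upper bound' line to synthetic data, to
show how the slope/offset pair behaves. The data below is made up and the
names g0_demo, gdiff_demo and demo_loss are hypothetical.
"""
if __name__ == '__main__':
    # Synthetic green values and green differences, loosely shaped like the
    # macbeth patch data processed above.
    g0_demo = np.random.uniform(500, 5000, 50)
    gdiff_demo = 0.01*g0_demo + np.random.uniform(0, 300, 50)

    def demo_loss(params):
        m, c = params
        a = gdiff_demo - (m*g0_demo + c)
        # Same asymmetric penalty as f() above: 1.95*a**2 above the line,
        # 0.05*a**2 below it.
        return np.sum(a**2 + 0.95*np.abs(a)*a)

    res = optimize.minimize(demo_loss, [0.01, 500], method='Nelder-Mead')
    if res.success:
        print('demo slope = {:.5f}, offset = {}'.format(res.x[0], int(res.x[1])))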