/* SPDX-License-Identifier: BSD-2-Clause */ /* * Copyright (C) 2019-2021, Raspberry Pi Ltd * * Raspberry Pi VC4/BCM2835 ISP IPA. */ #include #include #include #include #include #include #include #include "common/ipa_base.h" #include "controller/af_status.h" #include "controller/agc_algorithm.h" #include "controller/alsc_status.h" #include "controller/awb_status.h" #include "controller/black_level_status.h" #include "controller/ccm_status.h" #include "controller/contrast_status.h" #include "controller/denoise_algorithm.h" #include "controller/denoise_status.h" #include "controller/dpc_status.h" #include "controller/geq_status.h" #include "controller/lux_status.h" #include "controller/noise_status.h" #include "controller/sharpen_status.h" namespace libcamera { LOG_DECLARE_CATEGORY(IPARPI) namespace ipa::RPi { class IpaVc4 final : public IpaBase { public: IpaVc4() : IpaBase(), lsTable_(nullptr) { } ~IpaVc4() { if (lsTable_) munmap(lsTable_, MaxLsGridSize); } private: int32_t platformInit(const InitParams ¶ms, InitResult *result) override; int32_t platformStart(const ControlList &controls, StartResult *result) override; int32_t platformConfigure(const ConfigParams ¶ms, ConfigResult *result) override; void platformPrepareIsp(const PrepareParams ¶ms, RPiController::Metadata &rpiMetadata) override; RPiController::StatisticsPtr platformProcessStats(Span mem) override; void handleControls(const ControlList &controls) override; bool validateIspControls(); void applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls); void applyDG(const struct AgcPrepareStatus *dgStatus, ControlList &ctrls); void applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls); void applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, ControlList &ctrls); void applyGamma(const struct ContrastStatus *contrastStatus, ControlList &ctrls); void applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls); void applyDenoise(const struct DenoiseStatus *denoiseStatus, ControlList &ctrls); void applySharpen(const struct SharpenStatus *sharpenStatus, ControlList &ctrls); void applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls); void applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls); void applyAF(const struct AfStatus *afStatus, ControlList &lensCtrls); void resampleTable(uint16_t dest[], const std::vector &src, int destW, int destH); /* VC4 ISP controls. */ ControlInfoMap ispCtrls_; /* LS table allocation passed in from the pipeline handler. */ SharedFD lsTableHandle_; void *lsTable_; }; int32_t IpaVc4::platformInit([[maybe_unused]] const InitParams ¶ms, [[maybe_unused]] InitResult *result) { const std::string &target = controller_.getTarget(); if (target != "bcm2835") { LOG(IPARPI, Error) << "Tuning data file target returned \"" << target << "\"" << ", expected \"bcm2835\""; return -EINVAL; } return 0; } int32_t IpaVc4::platformStart([[maybe_unused]] const ControlList &controls, [[maybe_unused]] StartResult *result) { return 0; } int32_t IpaVc4::platformConfigure(const ConfigParams ¶ms, [[maybe_unused]] ConfigResult *result) { ispCtrls_ = params.ispControls; if (!validateIspControls()) { LOG(IPARPI, Error) << "ISP control validation failed."; return -1; } /* Store the lens shading table pointer and handle if available. */ if (params.lsTableHandle.isValid()) { /* Remove any previous table, if there was one. */ if (lsTable_) { munmap(lsTable_, MaxLsGridSize); lsTable_ = nullptr; } /* Map the LS table buffer into user space. */ lsTableHandle_ = std::move(params.lsTableHandle); if (lsTableHandle_.isValid()) { lsTable_ = mmap(nullptr, MaxLsGridSize, PROT_READ | PROT_WRITE, MAP_SHARED, lsTableHandle_.get(), 0); if (lsTable_ == MAP_FAILED) { LOG(IPARPI, Error) << "dmaHeap mmap failure for LS table."; lsTable_ = nullptr; } } } return 0; } void IpaVc4::platformPrepareIsp([[maybe_unused]] const PrepareParams ¶ms, RPiController::Metadata &rpiMetadata) { ControlList ctrls(ispCtrls_); /* Lock the metadata buffer to avoid constant locks/unlocks. */ std::unique_lock lock(rpiMetadata); AwbStatus *awbStatus = rpiMetadata.getLocked("awb.status"); if (awbStatus) applyAWB(awbStatus, ctrls); CcmStatus *ccmStatus = rpiMetadata.getLocked("ccm.status"); if (ccmStatus) applyCCM(ccmStatus, ctrls); AgcPrepareStatus *dgStatus = rpiMetadata.getLocked("agc.prepare_status"); if (dgStatus) applyDG(dgStatus, ctrls); AlscStatus *lsStatus = rpiMetadata.getLocked("alsc.status"); if (lsStatus) applyLS(lsStatus, ctrls); ContrastStatus *contrastStatus = rpiMetadata.getLocked("contrast.status"); if (contrastStatus) applyGamma(contrastStatus, ctrls); BlackLevelStatus *blackLevelStatus = rpiMetadata.getLocked("black_level.status"); if (blackLevelStatus) applyBlackLevel(blackLevelStatus, ctrls); GeqStatus *geqStatus = rpiMetadata.getLocked("geq.status"); if (geqStatus) applyGEQ(geqStatus, ctrls); DenoiseStatus *denoiseStatus = rpiMetadata.getLocked("denoise.status"); if (denoiseStatus) applyDenoise(denoiseStatus, ctrls); SharpenStatus *sharpenStatus = rpiMetadata.getLocked("sharpen.status"); if (sharpenStatus) applySharpen(sharpenStatus, ctrls); DpcStatus *dpcStatus = rpiMetadata.getLocked("dpc.status"); if (dpcStatus) applyDPC(dpcStatus, ctrls); const AfStatus *afStatus = rpiMetadata.getLocked("af.status"); if (afStatus) { ControlList lensctrls(lensCtrls_); applyAF(afStatus, lensctrls); if (!lensctrls.empty()) setLensControls.emit(lensctrls); } if (!ctrls.empty()) setIspControls.emit(ctrls); } RPiController::StatisticsPtr IpaVc4::platformProcessStats(Span mem) { using namespace RPiController; const bcm2835_isp_stats *stats = reinterpret_cast(mem.data()); StatisticsPtr statistics = std::make_shared(Statistics::AgcStatsPos::PreWb, Statistics::ColourStatsPos::PostLsc); const Controller::HardwareConfig &hw = controller_.getHardwareConfig(); unsigned int i; /* RGB histograms are not used, so do not populate them. */ statistics->yHist = RPiController::Histogram(stats->hist[0].g_hist, hw.numHistogramBins); /* All region sums are based on a 16-bit normalised pipeline bit-depth. */ unsigned int scale = Statistics::NormalisationFactorPow2 - hw.pipelineWidth; statistics->awbRegions.init(hw.awbRegions); for (i = 0; i < statistics->awbRegions.numRegions(); i++) statistics->awbRegions.set(i, { { stats->awb_stats[i].r_sum << scale, stats->awb_stats[i].g_sum << scale, stats->awb_stats[i].b_sum << scale }, stats->awb_stats[i].counted, stats->awb_stats[i].notcounted }); RPiController::AgcAlgorithm *agc = dynamic_cast( controller_.getAlgorithm("agc")); if (!agc) { LOG(IPARPI, Debug) << "No AGC algorithm - not copying statistics"; statistics->agcRegions.init(0); } else { statistics->agcRegions.init(hw.agcRegions); const std::vector &weights = agc->getWeights(); for (i = 0; i < statistics->agcRegions.numRegions(); i++) { uint64_t rSum = (stats->agc_stats[i].r_sum << scale) * weights[i]; uint64_t gSum = (stats->agc_stats[i].g_sum << scale) * weights[i]; uint64_t bSum = (stats->agc_stats[i].b_sum << scale) * weights[i]; uint32_t counted = stats->agc_stats[i].counted * weights[i]; uint32_t notcounted = stats->agc_stats[i].notcounted * weights[i]; statistics->agcRegions.set(i, { { rSum, gSum, bSum }, counted, notcounted }); } } statistics->focusRegions.init(hw.focusRegions); for (i = 0; i < statistics->focusRegions.numRegions(); i++) statistics->focusRegions.set(i, { stats->focus_stats[i].contrast_val[1][1] / 1000, stats->focus_stats[i].contrast_val_num[1][1], stats->focus_stats[i].contrast_val_num[1][0] }); if (statsMetadataOutput_) { Span statsSpan(reinterpret_cast(stats), sizeof(bcm2835_isp_stats)); libcameraMetadata_.set(controls::rpi::Bcm2835StatsOutput, statsSpan); } return statistics; } void IpaVc4::handleControls(const ControlList &controls) { static const std::map DenoiseModeTable = { { controls::draft::NoiseReductionModeOff, RPiController::DenoiseMode::Off }, { controls::draft::NoiseReductionModeFast, RPiController::DenoiseMode::ColourFast }, { controls::draft::NoiseReductionModeHighQuality, RPiController::DenoiseMode::ColourHighQuality }, { controls::draft::NoiseReductionModeMinimal, RPiController::DenoiseMode::ColourOff }, { controls::draft::NoiseReductionModeZSL, RPiController::DenoiseMode::ColourHighQuality }, }; for (auto const &ctrl : controls) { switch (ctrl.first) { case controls::draft::NOISE_REDUCTION_MODE: { RPiController::DenoiseAlgorithm *sdn = dynamic_cast( controller_.getAlgorithm("SDN")); /* Some platforms may have a combined "denoise" algorithm instead. */ if (!sdn) sdn = dynamic_cast( controller_.getAlgorithm("denoise")); if (!sdn) { LOG(IPARPI, Warning) << "Could not set NOISE_REDUCTION_MODE - no SDN algorithm"; return; } int32_t idx = ctrl.second.get(); auto mode = DenoiseModeTable.find(idx); if (mode != DenoiseModeTable.end()) sdn->setMode(mode->second); break; } } } } bool IpaVc4::validateIspControls() { static const uint32_t ctrls[] = { V4L2_CID_RED_BALANCE, V4L2_CID_BLUE_BALANCE, V4L2_CID_DIGITAL_GAIN, V4L2_CID_USER_BCM2835_ISP_CC_MATRIX, V4L2_CID_USER_BCM2835_ISP_GAMMA, V4L2_CID_USER_BCM2835_ISP_BLACK_LEVEL, V4L2_CID_USER_BCM2835_ISP_GEQ, V4L2_CID_USER_BCM2835_ISP_DENOISE, V4L2_CID_USER_BCM2835_ISP_SHARPEN, V4L2_CID_USER_BCM2835_ISP_DPC, V4L2_CID_USER_BCM2835_ISP_LENS_SHADING, V4L2_CID_USER_BCM2835_ISP_CDN, }; for (auto c : ctrls) { if (ispCtrls_.find(c) == ispCtrls_.end()) { LOG(IPARPI, Error) << "Unable to find ISP control " << utils::hex(c); return false; } } return true; } void IpaVc4::applyAWB(const struct AwbStatus *awbStatus, ControlList &ctrls) { LOG(IPARPI, Debug) << "Applying WB R: " << awbStatus->gainR << " B: " << awbStatus->gainB; ctrls.set(V4L2_CID_RED_BALANCE, static_cast(awbStatus->gainR * 1000)); ctrls.set(V4L2_CID_BLUE_BALANCE, static_cast(awbStatus->gainB * 1000)); } void IpaVc4::applyDG(const struct AgcPrepareStatus *dgStatus, ControlList &ctrls) { ctrls.set(V4L2_CID_DIGITAL_GAIN, static_cast(dgStatus->digitalGain * 1000)); } void IpaVc4::applyCCM(const struct CcmStatus *ccmStatus, ControlList &ctrls) { bcm2835_isp_custom_ccm ccm; for (int i = 0; i < 9; i++) { ccm.ccm.ccm[i / 3][i % 3].den = 1000; ccm.ccm.ccm[i / 3][i % 3].num = 1000 * ccmStatus->matrix[i]; } ccm.enabled = 1; ccm.ccm.offsets[0] = ccm.ccm.offsets[1] = ccm.ccm.offsets[2] = 0; ControlValue c(Span{ reinterpret_cast(&ccm), sizeof(ccm) }); ctrls.set(V4L2_CID_USER_BCM2835_ISP_CC_MATRIX, c); } void IpaVc4::applyBlackLevel(const struct BlackLevelStatus *blackLevelStatus, ControlList &ctrls) { bcm2835_isp_black_level blackLevel; blackLevel.enabled = 1; blackLevel.black_level_r = blackLevelStatus->blackLevelR; blackLevel.black_level_g = blackLevelStatus->blackLevelG; blackLevel.black_level_b = blackLevelStatus->blackLevelB; ControlValue c(Span{ reinterpret_cast(&blackLevel), sizeof(blackLevel) }); ctrls.set(V4L2_CID_USER_BCM2835_ISP_BLACK_LEVEL, c); } void IpaVc4::applyGamma(const struct ContrastStatus *contrastStatus, ControlList &ctrls) { const unsigned int numGammaPoints = controller_.getHardwareConfig().numGammaPoints; struct bcm2835_isp_gamma gamma; for (unsigned int i = 0; i < numGammaPoints - 1; i++) { int x = i < 16 ? i * 1024 : (i < 24 ? (i - 16) * 2048 + 16384 : (i - 24) * 4096 + 32768); gamma.x[i] = x; gamma.y[i] = std::min(65535, contrastStatus->gammaCurve.eval(x)); } gamma.x[numGammaPoints - 1] = 65535; gamma.y[numGammaPoints - 1] = 65535; gamma.enabled = 1; ControlValue c(Span{ reinterpret_cast(&gamma), sizeof(gamma) }); ctrls.set(V4L2_CID_USER_BCM2835_ISP_GAMMA, c); } void IpaVc4::applyGEQ(const struct GeqStatus *geqStatus, ControlList &ctrls) { bcm2835_isp_geq geq; geq.enabled = 1; geq.offset = geqStatus->offset; geq.slope.den = 1000; geq.slope.num = 1000 * geqStatus->slope; ControlValue c(Span{ reinterpret_cast(&geq), sizeof(geq) }); ctrls.set(V4L2_CID_USER_BCM2835_ISP_GEQ, c); } void IpaVc4::applyDenoise(const struct DenoiseStatus *denoiseStatus, ControlList &ctrls) { using RPiController::DenoiseMode; bcm2835_isp_denoise denoise; DenoiseMode mode = static_cast(denoiseStatus->mode); denoise.enabled = mode != DenoiseMode::Off; denoise.constant = denoiseStatus->noiseConstant; denoise.slope.num = 1000 * denoiseStatus->noiseSlope; denoise.slope.den = 1000; denoise.strength.num = 1000 * denoiseStatus->strength; denoise.strength.den = 1000; /* Set the CDN mode to match the SDN operating mode. */ bcm2835_isp_cdn cdn; switch (mode) { case DenoiseMode::ColourFast: cdn.enabled = 1; cdn.mode = CDN_MODE_FAST; break; case DenoiseMode::ColourHighQuality: cdn.enabled = 1; cdn.mode = CDN_MODE_HIGH_QUALITY; break; default: cdn.enabled = 0; } ControlValue c(Span{ reinterpret_cast(&denoise), sizeof(denoise) }); ctrls.set(V4L2_CID_USER_BCM2835_ISP_DENOISE, c); c = ControlValue(Span{ reinterpret_cast(&cdn), sizeof(cdn) }); ctrls.set(V4L2_CID_USER_BCM2835_ISP_CDN, c); } void IpaVc4::applySharpen(const struct SharpenStatus *sharpenStatus, ControlList &ctrls) { bcm2835_isp_sharpen sharpen; sharpen.enabled = 1; sharpen.threshold.num = 1000 * sharpenStatus->threshold; sharpen.threshold.den = 1000; sharpen.strength.num = 1000 * sharpenStatus->strength; sharpen.strength.den = 1000; sharpen.limit.num = 1000 * sharpenStatus->limit; sharpen.limit.den = 1000; ControlValue c(Span{ reinterpret_cast(&sharpen), sizeof(sharpen) }); ctrls.set(V4L2_CID_USER_BCM2835_ISP_SHARPEN, c); } void IpaVc4::applyDPC(const struct DpcStatus *dpcStatus, ControlList &ctrls) { bcm2835_isp_dpc dpc; dpc.enabled = 1; dpc.strength = dpcStatus->strength; ControlValue c(Span{ reinterpret_cast(&dpc), sizeof(dpc) }); ctrls.set(V4L2_CID_USER_BCM2835_ISP_DPC, c); } void IpaVc4::applyLS(const struct AlscStatus *lsStatus, ControlList &ctrls) { /* * Program lens shading tables into pipeline. * Choose smallest cell size that won't exceed 63x48 cells. */ const int cellSizes[] = { 16, 32, 64, 128, 256 }; unsigned int numCells = std::size(cellSizes); unsigned int i, w, h, cellSize; for (i = 0; i < numCells; i++) { cellSize = cellSizes[i]; w = (mode_.width + cellSize - 1) / cellSize; h = (mode_.height + cellSize - 1) / cellSize; if (w < 64 && h <= 48) break; } if (i == numCells) { LOG(IPARPI, Error) << "Cannot find cell size"; return; } /* We're going to supply corner sampled tables, 16 bit samples. */ w++, h++; bcm2835_isp_lens_shading ls = { .enabled = 1, .grid_cell_size = cellSize, .grid_width = w, .grid_stride = w, .grid_height = h, /* .dmabuf will be filled in by pipeline handler. */ .dmabuf = 0, .ref_transform = 0, .corner_sampled = 1, .gain_format = GAIN_FORMAT_U4P10 }; if (!lsTable_ || w * h * 4 * sizeof(uint16_t) > MaxLsGridSize) { LOG(IPARPI, Error) << "Do not have a correctly allocate lens shading table!"; return; } if (lsStatus) { /* Format will be u4.10 */ uint16_t *grid = static_cast(lsTable_); resampleTable(grid, lsStatus->r, w, h); resampleTable(grid + w * h, lsStatus->g, w, h); memcpy(grid + 2 * w * h, grid + w * h, w * h * sizeof(uint16_t)); resampleTable(grid + 3 * w * h, lsStatus->b, w, h); } ControlValue c(Span{ reinterpret_cast(&ls), sizeof(ls) }); ctrls.set(V4L2_CID_USER_BCM2835_ISP_LENS_SHADING, c); } void IpaVc4::applyAF(const struct AfStatus *afStatus, ControlList &lensCtrls) { if (afStatus->lensSetting) { ControlValue v(afStatus->lensSetting.value()); lensCtrls.set(V4L2_CID_FOCUS_ABSOLUTE, v); } } /* * Resamples a 16x12 table with central sampling to destW x destH with corner * sampling. */ void IpaVc4::resampleTable(uint16_t dest[], const std::vector &src, int destW, int destH) { /* * Precalculate and cache the x sampling locations and phases to * save recomputing them on every row. */ assert(destW > 1 && destH > 1 && destW <= 64); int xLo[64], xHi[64]; double xf[64]; double x = -0.5, xInc = 16.0 / (destW - 1); for (int i = 0; i < destW; i++, x += xInc) { xLo[i] = floor(x); xf[i] = x - xLo[i]; xHi[i] = xLo[i] < 15 ? xLo[i] + 1 : 15; xLo[i] = xLo[i] > 0 ? xLo[i] : 0; } /* Now march over the output table generating the new values. */ double y = -0.5, yInc = 12.0 / (destH - 1); for (int j = 0; j < destH; j++, y += yInc) { int yLo = floor(y); double yf = y - yLo; int yHi = yLo < 11 ? yLo + 1 : 11; yLo = yLo > 0 ? yLo : 0; double const *rowAbove = src.data() + yLo * 16; double const *rowBelow = src.data() + yHi * 16; for (int i = 0; i < destW; i++) { double above = rowAbove[xLo[i]] * (1 - xf[i]) + rowAbove[xHi[i]] * xf[i]; double below = rowBelow[xLo[i]] * (1 - xf[i]) + rowBelow[xHi[i]] * xf[i]; int result = floor(1024 * (above * (1 - yf) + below * yf) + .5); *(dest++) = result > 16383 ? 16383 : result; /* want u4.10 */ } } } } /* namespace ipa::RPi */ /* * External IPA module interface */ extern "C" { const struct IPAModuleInfo ipaModuleInfo = { IPA_MODULE_API_VERSION, 1, "rpi/vc4", "rpi/vc4", }; IPAInterface *ipaCreate() { return new ipa::RPi::IpaVc4(); } } /* extern "C" */ } /* namespace libcamera */ ot;" Image class load image from raw data and extracts metadata. Once image is extracted from data, it finds 24 16x16 patches for each channel, centred at the macbeth chart squares """ class Image: def __init__(self, buf): self.buf = buf self.patches = None self.saturated = False ''' obtain metadata from buffer ''' def get_meta(self): self.ver = ba_to_b(self.buf[4:5]) self.w = ba_to_b(self.buf[0xd0:0xd2]) self.h = ba_to_b(self.buf[0xd2:0xd4]) self.pad = ba_to_b(self.buf[0xd4:0xd6]) self.fmt = self.buf[0xf5] self.sigbits = 2*self.fmt + 4 self.pattern = self.buf[0xf4] self.exposure = ba_to_b(self.buf[0x90:0x94]) self.againQ8 = ba_to_b(self.buf[0x94:0x96]) self.againQ8_norm = self.againQ8/256 camName = self.buf[0x10:0x10+128] camName_end = camName.find(0x00) self.camName = self.buf[0x10:0x10+128][:camName_end].decode() """ Channel order depending on bayer pattern """ bayer_case = { 0: (0, 1, 2, 3), # red 1: (2, 0, 3, 1), # green next to red 2: (3, 2, 1, 0), # green next to blue 3: (1, 0, 3, 2), # blue 128: (0, 1, 2, 3) # arbitrary order for greyscale casw } self.order = bayer_case[self.pattern] ''' manual blacklevel - not robust ''' if 'ov5647' in self.camName: self.blacklevel = 16 else: self.blacklevel = 64 self.blacklevel_16 = self.blacklevel << (6) return 1 ''' print metadata for debug ''' def print_meta(self): print('\nData:') print(' ver = {}'.format(self.ver)) print(' w = {}'.format(self.w)) print(' h = {}'.format(self.h)) print(' pad = {}'.format(self.pad)) print(' fmt = {}'.format(self.fmt)) print(' sigbits = {}'.format(self.sigbits)) print(' pattern = {}'.format(self.pattern)) print(' exposure = {}'.format(self.exposure)) print(' againQ8 = {}'.format(self.againQ8)) print(' againQ8_norm = {}'.format(self.againQ8_norm)) print(' camName = {}'.format(self.camName)) print(' blacklevel = {}'.format(self.blacklevel)) print(' blacklevel_16 = {}'.format(self.blacklevel_16)) return 1 """ get image from raw scanline data """ def get_image(self, raw): self.dptr = [] """ check if data is 10 or 12 bits """ if self.sigbits == 10: """ calc length of scanline """ lin_len = ((((((self.w+self.pad+3)>>2)) * 5)+31)>>5) * 32 """ stack scan lines into matrix """ raw = np.array(raw).reshape(-1, lin_len).astype(np.int64)[:self.h, ...] """ separate 5 bits in each package, stopping when w is satisfied """ ba0 = raw[..., 0:5*((self.w+3)>>2):5] ba1 = raw[..., 1:5*((self.w+3)>>2):5] ba2 = raw[..., 2:5*((self.w+3)>>2):5] ba3 = raw[..., 3:5*((self.w+3)>>2):5] ba4 = raw[..., 4:5*((self.w+3)>>2):5] """ assemble 10 bit numbers """ ch0 = np.left_shift((np.left_shift(ba0, 2) + (ba4 % 4)), 6) ch1 = np.left_shift((np.left_shift(ba1, 2) + (np.right_shift(ba4, 2) % 4)), 6) ch2 = np.left_shift((np.left_shift(ba2, 2) + (np.right_shift(ba4, 4) % 4)), 6) ch3 = np.left_shift((np.left_shift(ba3, 2) + (np.right_shift(ba4, 6) % 4)), 6) """ interleave bits """ mat = np.empty((self.h, self.w), dtype=ch0.dtype) mat[..., 0::4] = ch0 mat[..., 1::4] = ch1 mat[..., 2::4] = ch2 mat[..., 3::4] = ch3 """ There is som eleaking memory somewhere in the code. This code here seemed to make things good enough that the code would run for reasonable numbers of images, however this is techincally just a workaround. (sorry) """ ba0, ba1, ba2, ba3, ba4 = None, None, None, None, None del ba0, ba1, ba2, ba3, ba4 ch0, ch1, ch2, ch3 = None, None, None, None del ch0, ch1, ch2, ch3 """ same as before but 12 bit case """ elif self.sigbits == 12: lin_len = ((((((self.w+self.pad+1)>>1)) * 3)+31)>>5) * 32 raw = np.array(raw).reshape(-1, lin_len).astype(np.int64)[:self.h, ...] ba0 = raw[..., 0:3*((self.w+1)>>1):3] ba1 = raw[..., 1:3*((self.w+1)>>1):3] ba2 = raw[..., 2:3*((self.w+1)>>1):3] ch0 = np.left_shift((np.left_shift(ba0, 4) + ba2 % 16), 4) ch1 = np.left_shift((np.left_shift(ba1, 4) + (np.right_shift(ba2, 4)) % 16), 4) mat = np.empty((self.h, self.w), dtype=ch0.dtype) mat[..., 0::2] = ch0 mat[..., 1::2] = ch1 else: """ data is neither 10 nor 12 or incorrect data """ print('ERROR: wrong bit format, only 10 or 12 bit supported') return 0 """ separate bayer channels """ c0 = mat[0::2, 0::2] c1 = mat[0::2, 1::2] c2 = mat[1::2, 0::2] c3 = mat[1::2, 1::2] self.channels = [c0, c1, c2, c3] return 1 """ obtain 16x16 patch centred at macbeth square centre for each channel """ def get_patches(self, cen_coords, size=16): """ obtain channel widths and heights """ ch_w, ch_h = self.w, self.h cen_coords = list(np.array((cen_coords[0])).astype(np.int32)) self.cen_coords = cen_coords """ squares are ordered by stacking macbeth chart columns from left to right. Some useful patch indices: white = 3 black = 23 'reds' = 9, 10 'blues' = 2, 5, 8, 20, 22 'greens' = 6, 12, 17 greyscale = 3, 7, 11, 15, 19, 23 """ all_patches = [] for ch in self.channels: ch_patches = [] for cen in cen_coords: ''' macbeth centre is placed at top left of central 2x2 patch to account for rounding Patch pixels are sorted by pixel brightness so spatial information is lost. ''' patch = ch[cen[1]-7:cen[1]+9, cen[0]-7:cen[0]+9].flatten() patch.sort() if patch[-5] == (2**self.sigbits-1)*2**(16-self.sigbits): self.saturated = True ch_patches.append(patch) # print('\nNew Patch\n') all_patches.append(ch_patches) # print('\n\nNew Channel\n\n') self.patches = all_patches return 1 def brcm_load_image(Cam, im_str): """ Load image where raw data and metadata is in the BRCM format """ try: """ create byte array """ with open(im_str, 'rb') as image: f = image.read() b = bytearray(f) """ return error if incorrect image address """ except FileNotFoundError: print('\nERROR:\nInvalid image address') Cam.log += '\nWARNING: Invalid image address' return 0 """ return error if problem reading file """ if f is None: print('\nERROR:\nProblem reading file') Cam.log += '\nWARNING: Problem readin file' return 0 # print('\nLooking for EOI and BRCM header') """ find end of image followed by BRCM header by turning bytearray into hex string and string matching with regexp """ start = -1 match = bytearray(b'\xff\xd9@BRCM') match_str = binascii.hexlify(match) b_str = binascii.hexlify(b) """ note index is divided by two to go from string to hex """ indices = [m.start()//2 for m in re.finditer(match_str, b_str)] # print(indices) try: start = indices[0] + 3 except IndexError: print('\nERROR:\nNo Broadcom header found') Cam.log += '\nWARNING: No Broadcom header found!' return 0 """ extract data after header """ # print('\nExtracting data after header') buf = b[start:start+32768] Img = Image(buf) Img.str = im_str # print('Data found successfully') """ obtain metadata """ # print('\nReading metadata') Img.get_meta() Cam.log += '\nExposure : {} us'.format(Img.exposure) Cam.log += '\nNormalised gain : {}'.format(Img.againQ8_norm) # print('Metadata read successfully') """ obtain raw image data """ # print('\nObtaining raw image data') raw = b[start+32768:] Img.get_image(raw) """ delete raw to stop memory errors """ raw = None del raw # print('Raw image data obtained successfully') return Img def dng_load_image(Cam, im_str): try: Img = Image(None) # RawPy doesn't load all the image tags that we need, so we use py3exiv2 metadata = pyexif.ImageMetadata(im_str) metadata.read() Img.ver = 100 # random value Img.w = metadata['Exif.SubImage1.ImageWidth'].value Img.pad = 0 Img.h = metadata['Exif.SubImage1.ImageLength'].value white = metadata['Exif.SubImage1.WhiteLevel'].value Img.sigbits = int(white).bit_length() Img.fmt = (Img.sigbits - 4) // 2 Img.exposure = int(metadata['Exif.Photo.ExposureTime'].value*1000000) Img.againQ8 = metadata['Exif.Photo.ISOSpeedRatings'].value*256/100 Img.againQ8_norm = Img.againQ8 / 256 Img.camName = metadata['Exif.Image.Model'].value Img.blacklevel = int(metadata['Exif.SubImage1.BlackLevel'].value[0]) Img.blacklevel_16 = Img.blacklevel << (16 - Img.sigbits) bayer_case = { '0 1 1 2': (0, (0, 1, 2, 3)), '1 2 0 1': (1, (2, 0, 3, 1)), '2 1 1 0': (2, (3, 2, 1, 0)), '1 0 2 1': (3, (1, 0, 3, 2)) } cfa_pattern = metadata['Exif.SubImage1.CFAPattern'].value Img.pattern = bayer_case[cfa_pattern][0] Img.order = bayer_case[cfa_pattern][1] # Now use RawPy tp get the raw Bayer pixels raw_im = raw.imread(im_str) raw_data = raw_im.raw_image shift = 16 - Img.sigbits c0 = np.left_shift(raw_data[0::2, 0::2].astype(np.int64), shift) c1 = np.left_shift(raw_data[0::2, 1::2].astype(np.int64), shift) c2 = np.left_shift(raw_data[1::2, 0::2].astype(np.int64), shift) c3 = np.left_shift(raw_data[1::2, 1::2].astype(np.int64), shift) Img.channels = [c0, c1, c2, c3] except Exception: print("\nERROR: failed to load DNG file", im_str) print("Either file does not exist or is incompatible") Cam.log += '\nERROR: DNG file does not exist or is incompatible' raise return Img ''' load image from file location and perform calibration check correct filetype mac boolean is true if image is expected to contain macbeth chart and false if not (alsc images don't have macbeth charts) ''' def load_image(Cam, im_str, mac_config=None, show=False, mac=True, show_meta=False): """ check image is correct filetype """ if '.jpg' in im_str or '.jpeg' in im_str or '.brcm' in im_str or '.dng' in im_str: if '.dng' in im_str: Img = dng_load_image(Cam, im_str) else: Img = brcm_load_image(Cam, im_str) if show_meta: Img.print_meta() if mac: """ find macbeth centres, discarding images that are too dark or light """ av_chan = (np.mean(np.array(Img.channels), axis=0)/(2**16)) av_val = np.mean(av_chan) # print(av_val) if av_val < Img.blacklevel_16/(2**16)+1/64: macbeth = None print('\nError: Image too dark!') Cam.log += '\nWARNING: Image too dark!' else: macbeth = find_macbeth(Cam, av_chan, mac_config) """ if no macbeth found return error """ if macbeth is None: print('\nERROR: No macbeth chart found') return 0 mac_cen_coords = macbeth[1] # print('\nMacbeth centres located successfully') """ obtain image patches """ # print('\nObtaining image patches') Img.get_patches(mac_cen_coords) if Img.saturated: print('\nERROR: Macbeth patches have saturated') Cam.log += '\nWARNING: Macbeth patches have saturated!' return 0 """ clear memory """ Img.buf = None del Img.buf # print('Image patches obtained successfully') """ optional debug """ if show and __name__ == '__main__': copy = sum(Img.channels)/2**18 copy = np.reshape(copy, (Img.h//2, Img.w//2)).astype(np.float64) copy, _ = reshape(copy, 800) represent(copy) return Img """ return error if incorrect filetype """ else: # print('\nERROR:\nInvalid file extension') return 0 """ bytearray splice to number little endian """ def ba_to_b(b): total = 0 for i in range(len(b)): total += 256**i * b[i] return total