[libcamera-devel] [PATCH 1/7] utils: tuning: libtuning: Implement the core of libtuning
Paul Elder
paul.elder at ideasonboard.com
Thu Oct 6 14:00:59 CEST 2022
Implement the core of libtuning, our new tuning tool infrastructure. It
leverages components from Raspberry Pi's ctt that can be reused by tuning
tools for other platforms.
Signed-off-by: Paul Elder <paul.elder@ideasonboard.com>
---
utils/tuning/libtuning/__init__.py | 9 +
utils/tuning/libtuning/average_functions.py | 21 +
utils/tuning/libtuning/generators/__init__.py | 0
.../tuning/libtuning/generators/generator.py | 12 +
utils/tuning/libtuning/gradient.py | 111 +++
utils/tuning/libtuning/image.py | 272 ++++++++
utils/tuning/libtuning/libtuning.py | 191 +++++
utils/tuning/libtuning/macbeth.py | 654 ++++++++++++++++++
utils/tuning/libtuning/macbeth_ref.pgm | 5 +
utils/tuning/libtuning/modules/__init__.py | 0
utils/tuning/libtuning/modules/module.py | 41 ++
utils/tuning/libtuning/parsers/__init__.py | 0
utils/tuning/libtuning/parsers/parser.py | 18 +
utils/tuning/libtuning/smoothing.py | 21 +
utils/tuning/libtuning/utils.py | 198 ++++++
15 files changed, 1553 insertions(+)
create mode 100644 utils/tuning/libtuning/__init__.py
create mode 100644 utils/tuning/libtuning/average_functions.py
create mode 100644 utils/tuning/libtuning/generators/__init__.py
create mode 100644 utils/tuning/libtuning/generators/generator.py
create mode 100644 utils/tuning/libtuning/gradient.py
create mode 100644 utils/tuning/libtuning/image.py
create mode 100644 utils/tuning/libtuning/libtuning.py
create mode 100644 utils/tuning/libtuning/macbeth.py
create mode 100644 utils/tuning/libtuning/macbeth_ref.pgm
create mode 100644 utils/tuning/libtuning/modules/__init__.py
create mode 100644 utils/tuning/libtuning/modules/module.py
create mode 100644 utils/tuning/libtuning/parsers/__init__.py
create mode 100644 utils/tuning/libtuning/parsers/parser.py
create mode 100644 utils/tuning/libtuning/smoothing.py
create mode 100644 utils/tuning/libtuning/utils.py
diff --git a/utils/tuning/libtuning/__init__.py b/utils/tuning/libtuning/__init__.py
new file mode 100644
index 00000000..63f3c8f9
--- /dev/null
+++ b/utils/tuning/libtuning/__init__.py
@@ -0,0 +1,9 @@
+from libtuning.utils import *
+from libtuning.libtuning import *
+
+from libtuning.image import *
+from libtuning.macbeth import *
+
+from libtuning.average_functions import *
+from libtuning.gradient import *
+from libtuning.smoothing import *
diff --git a/utils/tuning/libtuning/average_functions.py b/utils/tuning/libtuning/average_functions.py
new file mode 100644
index 00000000..4220e481
--- /dev/null
+++ b/utils/tuning/libtuning/average_functions.py
@@ -0,0 +1,21 @@
+import libtuning as lt
+
+import numpy as np
+
+
+# @brief Wrapper for np averaging functions so that they can be duck-typed
+class Average(object):
+ def __init__(self, params: list = []):
+ self.params = params
+ return
+
+ def __average__(self, np_array):
+ raise NotImplementedError
+
+ def average(self, np_array):
+ return self.__average__(np_array)
+
+
+class Mean(Average):
+ def __average__(self, np_array):
+ return np.mean(np_array)
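+
+
+# Example usage (illustrative sketch):
+#
+# >>> Mean().average(np.array([1.0, 2.0, 3.0, 4.0]))
+# 2.5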
diff --git a/utils/tuning/libtuning/generators/__init__.py b/utils/tuning/libtuning/generators/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/utils/tuning/libtuning/generators/generator.py b/utils/tuning/libtuning/generators/generator.py
new file mode 100644
index 00000000..51dd03de
--- /dev/null
+++ b/utils/tuning/libtuning/generators/generator.py
@@ -0,0 +1,12 @@
+from pathlib import Path
+
+
+class Generator(object):
+ def __init__(self):
+ return
+
+ def __write__(self, output_file: Path, output_dict: dict, output_order: list):
+ raise NotImplementedError
+
+ def write(self, output_path: str, output_dict: dict, output_order: list):
+ return self.__write__(Path(output_path), output_dict, output_order)
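+
+
+# A hypothetical subclass sketch (illustrative only; 'RawTextGenerator' is
+# not part of this patch):
+#
+# class RawTextGenerator(Generator):
+#     def __write__(self, output_file: Path, output_dict: dict, output_order: list):
+#         with open(output_file, 'w') as f:
+#             for module in output_order:
+#                 f.write(f'{module.name}: {output_dict[module]}\n')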
diff --git a/utils/tuning/libtuning/gradient.py b/utils/tuning/libtuning/gradient.py
new file mode 100644
index 00000000..ebf4f20e
--- /dev/null
+++ b/utils/tuning/libtuning/gradient.py
@@ -0,0 +1,111 @@
+import libtuning as lt
+
+import math
+
+
+# @brief Gradient for how to allocate pixels to sectors
+# @description There are no parameters for the gradients as the domain is the
+# number of pixels and the range is the number of sectors, and
+# there is only one curve that has a startpoint and endpoint at
+# (0, 0) and at (#pixels, #sectors). The exception is for curves
+# that *do* have multiple solutions for only two points, such as
+# gaussian, and curves of higher polynomial orders if we had them.
+#
+# todo There will probably be a helper in the Gradient class, as I have a
+# feeling that all the other curves (besides Linear and Gaussian) can be
+# implemented in the same way.
+class Gradient(object):
+ # @param remainder Mode of handling remainder
+ def __init__(self, remainder: lt.remainder = lt.remainder.DistributeFront):
+ self.remainder = remainder
+ return
+
+ # @brief Distribute pixels into sectors (only in one dimension)
+ # @param domain Number of pixels
+ # @param sectors Number of sectors
+ # @return A list of the number of pixels in each sector
+ def __distribute__(self, domain: int, sectors: int) -> list:
+ raise NotImplementedError
+
+ def distribute(self, domain: int, sectors: int) -> list:
+ return self.__distribute__(domain, sectors)
+
+
+class Circular(Gradient):
+ def __distribute__(self, domain, sectors):
+ raise NotImplementedError
+
+
+class Exponential(Gradient):
+ def __distribute__(self, domain, sectors):
+ raise NotImplementedError
+
+
+class Gaussian(Gradient):
+ def __distribute__(self, domain, sectors):
+ raise NotImplementedError
+
+
+class Hyperbolic(Gradient):
+ def __distribute__(self, domain, sectors):
+ raise NotImplementedError
+
+
+class Linear(Gradient):
+ def __distribute__(self, domain, sectors):
+ size = domain / sectors
+ rem = domain % sectors
+
+ if rem == 0:
+ return [int(size) for i in range(sectors)]
+
+ size = math.ceil(size)
+ rem = domain % size
+ output_sectors = [int(size) for i in range(sectors - 1)]
+
+ # Not sure if there's a use case for the first two, and even for
+ # the next two, not sure what to do because we have to shrink the
+ # number of sectors for the divisible ones, and then put remainder
+ # into the remaining sectors, but what if it divides nicely into
+ # the smaller number of sectors? Then the sectors for the remainder
+ # pixels will be empty. I'm leaving them unimplemented for now. Or
+ # we can remove them if we don't think they're necessary.
+
+ # Also not sure if there's a use case for the last two, since
+ # distributing the remaining pixels means that only one sector will
+ # be smaller than all the rest which will be the same size, so we
+ # can't actually split it between *two* sectors.
+
+ # If we eliminate all six of these cases we could use a simpler
+ # parameter as opposed to an entire enum.
+
+ if self.remainder == lt.remainder.Append:
+ raise NotImplementedError
+ elif self.remainder == lt.remainder.Prepend:
+ raise NotImplementedError
+ elif self.remainder == lt.remainder.Midpend:
+ raise NotImplementedError
+ elif self.remainder == lt.remainder.Endpend:
+ raise NotImplementedError
+ elif self.remainder == lt.remainder.DistributeFront:
+ output_sectors.append(rem)
+ elif self.remainder == lt.remainder.DistributeBack:
+ output_sectors.insert(0, rem)
+ elif self.remainder == lt.remainder.DistributeMiddle:
+ raise NotImplementedError
+ elif self.remainder == lt.remainder.DistributeEdges:
+ raise NotImplementedError
+ else:
+ raise ValueError
+
+ return output_sectors
+
+
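+# Worked example of the Linear distribution (illustrative; uses the default
+# DistributeFront remainder mode):
+#
+# >>> Linear().distribute(10, 4)
+# [3, 3, 3, 1]
+
+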
+class Logarithmic(Gradient):
+ def __distribute__(self, domain, sectors):
+ raise NotImplementedError
+
+
+class Parabolic(Gradient):
+ def __distribute__(self, domain, sectors):
+ raise NotImplementedError
diff --git a/utils/tuning/libtuning/image.py b/utils/tuning/libtuning/image.py
new file mode 100644
index 00000000..51185a69
--- /dev/null
+++ b/utils/tuning/libtuning/image.py
@@ -0,0 +1,272 @@
+import binascii
+import numpy as np
+from pathlib import Path
+import pyexiv2 as pyexif
+import rawpy as raw
+import re
+
+import libtuning as lt
+import libtuning.utils as utils
+
+
+class Image:
+ def __init__(self, path: Path):
+ self.path = path
+ self.name = path.name
+ self.alsc_only = False
+ self.color = -1
+ self.lux = -1
+
+ # May raise KeyError as there are too many to check
+ def _loadMetadataExif(self):
+ # RawPy doesn't load all the image tags that we need, so we use py3exiv2
+ metadata = pyexif.ImageMetadata(self.path)
+ metadata.read()
+
+ self.ver = 100  # arbitrary value
+ # The DNG and TIFF/EP specifications use different IFDs to store the
+ # raw image data and the Exif tags. DNG stores them in a SubIFD and in
+ # an Exif IFD respectively (named "SubImage1" and "Photo" by pyexiv2),
+ # while TIFF/EP stores them both in IFD0 (name "Image"). Both are used
+ # in "DNG" files, with libcamera-apps following the DNG recommendation
+ # and applications based on picamera2 following TIFF/EP.
+ #
+ # This code detects which tags are being used, and therefore extracts the
+ # correct values.
+ try:
+ self.w = metadata['Exif.SubImage1.ImageWidth'].value
+ subimage = "SubImage1"
+ photo = "Photo"
+ except KeyError:
+ self.w = metadata['Exif.Image.ImageWidth'].value
+ subimage = "Image"
+ photo = "Image"
+ self.pad = 0
+ self.h = metadata[f'Exif.{subimage}.ImageLength'].value
+ white = metadata[f'Exif.{subimage}.WhiteLevel'].value
+ self.sigbits = int(white).bit_length()
+ self.fmt = (self.sigbits - 4) // 2
+ self.exposure = int(metadata[f'Exif.{photo}.ExposureTime'].value * 1000000)
+ self.againQ8 = metadata[f'Exif.{photo}.ISOSpeedRatings'].value * 256 / 100
+ self.againQ8_norm = self.againQ8 / 256
+ self.camName = metadata['Exif.Image.Model'].value
+ self.blacklevel = int(metadata[f'Exif.{subimage}.BlackLevel'].value[0])
+ self.blacklevel_16 = self.blacklevel << (16 - self.sigbits)
+
+ # Channel order depending on bayer pattern
+ # The key is the order given by exif, where 0 is R, 1 is G, and 2 is B
+ # The second value of the value is the index where the color can be
+ # found, where the first is R, then G, then G, then B.
+ # The first value of the value is probably just for consistency with
+ # the brcm loader.
+ bayer_case = {
+ '0 1 1 2': (0, (lt.color.R, lt.color.GR, lt.color.GB, lt.color.B)),
+ '1 2 0 1': (1, (lt.color.GB, lt.color.R, lt.color.B, lt.color.GR)),
+ '2 1 1 0': (2, (lt.color.B, lt.color.GB, lt.color.GR, lt.color.R)),
+ '1 0 2 1': (3, (lt.color.GR, lt.color.R, lt.color.B, lt.color.GB))
+ }
+ cfa_pattern = metadata[f'Exif.{subimage}.CFAPattern'].value
+ self.pattern = bayer_case[cfa_pattern][0]
+ self.order = bayer_case[cfa_pattern][1]
+
+ def _readImageDng(self):
+ raw_im = raw.imread(str(self.path))
+ raw_data = raw_im.raw_image
+ shift = 16 - self.sigbits
+ c0 = np.left_shift(raw_data[0::2, 0::2].astype(np.int64), shift)
+ c1 = np.left_shift(raw_data[0::2, 1::2].astype(np.int64), shift)
+ c2 = np.left_shift(raw_data[1::2, 0::2].astype(np.int64), shift)
+ c3 = np.left_shift(raw_data[1::2, 1::2].astype(np.int64), shift)
+ self.channels = [c0, c1, c2, c3]
+
+ def loadDng(self):
+ try:
+ self._loadMetadataExif()
+ except Exception as e:
+ utils.eprint(f'Failed to load metadata from {self.path}: {e}')
+ return False
+
+ try:
+ self._readImageDng()
+ except Exception as e:
+ utils.eprint(f'Failed to load image data from {self.path}: {e}')
+ return False
+
+ return True
+
+ @staticmethod
+ def baToByte(ba):
+ total = 0
+ for i in range(len(ba)):
+ total += 256**i * ba[i]
+ return total
+
+ def _loadMetadataBrcm(self, buf):
+ self.ver = self.baToByte(buf[4:5])
+ self.w = self.baToByte(buf[0xd0:0xd2])
+ self.h = self.baToByte(buf[0xd2:0xd4])
+ self.pad = self.baToByte(buf[0xd4:0xd6])
+ self.fmt = buf[0xf5]
+ self.sigbits = 2 * self.fmt + 4
+ self.pattern = buf[0xf4]
+ self.exposure = self.baToByte(buf[0x90:0x94])
+ self.againQ8 = self.baToByte(buf[0x94:0x96])
+ self.againQ8_norm = self.againQ8 / 256
+ camName = buf[0x10:0x10 + 128]
+ camName_end = camName.find(0x00)
+ self.camName = camName[:camName_end].decode()
+
+ bayer_case = {
+ 0: (lt.color.R, lt.color.GR, lt.color.GB, lt.color.B),
+ 1: (lt.color.GB, lt.color.R, lt.color.B, lt.color.GR),
+ 2: (lt.color.B, lt.color.GB, lt.color.GR, lt.color.R),
+ 3: (lt.color.GR, lt.color.R, lt.color.B, lt.color.GB),
+ # arbitrary order for the greyscale case
+ 128: (lt.color.R, lt.color.GR, lt.color.GB, lt.color.B)
+ }
+ self.order = bayer_case[self.pattern]
+
+ # manual blacklevel - not robust
+ if 'ov5647' in self.camName:
+ self.blacklevel = 16
+ else:
+ self.blacklevel = 64
+ self.blacklevel_16 = self.blacklevel << (6)
+
+ def _readImageBrcm(self, buf):
+ # Check if data is 10 or 12 bits
+ if self.sigbits == 10:
+ # Calculate length of scanline
+ lin_len = ((((((self.w + self.pad + 3) >> 2)) * 5) + 31) >> 5) * 32
+
+ # Stack scan lines into matrix
+ data = np.array(buf).reshape(-1, lin_len).astype(np.int64)[:self.h, ...]
+
+ # Separate the 5 bytes in each packed group, stopping when w is satisfied
+ ba0 = data[..., 0:5 * ((self.w + 3) >> 2):5]
+ ba1 = data[..., 1:5 * ((self.w + 3) >> 2):5]
+ ba2 = data[..., 2:5 * ((self.w + 3) >> 2):5]
+ ba3 = data[..., 3:5 * ((self.w + 3) >> 2):5]
+ ba4 = data[..., 4:5 * ((self.w + 3) >> 2):5]
+
+ # Assemble 10 bit numbers
+ ch0 = np.left_shift((np.left_shift(ba0, 2) + (ba4 % 4)), 6)
+ ch1 = np.left_shift((np.left_shift(ba1, 2) + (np.right_shift(ba4, 2) % 4)), 6)
+ ch2 = np.left_shift((np.left_shift(ba2, 2) + (np.right_shift(ba4, 4) % 4)), 6)
+ ch3 = np.left_shift((np.left_shift(ba3, 2) + (np.right_shift(ba4, 6) % 4)), 6)
+
+ # Interleave the channels into a single image matrix
+ mat = np.empty((self.h, self.w), dtype=ch0.dtype)
+ mat[..., 0::4] = ch0
+ mat[..., 1::4] = ch1
+ mat[..., 2::4] = ch2
+ mat[..., 3::4] = ch3
+
+ # There is a memory leak somewhere in the code. The lines below
+ # seemed to make things good enough that the code would run for
+ # reasonable numbers of images, however this is technically just a
+ # workaround. (sorry)
+ ba0, ba1, ba2, ba3, ba4 = None, None, None, None, None
+ del ba0, ba1, ba2, ba3, ba4
+ ch0, ch1, ch2, ch3 = None, None, None, None
+ del ch0, ch1, ch2, ch3
+
+ # Same as before but 12 bit case
+ elif self.sigbits == 12:
+ lin_len = ((((((self.w + self.pad + 1) >> 1)) * 3) + 31) >> 5) * 32
+ data = np.array(buf).reshape(-1, lin_len).astype(np.int64)[:self.h, ...]
+ ba0 = data[..., 0:3 * ((self.w + 1) >> 1):3]
+ ba1 = data[..., 1:3 * ((self.w + 1) >> 1):3]
+ ba2 = data[..., 2:3 * ((self.w + 1) >> 1):3]
+ ch0 = np.left_shift((np.left_shift(ba0, 4) + ba2 % 16), 4)
+ ch1 = np.left_shift((np.left_shift(ba1, 4) + (np.right_shift(ba2, 4)) % 16), 4)
+ mat = np.empty((self.h, self.w), dtype=ch0.dtype)
+ mat[..., 0::2] = ch0
+ mat[..., 1::2] = ch1
+
+ else:
+ raise ValueError('BRCM image data must be 10 or 12 bits')
+
+ # Separate bayer channels
+ c0 = mat[0::2, 0::2]
+ c1 = mat[0::2, 1::2]
+ c2 = mat[1::2, 0::2]
+ c3 = mat[1::2, 1::2]
+ self.channels = [c0, c1, c2, c3]
+
+ def loadBrcm(self):
+ try:
+ with open(self.path, 'rb') as image:
+ f = image.read()
+ except FileNotFoundError:
+ utils.eprint(f'File {self.path} not found')
+ return False
+
+ if f is None:
+ utils.eprint(f'Failed to open {self.path}')
+ return False
+
+ b = bytearray(f)
+
+ # Find end of image followed by BRCM header
+ match = bytearray(b'\xff\xd9@BRCM')
+ match_str = binascii.hexlify(match)
+ b_str = binascii.hexlify(b)
+
+ # index is divided by two to go from string to hex
+ indices = [m.start() // 2 for m in re.finditer(match_str, b_str)]
+ if len(indices) == 0:
+ utils.eprint(f'No Broadcom header found in {self.path}')
+ return False
+
+ start = indices[0] + 3
+ buf = b[start:start + 32768]
+
+ try:
+ self._loadMetadataBrcm(buf)
+ except Exception as e:
+ utils.eprint(f'Failed to load metadata from {self.path}: {e}')
+ return False
+
+ buf = b[start + 32768:]
+ try:
+ self._readImageBrcm(buf)
+ except Exception as e:
+ utils.eprint(f'Failed to load image data from {self.path}: {e}')
+ return False
+
+ return True
+
+ def getPatches(self, cen_coords, size=16):
+ ret = True
+
+ # Obtain channel widths and heights
+ ch_w, ch_h = self.w, self.h
+ cen_coords = list(np.array((cen_coords[0])).astype(np.int32))
+ self.cen_coords = cen_coords
+
+ # Squares are ordered by stacking macbeth chart columns from left to
+ # right. Some useful patch indices:
+ # white = 3
+ # black = 23
+ # 'reds' = 9, 10
+ # 'blues' = 2, 5, 8, 20, 22
+ # 'greens' = 6, 12, 17
+ # greyscale = 3, 7, 11, 15, 19, 23
+ all_patches = []
+ for ch in self.channels:
+ ch_patches = []
+ for cen in cen_coords:
+ # The macbeth centre is placed at the top left of the central 2x2
+ # patch to account for rounding. Patch pixels are sorted by pixel
+ # brightness, so spatial information is lost.
+ patch = ch[cen[1] - 7:cen[1] + 9, cen[0] - 7:cen[0] + 9].flatten()
+ patch.sort()
+ if patch[-5] == (2**self.sigbits - 1) * 2**(16 - self.sigbits):
+ ret = False
+ ch_patches.append(patch)
+
+ all_patches.append(ch_patches)
+
+ self.patches = all_patches
+
+ return ret
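+
+
+# A typical load sequence (illustrative sketch; the file name follows the
+# convention documented in utils.processArgs):
+#
+# >>> image = Image(Path('3000k_500l.dng'))
+# >>> image.loadDng()
+# True
+# >>> len(image.channels)
+# 4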
diff --git a/utils/tuning/libtuning/libtuning.py b/utils/tuning/libtuning/libtuning.py
new file mode 100644
index 00000000..1b7d1306
--- /dev/null
+++ b/utils/tuning/libtuning/libtuning.py
@@ -0,0 +1,191 @@
+import libtuning.utils as utils
+from libtuning.utils import eprint
+
+from enum import Enum, IntEnum
+
+
+class color(IntEnum):
+ R = 0
+ GR = 1
+ GB = 2
+ B = 3
+ G = 4
+
+
+class debug(Enum):
+ Plot = 1
+
+
+# @brief What to do with the leftover pixels after dividing them into ALSC
+# sectors, when the division gradient is uniform
+# todo Do the first four and last two even make sense?
+# @var Append Put the leftover pixels in their own smaller sector, after the
+# uniform sectors
+# @var Prepend Same as Append, but before the uniform sectors instead of after
+# @var Midpend Same as Append, but in the center sector (if there are an odd
+# number of sectors in that dimension) or the center two sectors (if there
+# are an even number of sectors)
+# @var Endpend Same as Midpend, but divided between the first and last sectors
+# @var DistributeFront Divide the remainder equally (until running out,
+# obviously) into the existing sectors, starting from the front
+# @var DistributeBack Same as DistributeFront but starting from the back
+# @var DistributeMiddle Same as DistributeFront but spreading from the middle
+# @var DistributeEdges Same as DistributeMiddle, but spreading from both the
+# front and back
+class remainder(Enum):
+ Append = 1
+ Prepend = 2
+ Midpend = 3
+ Endpend = 4
+ DistributeFront = 5
+ DistributeBack = 6
+ DistributeMiddle = 7
+ DistributeEdges = 8
+
+
+# @brief A helper class to contain a default value for a module configuration
+# parameter
+class param():
+ # @var Required The value contained in this instance is irrelevant, and the
+ # value must be provided by the tuning configuration file.
+ # @var Optional If the value is not provided by the tuning configuration
+ # file, then the value contained in this instance will be used instead.
+ # @var Hardcode The value contained in this instance will always be used
+ class mode(Enum):
+ Required = 0
+ Optional = 1
+ Hardcode = 2
+
+ # @param name Name of the parameter. Shall match the name used in the
+ # configuration file for the parameter
+ # @param required Whether or not a value is required in the config
+ # parameter of getValue()
+ # @param val Default value (only relevant if mode is Optional)
+ def __init__(self, name: str, required: mode, val=None):
+ self.name = name
+ self.required = required
+ self.val = val
+
+ def getValue(self, config: dict):
+ if self.required is self.mode.Hardcode:
+ return self.val
+
+ if self.required is self.mode.Required and self.name not in config:
+ raise ValueError(f'Parameter {self.name} is required but not provided in the configuration')
+
+ return config[self.name] if self.name in config else self.val
+
+ def isRequired(self):
+ return self.required is self.mode.Required
+
+ # @brief Used by libtuning to auto-generate help information for the tuning
+ # script on the available parameters for the configuration file
+ # todo implement this
+ def getInfo(self):
+ raise NotImplementedError
+
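+# A usage sketch for param (illustrative; 'sector_size' is a hypothetical
+# parameter name):
+#
+# >>> p = param('sector_size', param.mode.Optional, 16)
+# >>> p.getValue({})
+# 16
+# >>> p.getValue({'sector_size': 32})
+# 32
+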
+
+class Camera(object):
+
+ # External functions
+
+ def __init__(self, platform_name):
+ self.name = platform_name
+ self.modules = []
+ self.parser = None
+ self.generator = None
+ self.output_order = []
+ self.config = {}
+ self.output = {}
+ return
+
+ def add(self, module):
+ self.modules.append(module)
+ return
+
+ def setInputType(self, parser):
+ self.parser = parser
+ return
+
+ def setOutputType(self, output):
+ self.generator = output
+ return
+
+ def setOutputOrder(self, modules):
+ self.output_order = modules
+ return
+
+ # @brief Convert classes in self.output_order to the instances in self.modules
+ def _prepareOutputOrder(self):
+ output_order = self.output_order
+ self.output_order = []
+ for module_type in output_order:
+ modules = [module for module in self.modules if type(module) == module_type]
+ if len(modules) > 1:
+ eprint(f'Multiple modules found for module type "{module_type.__name__}"')
+ return False
+ if len(modules) < 1:
+ eprint(f'No module found for module type "{module_type.__name__}"')
+ return False
+ self.output_order.append(modules[0])
+
+ return True
+
+ def _validateSettings(self):
+ if self.parser is None:
+ eprint('Missing parser')
+ return False
+
+ if self.generator is None:
+ eprint('Missing generator')
+ return False
+
+ if len(self.modules) == 0:
+ eprint('No modules added')
+ return False
+
+ if len(self.output_order) != len(self.modules):
+ eprint('Number of outputs does not match number of modules')
+ return False
+
+ return True
+
+ def run(self, argv):
+ args = utils.processArgs(argv, self.name)
+ if args is None:
+ return -1
+
+ if not self._validateSettings():
+ return -1
+
+ if not self._prepareOutputOrder():
+ return -1
+
+ if len(args.config) > 0:
+ self.config, disable = self.parser.parse(args.config, self.modules)
+
+ for module in disable:
+ if module in self.modules:
+ self.modules.remove(module)
+
+ for module in self.modules:
+ if not module.validateConfig(self.config):
+ eprint(f'Config is invalid for module {module.name}')
+ return -1
+
+ images = utils.loadImages(args.input, self.config, self.modules)
+ if images is None:
+ return -1
+
+ # We need args for input image locations and debug options, and config
+ # for parameters such as do_color and luminance_strength
+ for module in self.modules:
+ out = module.process(args, self.config, images, self.output)
+ if out is None:
+ eprint(f'Module {module.name} failed to process, aborting')
+ break
+ self.output[module] = out
+
+ self.generator.write(args.output, self.output, self.output_order)
+
+ return 0
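+
+
+# A minimal tuning-script sketch built on this class (illustrative; MyModule,
+# MyParser and MyGenerator are hypothetical stand-ins for concrete
+# subclasses):
+#
+# import sys
+# import libtuning as lt
+#
+# camera = lt.Camera('myplatform')
+# camera.add(MyModule())
+# camera.setInputType(MyParser())
+# camera.setOutputType(MyGenerator())
+# camera.setOutputOrder([MyModule])
+# sys.exit(camera.run(sys.argv[1:]))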
diff --git a/utils/tuning/libtuning/macbeth.py b/utils/tuning/libtuning/macbeth.py
new file mode 100644
index 00000000..bfedbe95
--- /dev/null
+++ b/utils/tuning/libtuning/macbeth.py
@@ -0,0 +1,654 @@
+# SPDX-License-Identifier: BSD-2-Clause
+#
+# Copyright (C) 2019, Raspberry Pi Ltd
+#
+# (Copied from: ctt_macbeth_locator.py)
+
+import cv2
+import matplotlib.pyplot as plt
+import numpy as np
+import os
+from pathlib import Path
+from sklearn import cluster
+import warnings
+
+from libtuning.ctt_ransac import get_square_verts, get_square_centres
+from libtuning.utils import eprint
+
+
+class MacbethError(Exception):
+ pass
+
+
+# Compute the correlation coefficient between two greyscale images, used as
+# the confidence score for a macbeth chart match
+def correlate(im1, im2):
+ f1 = im1.flatten()
+ f2 = im2.flatten()
+ cor = np.corrcoef(f1, f2)
+ return cor[0][1]
+
+
+# Reshape image to fixed width without distorting returns image and scale
+# factor
+def reshape(img, width):
+ factor = width / img.shape[0]
+ return cv2.resize(img, None, fx=factor, fy=factor), factor
+
+
+# Display image for debugging... read at your own risk...
+def represent(img, name='image'):
+ # if type(img) == tuple or type(img) == list:
+ # for i in range(len(img)):
+ # name = 'image {}'.format(i)
+ # cv2.imshow(name, img[i])
+ # else:
+ # cv2.imshow(name, img)
+ # cv2.waitKey(0)
+ # cv2.destroyAllWindows()
+ # return 0
+ """
+ code above displays using opencv, but this doesn't catch users pressing 'x'
+ with their mouse to close the window.... therefore matplotlib is used....
+ (thanks a lot opencv)
+ """
+ grid = plt.GridSpec(22, 1)
+ plt.subplot(grid[:19, 0])
+ plt.imshow(img, cmap='gray')
+ plt.axis('off')
+ plt.subplot(grid[21, 0])
+ plt.title('press \'q\' to continue')
+ plt.axis('off')
+ plt.show()
+
+ # f = plt.figure()
+ # ax = f.add_subplot(211)
+ # ax2 = f.add_subplot(122)
+ # ax.imshow(img, cmap='gray')
+ # ax.axis('off')
+ # ax2.set_figheight(2)
+ # ax2.title('press \'q\' to continue')
+ # ax2.axis('off')
+ # plt.show()
+
+
+def draw_macbeth_results(img, coords_fit):
+ # Extract data from coords_fit and plot on original image
+ if coords_fit is not None:
+ copy = img.copy()
+ verts = coords_fit[0][0]
+ cents = coords_fit[1][0]
+
+ # Draw circles at vertices of macbeth chart
+ for vert in verts:
+ p = tuple(np.round(vert).astype(np.int32))
+ cv2.circle(copy, p, 10, 1, -1)
+
+ # Draw circles at centres of squares
+ for i in range(len(cents)):
+ cent = cents[i]
+ p = tuple(np.round(cent).astype(np.int32))
+
+ # Draw black circle on white square, white circle on black square
+ # and a grey circle everywhere else.
+ if i == 3:
+ cv2.circle(copy, p, 8, 0, -1)
+ elif i == 23:
+ cv2.circle(copy, p, 8, 1, -1)
+ else:
+ cv2.circle(copy, p, 8, 0.5, -1)
+ copy, _ = reshape(copy, 400)
+ represent(copy)
+
+
+def find_macbeth(img, mac_config):
+ small_chart = mac_config['small']
+ show = mac_config['show']
+
+ # Catch the warnings
+ warnings.simplefilter("ignore")
+ warnings.warn("runtime", RuntimeWarning)
+
+ # Reference macbeth chart is created that will be correlated with the
+ # located macbeth chart guess to produce a confidence value for the match.
+ script_dir = Path(os.path.realpath(os.path.dirname(__file__)))
+ macbeth_ref_path = script_dir.joinpath('macbeth_ref.pgm')
+ ref = cv2.imread(str(macbeth_ref_path), flags=cv2.IMREAD_GRAYSCALE)
+ ref_w = 120
+ ref_h = 80
+ rc1 = (0, 0)
+ rc2 = (0, ref_h)
+ rc3 = (ref_w, ref_h)
+ rc4 = (ref_w, 0)
+ ref_corns = np.array((rc1, rc2, rc3, rc4), np.float32)
+ ref_data = (ref, ref_w, ref_h, ref_corns)
+
+ # Locate macbeth chart
+ cor, mac, coords, ret = get_macbeth_chart(img, ref_data)
+
+ # Following bits of code try to fix common problems with simple techniques.
+ # If now or at any point the best correlation is of above 0.75, then
+ # nothing more is tried as this is a high enough confidence to ensure
+ # reliable macbeth square centre placement.
+
+ for brightness in [2, 4]:
+ if cor >= 0.75:
+ break
+ img_br = cv2.convertScaleAbs(img, alpha=brightness, beta=0)
+ cor_b, mac_b, coords_b, ret_b = get_macbeth_chart(img_br, ref_data)
+ if cor_b > cor:
+ cor, mac, coords, ret = cor_b, mac_b, coords_b, ret_b
+
+ # In case macbeth chart is too small, take a selection of the image and
+ # attempt to locate macbeth chart within that. The scale increment is
+ # root 2
+
+ # These variables will be used to transform the found coordinates at
+ # smaller scales back into the original. If ii is still -1 after this
+ # section that means it was not successful
+ ii = -1
+ w_best = 0
+ h_best = 0
+ d_best = 100
+
+ # d_best records the scale of the best match. Macbeth charts are only
+ # looked for at one scale increment smaller than the current best match,
+ # in order to avoid unnecessarily searching for macbeth charts at small
+ # scales. If a macbeth chart has already been found, set d_best to 0.
+ if cor != 0:
+ d_best = 0
+
+ for index, pair in enumerate([{'sel': 2 / 3, 'inc': 1 / 6},
+ {'sel': 1 / 2, 'inc': 1 / 8},
+ {'sel': 1 / 3, 'inc': 1 / 12},
+ {'sel': 1 / 4, 'inc': 1 / 16}]):
+ if cor >= 0.75:
+ break
+
+ # Check if we need to check macbeth charts at even smaller scales. This
+ # slows the code down significantly and has therefore been omitted by
+ # default, however it is not unusably slow so might be useful if the
+ # macbeth chart is too small to be picked up to by the current
+ # subselections. Use this for macbeth charts with side lengths around
+ # 1/5 image dimensions (and smaller...?) it is, however, recommended
+ # that macbeth charts take up as large as possible a proportion of the
+ # image.
+ if index >= 2 and (not small_chart or d_best <= index - 1):
+ break
+
+ w, h = list(img.shape[:2])
+ # Set dimensions of the subselection and the step along each axis
+ # between selections
+ w_sel = int(w * pair['sel'])
+ h_sel = int(h * pair['sel'])
+ w_inc = int(w * pair['inc'])
+ h_inc = int(h * pair['inc'])
+
+ loop = int(((1 - pair['sel']) / pair['inc']) + 1)
+ # For each subselection, look for a macbeth chart
+ for i in range(loop):
+ for j in range(loop):
+ w_s, h_s = i * w_inc, j * h_inc
+ img_sel = img[w_s:w_s + w_sel, h_s:h_s + h_sel]
+ cor_ij, mac_ij, coords_ij, ret_ij = get_macbeth_chart(img_sel, ref_data)
+
+ # If the correlation is better than the best then record the
+ # scale and current subselection at which macbeth chart was
+ # found. Also record the coordinates, macbeth chart and message.
+ if cor_ij > cor:
+ cor = cor_ij
+ mac, coords, ret = mac_ij, coords_ij, ret_ij
+ ii, jj = i, j
+ w_best, h_best = w_inc, h_inc
+ d_best = index + 1
+
+ # Transform coordinates from subselection to original image
+ if ii != -1:
+ for a in range(len(coords)):
+ for b in range(len(coords[a][0])):
+ coords[a][0][b][1] += ii * w_best
+ coords[a][0][b][0] += jj * h_best
+
+ if not ret:
+ return None
+
+ coords_fit = coords
+ if cor < 0.75:
+ eprint(f'Warning: Low confidence {cor:.3f} for macbeth chart')
+
+ if show:
+ draw_macbeth_results(img, coords_fit)
+
+ return coords_fit
+
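+# Typical invocation (illustrative sketch; av_chan is a normalised greyscale
+# numpy array, as produced in utils._locateMacbeth):
+#
+# >>> coords = find_macbeth(av_chan, {'small': False, 'show': False})
+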
+
+# @brief Compute coordinates of macbeth chart vertices and square centres
+# @return (max_cor, best_map_col_norm, fit_coords, success)
+#
+# Also returns an error/success message for debugging purposes. Additionally,
+# it scores the match with a confidence value.
+#
+# Brief explanation of the macbeth chart locating algorithm:
+# - Find rectangles within image
+# - Take rectangles within percentage offset of median perimeter. The
+# assumption is that these will be the macbeth squares
+# - For each potential square, find the 24 possible macbeth centre locations
+# that would produce a square in that location
+# - Find clusters of potential macbeth chart centres to find the potential
+# macbeth centres with the most votes, i.e. the most likely ones
+# - For each potential macbeth centre, use the centres of the squares that
+# voted for it to find macbeth chart corners
+# - For each set of corners, transform the possible match into normalised
+# space and correlate with a reference chart to evaluate the match
+# - Select the highest correlation as the macbeth chart match, returning the
+# correlation as the confidence score
+#
+# todo: clean this up
+def get_macbeth_chart(img, ref_data):
+ (ref, ref_w, ref_h, ref_corns) = ref_data
+
+ """
+ the code will raise and catch a MacbethError in case of a problem, trying
+ to give some likely reasons why the problem occurred, hence the try/except
+ """
+ try:
+ """
+ obtain image, convert to grayscale and normalise
+ """
+ src = img
+ src, factor = reshape(src, 200)
+ original = src.copy()
+ a = 125 / np.average(src)
+ src_norm = cv2.convertScaleAbs(src, alpha=a, beta=0)
+ """
+ This code checks if there are separate colour channels. In the past the
+ macbeth locator ran on jpgs and this makes it robust to different
+ filetypes. Note that running it on a jpg has 4x the pixels of the
+ average bayer channel so coordinates must be doubled.
+
+ This is best done in image.py in the getPatches method. The
+ coordinates and image width and height must be divided by two if the
+ macbeth locator has been run on a demosaicked image.
+ """
+ if len(src_norm.shape) == 3:
+ src_bw = cv2.cvtColor(src_norm, cv2.COLOR_BGR2GRAY)
+ else:
+ src_bw = src_norm
+ original_bw = src_bw.copy()
+ """
+ obtain image edges
+ """
+ sigma = 2
+ src_bw = cv2.GaussianBlur(src_bw, (0, 0), sigma)
+ t1, t2 = 50, 100
+ edges = cv2.Canny(src_bw, t1, t2)
+ """
+ dilate edges to prevent self-intersections in contours
+ """
+ k_size = 2
+ kernel = np.ones((k_size, k_size))
+ its = 1
+ edges = cv2.dilate(edges, kernel, iterations=its)
+ """
+ find contours in image
+ """
+ conts, _ = cv2.findContours(edges, cv2.RETR_TREE,
+ cv2.CHAIN_APPROX_NONE)
+ if len(conts) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo contours found in image\n'
+ 'Possible problems:\n'
+ '- Macbeth chart is too dark or bright\n'
+ '- Macbeth chart is occluded\n'
+ )
+ """
+ find quadrilateral contours
+ """
+ epsilon = 0.07
+ conts_per = []
+ for i in range(len(conts)):
+ per = cv2.arcLength(conts[i], True)
+ poly = cv2.approxPolyDP(conts[i], epsilon * per, True)
+ if len(poly) == 4 and cv2.isContourConvex(poly):
+ conts_per.append((poly, per))
+
+ if len(conts_per) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo quadrilateral contours found'
+ '\nPossible problems:\n'
+ '- Macbeth chart is too dark or bright\n'
+ '- Macbeth chart is occluded\n'
+ '- Macbeth chart is out of camera plane\n'
+ )
+
+ """
+ sort contours by perimeter and get perimeters within percent of median
+ """
+ conts_per = sorted(conts_per, key=lambda x: x[1])
+ med_per = conts_per[int(len(conts_per) / 2)][1]
+ side = med_per / 4
+ perc = 0.1
+ med_low, med_high = med_per * (1 - perc), med_per * (1 + perc)
+ squares = []
+ for i in conts_per:
+ if med_low <= i[1] and med_high >= i[1]:
+ squares.append(i[0])
+
+ """
+ obtain coordinates of normalised macbeth and squares
+ """
+ square_verts, mac_norm = get_square_verts(0.06)
+ """
+ for each square guess, find 24 possible macbeth chart centres
+ """
+ mac_mids = []
+ squares_raw = []
+ for i in range(len(squares)):
+ square = squares[i]
+ squares_raw.append(square)
+ """
+ convert quads to rotated rectangles. This is required as the
+ 'squares' are usually quite irregular quadrilaterals, so performing
+ a transform would result in exaggerated warping and inaccurate
+ macbeth chart centre placement
+ """
+ rect = cv2.minAreaRect(square)
+ square = cv2.boxPoints(rect).astype(np.float32)
+ """
+ reorder vertices to prevent 'hourglass shape'
+ """
+ square = sorted(square, key=lambda x: x[0])
+ square_1 = sorted(square[:2], key=lambda x: x[1])
+ square_2 = sorted(square[2:], key=lambda x: -x[1])
+ square = np.array(np.concatenate((square_1, square_2)), np.float32)
+ square = np.reshape(square, (4, 2)).astype(np.float32)
+ squares[i] = square
+ """
+ find 24 possible macbeth chart centres by transforming normalised
+ macbeth square vertices onto candidate square vertices found in image
+ """
+ for j in range(len(square_verts)):
+ verts = square_verts[j]
+ p_mat = cv2.getPerspectiveTransform(verts, square)
+ mac_guess = cv2.perspectiveTransform(mac_norm, p_mat)
+ mac_guess = np.round(mac_guess).astype(np.int32)
+ """
+ keep only if candidate macbeth is within image border
+ (deprecated)
+ """
+ in_border = True
+ # for p in mac_guess[0]:
+ # pptest = cv2.pointPolygonTest(
+ # img_con,
+ # tuple(p),
+ # False
+ # )
+ # if pptest == -1:
+ # in_border = False
+ # break
+
+ if in_border:
+ mac_mid = np.mean(mac_guess,
+ axis=1)
+ mac_mids.append([mac_mid, (i, j)])
+
+ if len(mac_mids) == 0:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo possible macbeth charts found within image'
+ '\nPossible problems:\n'
+ '- Part of the macbeth chart is outside the image\n'
+ '- Quadrilaterals in image background\n'
+ )
+
+ """
+ reshape data
+ """
+ for i in range(len(mac_mids)):
+ mac_mids[i][0] = mac_mids[i][0][0]
+
+ """
+ find where midpoints cluster to identify most likely macbeth centres
+ """
+ clustering = cluster.AgglomerativeClustering(
+ n_clusters=None,
+ compute_full_tree=True,
+ distance_threshold=side * 2
+ )
+ mac_mids_list = [x[0] for x in mac_mids]
+
+ if len(mac_mids_list) == 1:
+ """
+ special case of only one valid centre found (probably not needed)
+ """
+ clus_list = []
+ clus_list.append([mac_mids, len(mac_mids)])
+
+ else:
+ clustering.fit(mac_mids_list)
+ # try:
+ # clustering.fit(mac_mids_list)
+ # except RuntimeWarning as error:
+ # return(0, None, None, error)
+
+ """
+ create list of all clusters
+ """
+ clus_list = []
+ if clustering.n_clusters_ > 1:
+ for i in range(clustering.labels_.max() + 1):
+ indices = [j for j, x in enumerate(clustering.labels_) if x == i]
+ clus = []
+ for index in indices:
+ clus.append(mac_mids[index])
+ clus_list.append([clus, len(clus)])
+ clus_list.sort(key=lambda x: -x[1])
+
+ elif clustering.n_clusters_ == 1:
+ """
+ special case of only one cluster found
+ """
+ # print('only 1 cluster')
+ clus_list.append([mac_mids, len(mac_mids)])
+ else:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNo clusters found'
+ '\nPossible problems:\n'
+ '- NA\n'
+ )
+
+ """
+ keep only clusters with enough votes
+ """
+ clus_len_max = clus_list[0][1]
+ clus_tol = 0.7
+ for i in range(len(clus_list)):
+ if clus_list[i][1] < clus_len_max * clus_tol:
+ clus_list = clus_list[:i]
+ break
+ cent = np.mean(clus_list[i][0], axis=0)[0]
+ clus_list[i].append(cent)
+
+ """
+ represent most popular cluster centroids
+ """
+ # copy = original_bw.copy()
+ # copy = cv2.cvtColor(copy, cv2.COLOR_GRAY2RGB)
+ # copy = cv2.resize(copy, None, fx=2, fy=2)
+ # for clus in clus_list:
+ # centroid = tuple(2*np.round(clus[2]).astype(np.int32))
+ # cv2.circle(copy, centroid, 7, (255, 0, 0), -1)
+ # cv2.circle(copy, centroid, 2, (0, 0, 255), -1)
+ # represent(copy)
+
+ """
+ get centres of each normalised square
+ """
+ reference = get_square_centres(0.06)
+
+ """
+ for each possible macbeth chart, transform image into
+ normalised space and find correlation with reference
+ """
+ max_cor = 0
+ best_map = None
+ best_fit = None
+ best_cen_fit = None
+ best_ref_mat = None
+
+ for clus in clus_list:
+ clus = clus[0]
+ sq_cents = []
+ ref_cents = []
+ i_list = [p[1][0] for p in clus]
+ for point in clus:
+ i, j = point[1]
+ """
+ remove any square that voted for two different points within
+ the same cluster. This causes the same point in the image to be
+ mapped to two different reference square centres, resulting in
+ a very distorted perspective transform since cv2.findHomography
+ simply minimises error.
+ This phenomenon is not particularly likely to occur due to the
+ enforced distance threshold in the clustering fit but it is
+ best to keep this in just in case.
+ """
+ if i_list.count(i) == 1:
+ square = squares_raw[i]
+ sq_cent = np.mean(square, axis=0)
+ ref_cent = reference[j]
+ sq_cents.append(sq_cent)
+ ref_cents.append(ref_cent)
+
+ """
+ At least four squares need to have voted for a centre in
+ order for a transform to be found
+ """
+ if len(sq_cents) < 4:
+ raise MacbethError(
+ '\nWARNING: No macbeth chart found!'
+ '\nNot enough squares found'
+ '\nPossible problems:\n'
+ '- Macbeth chart is occluded\n'
+ '- Macbeth chart is too dark or bright\n'
+ )
+
+ ref_cents = np.array(ref_cents)
+ sq_cents = np.array(sq_cents)
+ """
+ find best fit transform from normalised centres to image
+ """
+ h_mat, mask = cv2.findHomography(ref_cents, sq_cents)
+ if h_mat is None:
+ raise MacbethError(
+ '\nERROR\n'
+ )
+
+ """
+ transform normalised corners and centres into image space
+ """
+ mac_fit = cv2.perspectiveTransform(mac_norm, h_mat)
+ mac_cen_fit = cv2.perspectiveTransform(np.array([reference]), h_mat)
+ """
+ transform located corners into reference space
+ """
+ ref_mat = cv2.getPerspectiveTransform(
+ mac_fit,
+ np.array([ref_corns])
+ )
+ map_to_ref = cv2.warpPerspective(
+ original_bw, ref_mat,
+ (ref_w, ref_h)
+ )
+ """
+ normalise brightness
+ """
+ a = 125 / np.average(map_to_ref)
+ map_to_ref = cv2.convertScaleAbs(map_to_ref, alpha=a, beta=0)
+ """
+ find correlation with bw reference macbeth
+ """
+ cor = correlate(map_to_ref, ref)
+ """
+ keep only if best correlation
+ """
+ if cor > max_cor:
+ max_cor = cor
+ best_map = map_to_ref
+ best_fit = mac_fit
+ best_cen_fit = mac_cen_fit
+ best_ref_mat = ref_mat
+
+ """
+ rotate macbeth by pi and recorrelate in case macbeth chart is
+ upside-down
+ """
+ mac_fit_inv = np.array(
+ ([[mac_fit[0][2], mac_fit[0][3],
+ mac_fit[0][0], mac_fit[0][1]]])
+ )
+ mac_cen_fit_inv = np.flip(mac_cen_fit, axis=1)
+ ref_mat = cv2.getPerspectiveTransform(
+ mac_fit_inv,
+ np.array([ref_corns])
+ )
+ map_to_ref = cv2.warpPerspective(
+ original_bw, ref_mat,
+ (ref_w, ref_h)
+ )
+ a = 125 / np.average(map_to_ref)
+ map_to_ref = cv2.convertScaleAbs(map_to_ref, alpha=a, beta=0)
+ cor = correlate(map_to_ref, ref)
+ if cor > max_cor:
+ max_cor = cor
+ best_map = map_to_ref
+ best_fit = mac_fit_inv
+ best_cen_fit = mac_cen_fit_inv
+ best_ref_mat = ref_mat
+
+ """
+ Check best match is above threshold
+ """
+ cor_thresh = 0.6
+ if max_cor < cor_thresh:
+ raise MacbethError(
+ '\nWARNING: Correlation too low'
+ '\nPossible problems:\n'
+ '- Bad lighting conditions\n'
+ '- Macbeth chart is occluded\n'
+ '- Background is too noisy\n'
+ '- Macbeth chart is out of camera plane\n'
+ )
+ """
+ Following code is mostly representation for debugging purposes
+ """
+
+ """
+ draw macbeth corners and centres on image
+ """
+ copy = cv2.resize(original, None, fx=2, fy=2)
+ # print('correlation = {}'.format(round(max_cor, 2)))
+ for point in best_fit[0]:
+ point = np.array(point, np.float32)
+ point = tuple(2 * np.round(point).astype(np.int32))
+ cv2.circle(copy, point, 4, (255, 0, 0), -1)
+ for point in best_cen_fit[0]:
+ point = np.array(point, np.float32)
+ point = tuple(2 * np.round(point).astype(np.int32))
+ cv2.circle(copy, point, 4, (0, 0, 255), -1)
+
+ """
+ represent coloured macbeth in reference space
+ """
+ best_map_col = cv2.warpPerspective(
+ original, best_ref_mat, (ref_w, ref_h)
+ )
+ best_map_col = cv2.resize(
+ best_map_col, None, fx=4, fy=4
+ )
+ a = 125 / np.average(best_map_col)
+ best_map_col_norm = cv2.convertScaleAbs(
+ best_map_col, alpha=a, beta=0
+ )
+ # cv2.imshow('Macbeth', best_map_col)
+ # represent(copy)
+
+ """
+ rescale coordinates to original image size
+ """
+ fit_coords = (best_fit / factor, best_cen_fit / factor)
+
+ return (max_cor, best_map_col_norm, fit_coords, True)
+
+ """
+ catch macbeth errors and continue with code
+ """
+ except MacbethError as error:
+ eprint(error)
+ return (0, None, None, False)
diff --git a/utils/tuning/libtuning/macbeth_ref.pgm b/utils/tuning/libtuning/macbeth_ref.pgm
new file mode 100644
index 00000000..9b9f4920
--- /dev/null
+++ b/utils/tuning/libtuning/macbeth_ref.pgm
@@ -0,0 +1,5 @@
+P5
+# Reference macbeth chart
+120 80
+255
+ !#!" #!"&&$#$#'"%&#+2///..../.........-()))))))))))))))))))(((-,*)'(&)#($%(%"###""!%""&"&&!$" #!$ !"! $&**" !#5.,%+,-5"0<HBAA54" %##((()*+,---.........+*)))))))))))))))-.,,--+))('((''('%'%##"!""!"!""""#! ! %/vÀ¯z:òøßãLñ©û¶ÑÔcÒ,!#""%%''')**+)-../..../.-*)))))))))))))**,,)**'(''&'((&&%%##$! !!!! ! ! ! 5*"-)&7(1.75Rnge`\`$ ""!"%%%'')())++--/---,-..,-.,++**))))())*)*)''%'%&%&'&%%""""" ! !!$&$$&##(+*,,/10122126545./66402006486869650*.1.***)*+)()&((('('##)('&%%&%$$$#$%$%$ (((*))('((('('(&%V0;>>;@@>@AAAACBCB=&<·³µ¶¾¿ÃÇÇÆÇËÒÐÇÄ<5x|64RYVTSRRRMMNLKJJLH+&0gijgdeffmmnpnkji`#3 ª¦¨¨£bY! 3FHHIIIHIJIIJHIII@#?¾ÈÊÍÏÑÔÖØÚÚÚÛßáßÔ=7}:5Wcbcbdcb`^^`^^_^Y,'6
r'<½ÆÅÅÅÄÂÀ¿¾¾¼»¼¼µl%2FHHIIHJJJJJJIIJI?%;ÁÌÌÒÓÖØÙÛÛÜÜÞßâãÕ>7|;8Xfeeegeccb`^aba]Z+)<r)>¿ÇÇÇÆÅÅÄÂÁÁÀ¾¾¼·q#3GHIIIIJIIJJIHIJI@&5ÁÎÑÔÕØÙÚÜÜÞßßßàâ×=8~;8Zgghggedbdcbda^\Z+(;y)9¿ÈÈÈÇÇÅÄÂÁÁÀ¿½½¹z"3GIIJJJJJKJJJJJJJ@'4ÂÑÔÔÙÚÛÜÞÝßßààààØ>9|¥ =8Zhighgeeeedeca__[/)Bv&:ÁÊÊÊÊÆÆÆÂÁÂÂÁ¿¿º|#3GJJIIJKKKJJJKKJK@&6ÆÒ××ÙÛÛÞÞßààààààÖ>9~ <8Yghegggffihccab^\/*Cz'9ÄÍËÈÈÇÇÆÆÄÂÂÀÀ¿»$ 6IKJJMMMKMKKMKKMLC&2É××ÙÛÜßÞàááâââââÖ@9<9Yghhhhijiegdcebc^0)G(7ÃÍÌËÊÈÇÇÅÆÄÂÂÂÁº% 6JLMMNMMKMMNMMMMMD&2ÊÙÙÛÝßßßààáââáãâÖ@:~=9Xfghhjiigdgddedc`1)M}(:ÄÐÍÌËÊÇÆÆÆÅÂÄÁ¾& "8LNOONNOMONNMMNOND'3ÍÛÛÞßàààáââãâåãå×@; ¡¡ =:Ziiigheegegegggdc1,Q~)8ÂÍÎÌËÊÊÈÆÆÆÆÄÆÇÁ%# "9NNNPPPQOOOOONNOOD'0ÎÜÜßßáàáââååäãåæ×?;¡¡ ¡ =;[iigeeegghgdedgea0-P ¡ (8ÃÏÎÎÌÊÈÈÇÇÇÆÈÇÆÃ' "#$:NNOQPPRPQPOOPQPPD*1ÐßßàààâãããåææåææÛA; ;:Yfghgghgghghhdggc3.\¡£¡ ~);ÅÎÎÑÐÌËÊÇÈÉÊÊÇŤ(&%%;OQQQRSSRPQQQQSQQF)3ÓßàááãâãåææææææçÜB< =:Wfhghhhihggghfhee4/f ¥¤¢¡¡*:ÇÏÍÍÎÎÍÌÉÈËÊÈÆÆä&%%%?RSSSSSTTTTSSSTTRE)5ÕàááãâäåæåæçççèèÛB= ¡ @:Ygiihhiiiihihiiif72p £¤¤£ }(9ÇÎÏÎÍÍÍÍÍËÌÊÈÈÇÆ©'#%&?TUTTTUUQSTTTTTVSF*3ÕàãâãäåæææçççèééßF> ¡¡£ £¡¡¡ A;[ghjiihiiiihihije50r¢¦¥¥££ )6ÈÏÏÎÌÎÎÌÏÏËÊÊÈÈÆ«& &#%?SVVVUUUUUTUUVVUUG*5ÖãããåæææçèèèèééëßF=
¢££££ ¡¡ £ A;Yhijiiijjiiiiijje81t¦¦¦¥¥£¡ ~)5ÇÑÑÏÎËÍÍÑÑÌËÈÈÉÆ°' '$$=OQRRQQPRSRSSSSSSG+6ËÙÙÜÛÜÞÝßààààáããÙD@?;Wefgggggfffgeeefc41x{*5¾ÈÈÇÅÃÃÄÄÃÂÂÂÀ¿¼«( &&&'++++,,*-,-00-0100*-SUX\]]`_ffgiooopo=;X\bedbadbca`]\]ZZ;;<::8:;9983433110/-,...1//12410/..--+)"",---,-./,,.-/-0-( &&%+/0103322011223233)(34534767::;;==:=B9;BFGEEGIKJKIJGIJCD=<:76566554111/0/1.*+00233300/00//..,+*#")(*)++,++))*++**'!!&$*w³½¾¿Â¼ÀÀ¼¼·¹¹¸´²1-_addc`ceccdccedbb?A|B>=>?@@?====;<:;:<:11r+. ¥¢¡¤( !'%*zÀÇÆÆÇÇÊÊÈÈÈÊËËËÉ 42gjmllklomooonpopmHG©¬«««¬©«««ª««ª©£D>AEDEFEECEECCCDDEC46µåçèçççæåäãáàÞÜÚ׿0:Î×Ö×××ÖÕÒÓÏÐÐÍÍѾ,!!&&,|ÂÇÇÇÇÇÇËËÇÈÊËËÍÊ¡61inknnoopoppoqqrqoEE¬®®®®®¯®®¥FACGFFFFFFDFDDDDDDC57¹íñïîîíííëéçæãáßÝÄ09ÓÛÛÛÛÚÙØ×ÖÕÔÔÒÔÒÁ+!"%%-~ÀÆÈÊÇÇÈÉÌÌÊÊËÌÌÊ¡42inopppppoqqqrrsrnAB«®®®®®®®±®¬°¥C?DGGGGFFFFDFFDDEDC48ºíððïïîîíìëèçæãáßÅ1;ÔÞÞÝÜÚÚÙÙ×ÕÕÔÕÔÒÁ+!!"#*|¿ÄÉÊÈÈÈÈÉÍÉÈËÍÍÊ¡62imoppppqqqqrtrqtrGD¬®®°®°°°±±°®®§H?CGGGGGGGGFFFFFFDB38»îðïïïïîíììëèçæâàÅ1<ÖààßÞÞÜÚÚÙÙÙ××ÔÔ½, !)}¿ÃÈÈÊÇÈÈËÎËÊËÌÍË¢63mooppqqqqqqrrtvtoDH®±®°±°°®±°°¦JACHHGGHGGFFFDDGGFD29ÀðóòðïïïîííìêéèæâÆ3>ÖááààßÞÜÛÙÙÙØ×Ø×½, $){¼ÂÅÆÉÇÈÆËËÌÊËÊÍË¢53jpppqprqrrrttuvuo>H®°®±²±±°°°±°±°°ªJAFHHHHHGGHGGFGGFFE28ÁðôòòððïïîíëìëéçãÇ3:×ãáááßÞÝÛÛÚÙÚÚÚÚ½- "*{¸ÁÁÅÆÇÆÆÊËÌÉÊËÎÌ£53loqpqsqrrrtrutsvrAH«®®±±°°°®±±±®°©HCGHIHHHHHHGFGHGGGD5;ÀðóóòñððïîííìëëèäÇ28ØäãááààßÞÜÛÛÛÚÚÚÀ, +}¹¾ÀÂÂÅÅÅÇÉÍËÊËÌÊ¡52mqoqpqrttttttuurpFI®°±°±±²°±±°±±¯°§OCEHHIHHHHGHGGFFIGF8<ÃðòòóóòððïíîìììéæÍ48ÚçåããáààßÝÜÜÜÜÛÛ¿, (|º¼¾ÀÀÃÄÄÆÇÍËÊÊËÊ¢41krqpqqqrrtrtuvtuoEH°°²±±±±¯²²®²±®«PBHHIIIHIIHIHGHGHHE7<ÃðóóòñððððïíííìêçÑ58ÜèæåãââáßßÜÞÜÞÞÚÄ* (zºº»¾ÀÂÂÂÄÄÇËÈÊËÊ¡63kpqprqqstttutrvvoFO¯°¯°±±°±±±±±°±²©LEHHIIHIHHHIGHGIHGF4=ÅñóóóððððïïîìíëéèÓ5<ÞêèçåââááßÞÞßßßÚÇ* 'zº½º»¾ÁÀÂÂÄÅÊÇÈÊÈ¡62lppqrqrrrtttuttvpAG¯°±°±°°°°°±±°±±«MGHIIIIHIIIHHIIJHHG4<ÃñóóóðòððîîïííëéèÓ4<ÞëêççæãâáàßàÞÞÛØÇ+ !){º¼º»¾½ÀÁÁÂÄÉÇÇÉÈ 62jopqqqqqrtttutttrEH¯±°°°¯°°±±²²°±±ªOHFIIIIIJIIIIHIHIHI7>ÅðôóòòòïðïîîíììëèÒ5;àíêèææãâáâßßÜÛÙÖÇ, !)z¼¾¼¹»»ÁÁ¿ÁÁÈÇÆÆÆ53lppqqrqrtttuuuutsFI®±²±±±±²²²±°¯±²«RHGJIJHJKJJJIIIIIIH9>ÂñôôôòóððïîííìëééÓ5;àìééèææäááßÜÛØ×ÔÇ+ !({»¿¸º½½¿¾¿¾ÀÅÆÄÆÅ41joppprqrrrutttvvrIH±°°°±±±²²°±²±±ªTHCJJJJJIJIJJIJJJIH7=ÂòôòóóñðïîîííììéèÒ5;ßìêêèæåäâàÞÛÙÖ×ÕÇ+ (u±±®¯±³µ²´´µº»¸»º65gjlmmmnoopnpprpqoIH¦©ª©«ªªª«¬«ªª¨ª¤OIBIJJJIJJJJIIIHHHG89ºåççæçåäããâáßàßÝÜÈ29ÔàßÝÛÛÙØÕÓÑÎÌÈÌʾ' "&,-*)-01/,0/12102-+04448789<>>??AFAD at DBCIJNRWTSUXT[WUQUOKFEBBABA?>>=<<;;67942:<<<>9999864565363&(13335422./1/-+..+ !"&$$""$"&$%'()(''*+-0124688:<>>??A>?EBCHKOLJLNOSQOXQQVMLACGHGHIGFHGDCCBB@??7432233210111.,++,++%(++)*(''%%%$$#%&$# ")0/001120024455520+-U]`addcdhefeekecYGFJRXYYVWWZWVXXVZTOBF}K7Ybccddfeg`^]^]\[Z[*)OTTPPQPOKOLLJJLIK !1;:9:<<===;=???A at 9*/FJmxyxwyzzzxyzzz{zxLOÉÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿú]=§©¨¦§¥¦¤¤¢¡¡¡ .-
y# !!2><=;==>=<<>@@@@A9-0 IKnz||{|{||{}}~}}{zLOÌÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿý]>¬¬««¨ª¨§¦¥¥££¡¡..
~% $2==;<>>?===>@A at AB;+1
JJo{|y{||}{||}}}}}yMTÎÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿý_>¬¬«ª©©¦¦¦¤¤££¢ -.
}# %2<=;=<@?>==>?A at AA9+3
FMlz{{y|}}}}||}|}}{MTÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd>«¬«ªªª¨§¦¦¤¤¤¡ -,
# %1<<<;==<<=>?A?@AA:,3INo{{y{||||}|}}|~}{RTÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd=©¬«©©§¦¦¥¤¤£¡ /-
}#!$0<<<=<<==>A@@>@AA:-2HInzz{{||{{}~~}}|}zMRÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd=ª«ªªª§¥¥¥£¤¡¡ ++
~# "$/;<==>;===@@@@>AA:+2KHn||y|||||{}~}|}|xMSÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd=©©ª©©§¦¦¥¤£¡ +,
}# ! "/:<=>@<<>=@@@@@AA;-3MFs||{{{y}z}}|}|}}yMWÏÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýc>©ª©§¦¦¥¤£¡£ ,)
|! !1;>?>><<>@>>=>ABB;,0LHr{|{|}|y|}}}}}zNXÎÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýc?©«ª§§¦¥¥££ ()
z# $/;;<=;<>>=>>>@@BB:,1IInyz||||||{||}{~|{NVÏÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýc;§¨¨¦¦¦¤££¡¡('
}# $0:<==<;>@>>>>@ABB:,/HLlx|}y{y{|y{|}}}}yMRÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿýd>~¥§¦¦§¥¤££¢ *(y" !&3:;<<;==@@=>AABBA;-3KLqz{|||y{}|}{}|~{zRQÍÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿüc9w¤§¦¥¦¤¢£ )'
y" !%1<<;=>===<=@@ABBC<.5IIlz{|}~~~|}{||~}}zMUÌÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿüd;p¤¦¥¥¤¤£¤¢ )$
x" $2===<==@=<>=ABBBC?/0IGkz}}{||}{||y||}zyOVÊÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿüc7o¢¥¥¤££¡¤¡'&~
~z"#"#/;<:<<?>;===@?AAA>07GGgwxz{yyxyzzyz{yuuHO½ùûüüüüüüûûûúúúúò\8v'$w~~}|||{~|{zxxxxv!"""'*+(+)*))()+,,.../0398;=<=>DCCDDCBBDHBCJMMLMPNPOJPKPSJDICCNMPONMNNOKHIFDBHE3/46433323.....*+,)( !##!!!!!$#$$#$#&"!!"(+**,,*+.//1478:<:33ACDFGGIIHIJLPKNMQFIPTTRVXVUXUUTXUSTNEGGFDEFAA>==;94877520-,))*(((('&$#!!" &%'FQPQR]dq£«¹ÍàðÈ=FñûüÿÿÿÿÿÿÿÿÿÿÿÿÿúQN·èììêìæéììêéëëéêáLE
znki^[YTPUOS;.%-/12322221/10//,/%#0¯ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿß@QýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQMÁðôõôóôóôõõôõôôóæKE¨©¨§§¤¥¥¢¤£ H01NNQOQQOOMNNLKLJGB'&/¸ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿâAWþÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿOLÀñóôôôóóóõôôõóôòèKE¦¨©©§ª©ª¦¨§¥¢¤¢F-,PQQPQPPQPOONMNNKE''0·ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáCZþÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿRMÁñôóóòòôòôõóôôóòåJE¥©¬¬©ª©ª§¥¥¤¤¤¢F,*NSQPPQOOOOMNNMKID('2·ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD[þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQKÀðòòòóóòóõóõóòòðæIF§©ª©§©§©¥¤¤¤¤¡ F,*NPPPPPPNOONMMMJIF!'(2¶ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáF]þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿRL¿íððòðòòóóòòñïòðäHD£¦©©§¨¦¦¦¤¤¤¤¢ F+%MPPPPOOONONNMMKID)*4¸ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD^þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿPL¿ìðïïòòððòòðïðòïäIC¢¦¨¨¥¦¥§¥¤££¡F+&NPOOOPPOONMMKMKHD**6ºÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD_þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQJ¾ëïïïòðððððïðïîïãFC~¢§¥¥¦¦¦¤££¤¡ F,'MPOOOOONONNKKIIIG,+7»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáD^ýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿQI¾êîîîïðïðïððïïïîâEB|£¥¤££¤¤¤£¤¢ E+&MONOOONNNNKMJKJHH,-8¹ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿàD]þÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿPI¼éíìîîîîñðòóóöù÷èHE¥¨§¥¥¤¤£¡£¡ C,#LOOOONONNNKKKMKJF,*6»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿáCaýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþMH»éììíîðððôóõöööõçIF©ª§¦¦¥££¢ D*%KONOMNMMKMKJJJIJE,,6»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿâB^ýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþMG¹èììîðòóòóóóóòóôéHB}£©¦¦§¥¤¤¢D+&LONOOONNMMMMKLKIA,,6ºÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿàA\ýÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþMF¹éìííïòóôððôöõööêIE¦ª©¦§¨§§¡¡E+&LNNMONNMMKKKKKIHF --6¹ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿßA[üÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþKF¶çìðïððïðóöõöõùúîJC©««¦§¦¥¤¡¤F*&LMONMNMNKKJMKJJIF **5»ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿß>WüÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿþKE¶èïíðîðóöõøòùóöôçF?}¨©²¯¬¬©¥¤¤£C*%KONNNJKKKMKJKJKID,*4¶ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿØ<WöþÿÿÿÿÿÿÿÿÿÿÿÿÿÿøMA°áäååçêêïêëëåæéçÝGCx¨¦ ©¥¤ ¡B)%HKLKKJJJKIHIHHFGC!()*q ¡o39v|}wwwwwwrqtuspn=9^gadcfgce`dbUY[\^>;DIJDB?FEGE=7>8634.(&&(%&*&%%'+*)+*#%()''03364443233222243/-+133423333423766645789:><<<;<;<?=?;<<:78673/001113--.-+*)&&#"&$#%&""$!! ))+rbPpAD9-*******+*++)++--.//./.0/21453469:=;98<;<>=;><7766666741012.-13/-+-/(''&&&%%&$.%0()-%-#-#' #&(% )))hnYQgÛ7(*))))*)**,--....../0/0001357666::;;>?>AA866666666656565300/20/.-*)(('((&&%)d=yoP¼<Ñ?ßFQFx;§2»1«0))*RQ.0*,,5*(*))))*,**,+/.../...02/22224456468;:>BB;>;:76666666666755303033/,.-*(())('&')#)"##(+$+*#)) &
diff --git a/utils/tuning/libtuning/modules/__init__.py b/utils/tuning/libtuning/modules/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/utils/tuning/libtuning/modules/module.py b/utils/tuning/libtuning/modules/module.py
new file mode 100644
index 00000000..e45a8751
--- /dev/null
+++ b/utils/tuning/libtuning/modules/module.py
@@ -0,0 +1,41 @@
+from libtuning.utils import eprint
+
+
+# @var hr_name Human-readable module name
+# @var name Name of the module. Should match the standard name eg. 'alsc'
+class Module(object):
+ def __init__(self):
+ self.hr_name = "Base Module"
+ self.name = "module"
+ self.options = {}
+
+ # todo: I don't think we need these and the options member variable
+ def setValue(self, key, value):
+ self.options[key] = value
+
+ def appendValue(self, key, value):
+ if key not in self.options:
+ self.options[key] = []
+ if not isinstance(self.options[key], list):
+ raise TypeError(f'Option "{key}" in module "{self.name}" is not a list')
+ self.options[key].append(value)
+
+ def __validateConfig__(self, config: dict) -> bool:
+ if self not in config:
+ eprint(f'No config found for {self.name}')
+ return False
+ return True
+
+ def __process__(self, args, config: dict, images: list, outputs: dict) -> dict:
+ raise NotImplementedError
+
+ def validateConfig(self, config: dict) -> bool:
+ return self.__validateConfig__(config)
+
+ # @brief Do the module's processing
+ # @param args argparse arguments
+ # @param config Full configuration from the input configuration file
+ # @param images List of images to process
+ # @param outputs The outputs of all modules that were executed before this
+ # module. Note that this is an input parameter, and the
+ # output of this module should be returned directly
+ # @return Result of the module's processing
+ def process(self, args, config: dict, images: list, outputs: dict) -> dict:
+ return self.__process__(args, config, images, outputs)
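+
+
+# A minimal module sketch (illustrative; 'Example' is hypothetical):
+#
+# class Example(Module):
+#     def __init__(self):
+#         super().__init__()
+#         self.name = 'example'
+#         self.hr_name = 'Example Module'
+#
+#     def __process__(self, args, config, images, outputs):
+#         # Average the lux level over all calibration images
+#         return {'lux_mean': sum(img.lux for img in images) / len(images)}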
diff --git a/utils/tuning/libtuning/parsers/__init__.py b/utils/tuning/libtuning/parsers/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/utils/tuning/libtuning/parsers/parser.py b/utils/tuning/libtuning/parsers/parser.py
new file mode 100644
index 00000000..f1e6e629
--- /dev/null
+++ b/utils/tuning/libtuning/parsers/parser.py
@@ -0,0 +1,18 @@
+class Parser(object):
+ def __init__(self):
+ return
+
+ def __parse__(self, config_file):
+ raise NotImplementedError("__parse__() must be implemented")
+
+ # @brief Parse a config file into a config dict
+ # @details The config dict shall have one key 'general' with a dict value
+ # for general configuration options, and all other entries shall
+ # have the module as the key with its configuration options (as a
+ # dict) as the value. The config dict shall prune entries that are
+ # for modules that are not in @a modules.
+ # @param config (str) Path to config file
+ # @param modules (list) List of modules
+ # @return (dict, list) Configuration and list of modules to disable
+ def parse(self, config_file: str, modules: list) -> (dict, list):
+ return self.__parse__(config_file)
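+
+
+# A trivial parser sketch (illustrative; 'NullParser' is hypothetical):
+#
+# class NullParser(Parser):
+#     def __parse__(self, config_file):
+#         # Empty general section, and no modules disabled
+#         return {'general': {}}, []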
diff --git a/utils/tuning/libtuning/smoothing.py b/utils/tuning/libtuning/smoothing.py
new file mode 100644
index 00000000..b05e5c75
--- /dev/null
+++ b/utils/tuning/libtuning/smoothing.py
@@ -0,0 +1,21 @@
+import libtuning as lt
+
+import cv2
+
+
+# @brief Wrapper for cv2 smoothing functions so that they can be duck-typed
+class Smoothing(object):
+ def __init__(self, params: list = []):
+ self.params = params
+ return
+
+ def __smoothing__(self, src, ksize):
+ raise NotImplementedError
+
+ def smoothing(self, src, ksize):
+ return self.__smoothing__(src, ksize)
+
+
+class MedianBlur(Smoothing):
+ def __smoothing__(self, src, ksize):
+ return cv2.medianBlur(src.astype('float32'), ksize).astype('float64')
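+
+
+# Example usage (illustrative sketch; assumes numpy is imported as np):
+#
+# >>> MedianBlur().smoothing(np.ones((5, 5)), 3).dtype
+# dtype('float64')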
diff --git a/utils/tuning/libtuning/utils.py b/utils/tuning/libtuning/utils.py
new file mode 100644
index 00000000..63ddedc9
--- /dev/null
+++ b/utils/tuning/libtuning/utils.py
@@ -0,0 +1,198 @@
+import argparse
+import decimal
+import math
+import numpy as np
+import os
+from pathlib import Path
+import re
+import sys
+
+from libtuning.image import Image
+from libtuning.macbeth import find_macbeth
+
+# Utility functions
+
+
+def eprint(*args, **kwargs):
+ print(*args, file=sys.stderr, **kwargs)
+
+
+def getModuleByName(modules, name):
+ for module in modules:
+ if module.name == name:
+ return module
+ return None
+
+
+# @brief Round value while keeping the maximum number of decimal points
+# @param limits Tuple of [min, max] acceptable values
+# @description Prevents rounding such that significant figures are lost
+# todo Bikeshed this name
+def roundWithSigfigs(val, limits: tuple):
+ decimal_points = abs(decimal.Decimal(str(limits[-1])).as_tuple().exponent)
+ lshift = 10**(decimal_points - 1)
+ adjust = 10**(-decimal_points)
+
+ # We need the division to get rid of stray floating points
+ # todo Any better solution?
+ lower_bound = adjust * 10 * 5 * lshift / lshift
+ upper_bound = adjust * 10 * 95 * lshift / lshift
+
+ out = val
+ out = np.where((lshift * out) % 1 <= lower_bound, out + adjust, out)
+ out = np.where((lshift * out) % 1 >= upper_bound, out - adjust, out)
+
+ return out
+
+
+# Private utility functions
+
+
+def _listImageFiles(directory):
+ d = Path(directory)
+ files = [d.joinpath(f) for f in os.listdir(d)
+ if re.search(r'\.(jpe?g|dng|brcm)$', f)]
+ files.sort()
+ return files
+
+
+def _parseImageFilename(fn: Path):
+ result = re.search(r'^(alsc_)?(\d+)[kK]_(\d+)?[lLuU]\.\w{3,4}$', fn.name)
+ if result is None:
+ eprint(f'The file name of {fn.name} is incorrectly formatted')
+ return None, None, None
+
+ color = int(result.group(2))
+ alsc_only = result.group(1) is not None
+ lux = None if alsc_only else int(result.group(3))
+
+ return color, lux, alsc_only
+
+
+def _loadDngImage(path: Path):
+ image = Image(path)
+
+ if not image.loadDng():
+ return None
+
+ return image
+
+
+def _loadBrcmImage(path: Path):
+ image = Image(path)
+
+ if not image.loadBrcm():
+ return None
+
+ return image
+
+
+def _locateMacbeth(image: Image, config: dict):
+ # Find macbeth centres
+ av_chan = (np.mean(np.array(image.channels), axis=0) / (2**16))
+ av_val = np.mean(av_chan)
+ if av_val < image.blacklevel_16 / (2**16) + 1 / 64:
+ eprint(f'Image {image.path.name} too dark')
+ return None
+
+ macbeth = find_macbeth(av_chan, config['general']['macbeth'])
+
+ if macbeth is None:
+ eprint(f'No macbeth chart found in {image.path.name}')
+ return None
+
+ mac_cen_coords = macbeth[1]
+ if not image.getPatches(mac_cen_coords):
+ eprint(f'Macbeth patches have saturated in {image.path.name}')
+ return None
+
+ return macbeth
+
+
+# todo Implement this from check_imgs() in ctt.py
+def _validateImages(images):
+ return True
+
+
+# Public utility functions
+
+
+def processArgs(argv, platform_name):
+ parser = argparse.ArgumentParser(description=f'Camera Tuning for {platform_name}')
+ parser.add_argument('-i', '--input', type=str, required=True,
+ help='''Directory containing calibration images (required).
+ Images for ALSC must be named "alsc_{Color Temperature}k_1[u].dng",
+ and all other images must be named "{Color Temperature}k_{Lux Level}l.dng"''')
+ parser.add_argument('-o', '--output', type=str, required=True,
+ help='Output file (required)')
+ # It is not our duty to scan all modules to figure out their default
+ # options, so simply return an empty configuration if none is provided.
+ parser.add_argument('-c', '--config', type=str, default='',
+ help='Config file (optional)')
+ # todo check if we really need this or if stderr is good enough, or if we
+ # want a better logging infrastructure with log levels
+ parser.add_argument('-l', '--log', type=str, default=None,
+ help='Output log file (optional)')
+ return parser.parse_args(argv)
+
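+# Example invocation of a tuning script built on these arguments
+# (illustrative; the file names are hypothetical):
+#
+#     ./my_tuner.py -i images/ -o tuning.json -c config.yaml
+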
+
+def loadImages(input_dir: str, config: dict, modules: list) -> list:
+ files = _listImageFiles(input_dir)
+ if len(files) == 0:
+ eprint(f'No images found in {input_dir}')
+ return None
+
+ # todo Should this match by type instead of name?
+ has_alsc = any(m.name == 'alsc' for m in modules)
+ # todo Is there any use case for multiple ALSC modules?
+ has_only_alsc = has_alsc and len(modules) == 1
+
+ # todo Should this be separated into two lists for alsc_only?
+ images = []
+ for f in files:
+ color, lux, alsc_only = _parseImageFilename(f)
+ if color is None:
+ continue
+
+ # Skip alsc image if we don't have an alsc module
+ if alsc_only and not has_alsc:
+ eprint(f'Skipping {f.name} as this tuner has no ALSC module')
+ continue
+
+ # Skip non-alsc image if we have only an alsc module
+ if not alsc_only and has_only_alsc:
+ eprint(f'Skipping {f.name} as this tuner only has an ALSC module')
+ continue
+
+ # Load image
+ if re.search(r'.dng$', f.name):
+ image = _loadDngImage(f)
+ else:
+ image = _loadBrcmImage(f)
+
+ if image is None:
+ eprint(f'Failed to load image {f.name}')
+ continue
+
+ # Populate simple fields
+ image.alsc_only = alsc_only
+ image.color = color
+ image.lux = lux
+
+ if 'blacklevel' in config.get('general', {}):
+ image.blacklevel_16 = config['general']['blacklevel']
+
+ if alsc_only:
+ continue
+
+ # Handle macbeth
+ macbeth = _locateMacbeth(image, config)
+ if macbeth is None:
+ continue
+
+ images.append(image)
+
+ if not _validateImages(images):
+ return None
+
+ return images
--
2.30.2